diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e115f54d1..300bdff3d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,7 +12,7 @@ jobs: with: java-version: 1.8 - name: Cache Gradle packages - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.gradle/caches diff --git a/.github/workflows/core_dependencies.yml b/.github/workflows/dependencies-publish.yml similarity index 92% rename from .github/workflows/core_dependencies.yml rename to .github/workflows/dependencies-publish.yml index 29c0ca8a4..113dcaada 100644 --- a/.github/workflows/core_dependencies.yml +++ b/.github/workflows/dependencies-publish.yml @@ -1,4 +1,4 @@ -name: core-dependencies +name: dependencies-publish on: workflow_dispatch @@ -13,7 +13,7 @@ jobs: with: java-version: 1.8 - name: Cache Gradle packages - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.gradle/caches diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 058488922..271429c4a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,12 +22,12 @@ jobs: run: cd docs && yarn build - name: Deploy docs env: - GIT_USER: ravisuhag + GIT_USER: anshuman-gojek GIT_PASS: ${{ secrets.DOCU_RS_TOKEN }} DEPLOYMENT_BRANCH: gh-pages CURRENT_BRANCH: main working-directory: docs run: | - git config --global user.email "suhag.ravi@gmail.com" - git config --global user.name "ravisuhag" + git config --global user.email "anshuman.srivastava@gojek.com" + git config --global user.name "anshuman-gojek" yarn deploy diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index b673079b9..7fadb806a 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -42,7 +42,7 @@ jobs: with: java-version: 1.8 - name: Cache Gradle packages - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.gradle/caches diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml index d55f212f9..80bc06630 100644 --- a/.github/workflows/package.yml +++ b/.github/workflows/package.yml @@ -13,7 +13,7 @@ jobs: with: java-version: 1.8 - name: Cache Gradle packages - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.gradle/caches @@ -25,7 +25,7 @@ jobs: run: ./gradlew :dagger-common:publish env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Publish minimal and shadow packages of core - run: ./gradlew :dagger-core:minimalAndShadowPublish + - name: Publish minimal package of core + run: ./gradlew :dagger-core:minimalPublish env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index a849b17d8..723b7163e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,40 +3,7 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [v0.2.0](https://github.com/odpf/dagger/releases/tag/v0.2.0) (2022-02-03) - -### Features - -- Flink version bump from 1.9 to 1.14.3. -- JSON Datatype support in Dagger. -- refactor codebase and UDFs with new Flink contracts. -- build and test fixes for the upgrade. -- updates in watermark preserver logic. -- stencil upgrade. 
- -### [v0.1.3](https://github.com/odpf/dagger/releases/tag/v0.1.3) (2021-12-15) - -### Bug Fixes - -* darts udf cache refresh ([#82](https://github.com/odpf/dagger/issues/82)) ([674fa1a](https://github.com/odpf/dagger/commit/674fa1abbc1b3a8c5a8f099dcc792b8e1137593b)) - -### [v0.1.2](https://github.com/odpf/dagger/releases/tag/v0.1.2) (2021-10-01) - -### Features - -* junit upgrade -* test cleanup and fixes - - -### [v0.1.1](https://github.com/odpf/dagger/releases/tag/v0.1.1) (2021-08-03) - -### Features - -* have consistency of versions across modules ([#45](https://github.com/odpf/dagger/issues/45)) ([8903507](https://github.com/odpf/dagger/commit/89035071e5e3b8f1c42081a3ec2f45be17803233)) -* new udf for proto bytes to string conversion ([5d396d2](https://github.com/odpf/dagger/commit/5d396d2bef2b438e4fac99858d237a641df90f88)) - -### [v0.1.0](https://github.com/odpf/dagger/releases/tag/v0.1.0) (2021-07-05) +### [v0.9.0](https://github.com/goto/dagger/releases/tag/v0.9.0) (2023-03-16) ### Features diff --git a/README.md b/README.md index 538f0190e..092504477 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # Dagger -![build workflow](https://github.com/odpf/dagger/actions/workflows/build.yml/badge.svg) -![package workflow](https://github.com/odpf/dagger/actions/workflows/package.yml/badge.svg) +![build workflow](https://github.com/goto/dagger/actions/workflows/build.yml/badge.svg) +![package workflow](https://github.com/goto/dagger/actions/workflows/package.yml/badge.svg) [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg?logo=apache)](LICENSE) -[![Version](https://img.shields.io/github/v/release/odpf/dagger?logo=semantic-release)](https://github.com/odpf/dagger/releases/latest) +[![Version](https://img.shields.io/github/v/release/goto/dagger?logo=semantic-release)](https://github.com/goto/dagger/releases/latest) Dagger or Data Aggregator is an easy-to-use, configuration over code, cloud-native framework built on top of Apache Flink for stateful processing of data. With Dagger, you don't need to write custom applications or complicated code to process @@ -17,39 +17,39 @@ Discover why to use Dagger - **Scale:** Dagger scales in an instant, both vertically and horizontally for high performance streaming sink and zero data drops. - **Extensibility:** Add your own sink to dagger with a clearly defined interface or choose from already provided ones. Use Kafka and/or Parquet Files as stream sources. - **Flexibility:** Add custom business logic in form of plugins \(UDFs, Transformers, Preprocessors and Post Processors\) independent of the core logic. -- **Metrics:** Always know what’s going on with your deployment with built-in [monitoring](https://odpf.github.io/dagger/docs/reference/metrics) of throughput, response times, errors and more. +- **Metrics:** Always know what’s going on with your deployment with built-in [monitoring](https://goto.github.io/dagger/docs/reference/metrics) of throughput, response times, errors and more. ## What problems Dagger solves? 
* Map reduce -> [SQL](https://ci.apache.org/projects/flink/flink-docs-release-1.9/dev/table/sql.html) -* Enrichment -> [Post Processors](https://odpf.github.io/dagger/docs/advance/post_processor) -* Aggregation -> [SQL](https://ci.apache.org/projects/flink/flink-docs-release-1.9/dev/table/sql.html), [UDFs](https://odpf.github.io/dagger/docs/guides/use_udf) -* Masking -> [Hash Transformer](https://odpf.github.io/dagger/docs/reference/transformers#HashTransformer) -* Deduplication -> [Deduplication Transformer](https://odpf.github.io/dagger/docs/reference/transformers#DeDuplicationTransformer) -* Realtime long window processing -> [Longbow](https://odpf.github.io/dagger/docs/advance/longbow) +* Enrichment -> [Post Processors](https://goto.github.io/dagger/docs/advance/post_processor) +* Aggregation -> [SQL](https://ci.apache.org/projects/flink/flink-docs-release-1.9/dev/table/sql.html), [UDFs](https://goto.github.io/dagger/docs/guides/use_udf) +* Masking -> [Hash Transformer](https://goto.github.io/dagger/docs/reference/transformers#HashTransformer) +* Deduplication -> [Deduplication Transformer](https://goto.github.io/dagger/docs/reference/transformers#DeDuplicationTransformer) +* Realtime long window processing -> [Longbow](https://goto.github.io/dagger/docs/advance/longbow) -To know more, follow the detailed [documentation](https://odpf.github.io/dagger/). +To know more, follow the detailed [documentation](https://goto.github.io/dagger/). ## Usage Explore the following resources to get started with Dagger: -* [Guides](https://odpf.github.io/dagger/docs/guides/overview) provides guidance on [creating Dagger](https://odpf.github.io/dagger/docs/guides/create_dagger) with different sinks. -* [Concepts](https://odpf.github.io/dagger/docs/concepts/overview) describes all important Dagger concepts. -* [Advance](https://odpf.github.io/dagger/docs/advance/overview) contains details regarding advance features of Dagger. -* [Reference](https://odpf.github.io/dagger/docs/reference/overview) contains details about configurations, metrics and other aspects of Dagger. -* [Contribute](https://odpf.github.io/dagger/docs/contribute/contribution) contains resources for anyone who wants to contribute to Dagger. -* [Usecase](https://odpf.github.io/dagger/docs/usecase/overview) describes examples use cases which can be solved via Dagger. -* [Examples](https://odpf.github.io/dagger/docs/examples/overview) contains tutorials to try out some of Dagger's features with real-world usecases +* [Guides](https://goto.github.io/dagger/docs/guides/overview) provides guidance on [creating Dagger](https://goto.github.io/dagger/docs/guides/create_dagger) with different sinks. +* [Concepts](https://goto.github.io/dagger/docs/concepts/overview) describes all important Dagger concepts. +* [Advance](https://goto.github.io/dagger/docs/advance/overview) contains details regarding advance features of Dagger. +* [Reference](https://goto.github.io/dagger/docs/reference/overview) contains details about configurations, metrics and other aspects of Dagger. +* [Contribute](https://goto.github.io/dagger/docs/contribute/contribution) contains resources for anyone who wants to contribute to Dagger. +* [Usecase](https://goto.github.io/dagger/docs/usecase/overview) describes examples use cases which can be solved via Dagger. 
+* [Examples](https://goto.github.io/dagger/docs/examples/overview) contains tutorials to try out some of Dagger's features with real-world usecases ## Running locally -Please follow this [Dagger Quickstart Guide](https://odpf.github.io/dagger/docs/guides/quickstart) for setting up a local running Dagger consuming from Kafka or to set up a Docker Compose for Dagger. +Please follow this [Dagger Quickstart Guide](https://goto.github.io/dagger/docs/guides/quickstart) for setting up a local running Dagger consuming from Kafka or to set up a Docker Compose for Dagger. -**Note:** Sample configuration for running a basic dagger can be found [here](https://odpf.github.io/dagger/docs/guides/create_dagger#common-configurations). For detailed configurations, refer [here](https://odpf.github.io/dagger/docs/reference/configuration). +**Note:** Sample configuration for running a basic dagger can be found [here](https://goto.github.io/dagger/docs/guides/create_dagger#common-configurations). For detailed configurations, refer [here](https://goto.github.io/dagger/docs/reference/configuration). -Find more detailed steps on local setup [here](https://odpf.github.io/dagger/docs/guides/create_dagger). +Find more detailed steps on local setup [here](https://goto.github.io/dagger/docs/guides/create_dagger). ## Running on cluster -Refer [here](https://odpf.github.io/dagger/docs/guides/deployment) for details regarding Dagger deployment. +Refer [here](https://goto.github.io/dagger/docs/guides/deployment) for details regarding Dagger deployment. ## Running tests ```sh @@ -67,12 +67,12 @@ $ ./gradlew clean Development of Dagger happens in the open on GitHub, and we are grateful to the community for contributing bug fixes and improvements. Read below to learn how you can take part in improving Dagger. -Read our [contributing guide](https://odpf.github.io/dagger/docs/contribute/contribution) to learn about our development process, how to propose bug fixes and improvements, and how to build and test your changes to Dagger. +Read our [contributing guide](https://goto.github.io/dagger/docs/contribute/contribution) to learn about our development process, how to propose bug fixes and improvements, and how to build and test your changes to Dagger. -To help you get your feet wet and get you familiar with our contribution process, we have a list of [good first issues](https://github.com/odpf/dagger/labels/good%20first%20issue) that contain bugs which have a relatively limited scope. This is a great place to get started. +To help you get your feet wet and get you familiar with our contribution process, we have a list of [good first issues](https://github.com/goto/dagger/labels/good%20first%20issue) that contain bugs which have a relatively limited scope. This is a great place to get started. ## Credits -This project exists thanks to all the [contributors](https://github.com/odpf/dagger/graphs/contributors). +This project exists thanks to all the [contributors](https://github.com/goto/dagger/graphs/contributors). ## License Dagger is [Apache 2.0](LICENSE) licensed. 
diff --git a/build.gradle b/build.gradle index 0af27232b..ef5a05b59 100644 --- a/build.gradle +++ b/build.gradle @@ -15,7 +15,7 @@ subprojects { apply plugin: 'idea' apply plugin: 'checkstyle' - group 'io.odpf' + group 'com.gotocompany' checkstyle { toolVersion '7.6.1' diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml index dad0a53d8..4074eed88 100644 --- a/config/checkstyle/checkstyle.xml +++ b/config/checkstyle/checkstyle.xml @@ -95,7 +95,7 @@ - + diff --git a/dagger-common/build.gradle b/dagger-common/build.gradle index 27f55307f..0067b8687 100644 --- a/dagger-common/build.gradle +++ b/dagger-common/build.gradle @@ -58,26 +58,32 @@ dependencies { compileOnly group: 'org.apache.flink', name: 'flink-table', version: flinkVersion compileOnly group: 'org.apache.flink', name: 'flink-table-api-java-bridge_2.11', version: flinkVersion compileOnly group: 'org.apache.flink', name: 'flink-connector-kafka_2.11', version: flinkVersion + compileOnly 'com.gotocompany:stencil:0.6.0' - dependenciesCommonJar ('org.apache.hadoop:hadoop-client:2.8.3') { - exclude module:"commons-cli" - exclude module:"commons-compress" + dependenciesCommonJar('org.apache.hadoop:hadoop-client:3.3.6') { + exclude module: "commons-cli" + exclude module: "commons-compress" + exclude group: 'com.squareup.okhttp3', module: 'okhttp' } - dependenciesCommonJar 'com.google.cloud.bigdataoss:gcs-connector:1.9.0-hadoop2' + dependenciesCommonJar group: 'com.google.cloud.bigdataoss', name: 'gcs-connector', version: 'hadoop2-2.2.16' + + dependenciesCommonJar group: 'org.apache.hadoop', name: 'hadoop-aliyun', version: '3.4.1' + dependenciesCommonJar group: 'com.qcloud.cos', name: 'hadoop-cos', version: '3.4.0-8.3.17' + dependenciesCommonJar group: 'com.tencentcloudapi', name: 'tencentcloud-sdk-java-common', version: '3.1.1201' + dependenciesCommonJar 'org.apache.flink:flink-metrics-dropwizard:' + flinkVersion dependenciesCommonJar 'org.apache.flink:flink-json:' + flinkVersion dependenciesCommonJar 'com.jayway.jsonpath:json-path:2.4.0' - dependenciesCommonJar 'io.odpf:stencil:0.2.1' dependenciesCommonJar 'com.google.code.gson:gson:2.8.2' dependenciesCommonJar 'org.apache.parquet:parquet-column:1.12.2' - testImplementation 'junit:junit:4.13' + testImplementation 'junit:junit:4.13.1' testImplementation 'org.jmockit:jmockit:1.25' testImplementation 'org.mockito:mockito-core:2.25.1' testImplementation 'io.grpc:grpc-protobuf:1.18.0' testImplementation 'io.grpc:grpc-api:1.37.0' testImplementation 'io.grpc:grpc-stub:1.18.0' - testImplementation 'com.google.protobuf:protobuf-java:3.5.0' + testImplementation 'com.google.protobuf:protobuf-java:3.16.3' testImplementation 'com.google.protobuf:protobuf-java-util:3.5.0' testImplementation 'org.grpcmock:grpcmock-junit5:0.5.0' } @@ -127,7 +133,7 @@ publishing { repositories { maven { name = "GitHubPackages" - url = "https://maven.pkg.github.com/odpf/dagger" + url = "https://maven.pkg.github.com/goto/dagger" credentials { username = System.getenv("GITHUB_ACTOR") password = System.getenv("GITHUB_TOKEN") diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/configuration/Configuration.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/configuration/Configuration.java similarity index 85% rename from dagger-common/src/main/java/io/odpf/dagger/common/configuration/Configuration.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/configuration/Configuration.java index 737fa3a50..c819475a1 100644 --- 
a/dagger-common/src/main/java/io/odpf/dagger/common/configuration/Configuration.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/configuration/Configuration.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.configuration; +package com.gotocompany.dagger.common.configuration; import org.apache.flink.api.java.utils.ParameterTool; @@ -15,6 +15,10 @@ public ParameterTool getParam() { return param; } + public String getString(String configKey) { + return param.get(configKey); + } + public String getString(String configKey, String defaultValue) { return param.get(configKey, defaultValue); } diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/core/Constants.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/Constants.java similarity index 54% rename from dagger-common/src/main/java/io/odpf/dagger/common/core/Constants.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/core/Constants.java index 204578798..d1ab88b6f 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/core/Constants.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/Constants.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.core; +package com.gotocompany.dagger.common.core; public class Constants { public static final String SCHEMA_REGISTRY_STENCIL_ENABLE_KEY = "SCHEMA_REGISTRY_STENCIL_ENABLE"; @@ -6,9 +6,19 @@ public class Constants { public static final String SCHEMA_REGISTRY_STENCIL_URLS_KEY = "SCHEMA_REGISTRY_STENCIL_URLS"; public static final String SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT = ""; public static final String SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS = "SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS"; - public static final Integer SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT = 60000; + public static final Integer SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT = 10000; public static final String SCHEMA_REGISTRY_STENCIL_FETCH_HEADERS_KEY = "SCHEMA_REGISTRY_STENCIL_FETCH_HEADERS"; public static final String SCHEMA_REGISTRY_STENCIL_FETCH_HEADERS_DEFAULT = ""; + public static final String SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY = "SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH"; + public static final boolean SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT = false; + public static final String SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY = "SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS"; + public static final Long SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT = 900000L; + public static final String SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY = "SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY"; + public static final String SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT = "LONG_POLLING"; + public static final String SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS_KEY = "SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS"; + public static final Long SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS_DEFAULT = 60000L; + public static final String SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES_KEY = "SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES"; + public static final Integer SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES_DEFAULT = 4; public static final String UDF_TELEMETRY_GROUP_KEY = "udf"; public static final String GAUGE_ASPECT_NAME = "value"; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/core/DaggerContext.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/DaggerContext.java similarity index 92% rename from dagger-common/src/main/java/io/odpf/dagger/common/core/DaggerContext.java rename to 
dagger-common/src/main/java/com/gotocompany/dagger/common/core/DaggerContext.java index 3aee9c749..79c8eaa2b 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/core/DaggerContext.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/DaggerContext.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.common.core; +package com.gotocompany.dagger.common.core; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.exceptions.DaggerContextException; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.exceptions.DaggerContextException; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; diff --git a/dagger-common/src/main/java/com/gotocompany/dagger/common/core/FieldDescriptorCache.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/FieldDescriptorCache.java new file mode 100644 index 000000000..8b265dc87 --- /dev/null +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/FieldDescriptorCache.java @@ -0,0 +1,58 @@ +package com.gotocompany.dagger.common.core; + +import com.google.protobuf.Descriptors; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + + +public class FieldDescriptorCache implements Serializable { + private final Map fieldDescriptorIndexMap = new HashMap<>(); + private final Map protoDescriptorArityMap = new HashMap<>(); + + public FieldDescriptorCache(Descriptors.Descriptor descriptor) { + + cacheFieldDescriptorMap(descriptor); + } + + public void cacheFieldDescriptorMap(Descriptors.Descriptor descriptor) { + + if (protoDescriptorArityMap.containsKey(descriptor.getFullName())) { + return; + } + List descriptorFields = descriptor.getFields(); + protoDescriptorArityMap.putIfAbsent(descriptor.getFullName(), descriptorFields.size()); + + for (Descriptors.FieldDescriptor fieldDescriptor : descriptorFields) { + fieldDescriptorIndexMap.putIfAbsent(fieldDescriptor.getFullName(), fieldDescriptor.getIndex()); + } + + for (Descriptors.FieldDescriptor fieldDescriptor : descriptorFields) { + if (fieldDescriptor.getType().toString().equals("MESSAGE")) { + cacheFieldDescriptorMap(fieldDescriptor.getMessageType()); + + } + } + } + + public int getOriginalFieldIndex(Descriptors.FieldDescriptor fieldDescriptor) { + if (!fieldDescriptorIndexMap.containsKey(fieldDescriptor.getFullName())) { + throw new IllegalArgumentException("The Field Descriptor " + fieldDescriptor.getFullName() + " was not found in the cache"); + } + return fieldDescriptorIndexMap.get(fieldDescriptor.getFullName()); + } + + public boolean containsField(String fieldName) { + + return fieldDescriptorIndexMap.containsKey(fieldName); + } + + public int getOriginalFieldCount(Descriptors.Descriptor descriptor) { + if (!protoDescriptorArityMap.containsKey(descriptor.getFullName())) { + throw new IllegalArgumentException("The Proto Descriptor " + descriptor.getFullName() + " was not found in the cache"); + } + return protoDescriptorArityMap.get(descriptor.getFullName()); + } +} diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/core/StencilClientOrchestrator.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/StencilClientOrchestrator.java similarity index 68% rename from dagger-common/src/main/java/io/odpf/dagger/common/core/StencilClientOrchestrator.java rename 
to dagger-common/src/main/java/com/gotocompany/dagger/common/core/StencilClientOrchestrator.java index a3b61a7cc..f182e66f1 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/core/StencilClientOrchestrator.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/StencilClientOrchestrator.java @@ -1,11 +1,12 @@ -package io.odpf.dagger.common.core; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.stencil.StencilClientFactory; -import io.odpf.stencil.client.StencilClient; -import io.odpf.stencil.config.StencilConfig; -import org.apache.http.Header; -import org.apache.http.message.BasicHeader; +package com.gotocompany.dagger.common.core; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.stencil.StencilClientFactory; +import com.gotocompany.stencil.cache.SchemaRefreshStrategy; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.stencil.config.StencilConfig; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.message.BasicHeader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,7 +17,7 @@ import java.util.List; import java.util.stream.Collectors; -import static io.odpf.dagger.common.core.Constants.*; +import static com.gotocompany.dagger.common.core.Constants.*; /** * The Stencil client orchestrator for dagger. @@ -37,13 +38,29 @@ public StencilClientOrchestrator(Configuration configuration) { this.stencilUrls = getStencilUrls(); } - StencilConfig createStencilConfig() { + public StencilConfig createStencilConfig() { return StencilConfig.builder() .fetchHeaders(getHeaders(configuration)) .fetchTimeoutMs(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS, SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT)) + .cacheAutoRefresh(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)) + .cacheTtlMs(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)) + .refreshStrategy(getSchemaRefreshStrategy(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT))) + .fetchBackoffMinMs(configuration.getLong(SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS_KEY, SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS_DEFAULT)) + .fetchRetries(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES_KEY, SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES_DEFAULT)) .build(); } + private SchemaRefreshStrategy getSchemaRefreshStrategy(String refreshStrategy) { + if (refreshStrategy == null) { + return SchemaRefreshStrategy.longPollingStrategy(); + } + if (refreshStrategy.equalsIgnoreCase("VERSION_BASED_REFRESH")) { + return SchemaRefreshStrategy.versionBasedRefresh(); + } + return SchemaRefreshStrategy.longPollingStrategy(); + + } + private List
<Header> getHeaders(Configuration config) { String headerString = config.getString(SCHEMA_REGISTRY_STENCIL_FETCH_HEADERS_KEY, SCHEMA_REGISTRY_STENCIL_FETCH_HEADERS_DEFAULT); return parseHeaders(headerString); @@ -55,6 +72,7 @@ private List<Header>
getHeaders(Configuration config) { * @return the stencil client */ public StencilClient getStencilClient() { + if (stencilClient != null) { return stencilClient; } diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/core/StreamInfo.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/StreamInfo.java similarity index 95% rename from dagger-common/src/main/java/io/odpf/dagger/common/core/StreamInfo.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/core/StreamInfo.java index c634ac0f5..7b87b9fb4 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/core/StreamInfo.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/StreamInfo.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.core; +package com.gotocompany.dagger.common.core; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/core/Transformer.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/Transformer.java similarity index 75% rename from dagger-common/src/main/java/io/odpf/dagger/common/core/Transformer.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/core/Transformer.java index 207d0f158..5f31d33a5 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/core/Transformer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/core/Transformer.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.core; +package com.gotocompany.dagger.common.core; /** * The interface for all the transformer. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/DaggerContextException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/DaggerContextException.java similarity index 88% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/DaggerContextException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/DaggerContextException.java index b0e79da2a..dcae1217a 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/DaggerContextException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/DaggerContextException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions; +package com.gotocompany.dagger.common.exceptions; /** * The class Exception if there is something wrong with Dagger context object. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/DescriptorNotFoundException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/DescriptorNotFoundException.java similarity index 95% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/DescriptorNotFoundException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/DescriptorNotFoundException.java index 50cd49ff8..f249d2639 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/DescriptorNotFoundException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/DescriptorNotFoundException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions; +package com.gotocompany.dagger.common.exceptions; /** * The class Exception if Descriptor not found. 
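The Constants and StencilClientOrchestrator hunks above introduce five new schema-registry settings (cache auto refresh, cache TTL, refresh strategy, fetch backoff, fetch retries) and feed them into StencilConfig, with VERSION_BASED_REFRESH mapped to a version-based refresh strategy and anything else falling back to long polling. A minimal, illustrative sketch of how a job's parameters might exercise the new keys; the values below are made up for demonstration, not defaults prescribed by this patch:

```java
// Illustrative sketch only, not part of the patch. It assumes the keys added to
// Constants above; the values are examples, not recommended settings.
import org.apache.flink.api.java.utils.ParameterTool;

public class StencilRefreshConfigSketch {
    public static void main(String[] args) {
        ParameterTool params = ParameterTool.fromArgs(new String[]{
                "--SCHEMA_REGISTRY_STENCIL_ENABLE", "true",
                "--SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH", "true",
                "--SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY", "VERSION_BASED_REFRESH",
                "--SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS", "900000",
                "--SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES", "4"
        });
        // createStencilConfig() above maps VERSION_BASED_REFRESH (case-insensitively) to
        // SchemaRefreshStrategy.versionBasedRefresh(); any other value keeps long polling.
        System.out.println(params.getBoolean("SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH", false));
        System.out.println(params.get("SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY"));
    }
}
```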
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DaggerDeserializationException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DaggerDeserializationException.java similarity index 89% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DaggerDeserializationException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DaggerDeserializationException.java index 2e8acbe31..4baddb397 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DaggerDeserializationException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DaggerDeserializationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * The class Exception if failed on Deserialize the protobuf message. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DaggerSerializationException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DaggerSerializationException.java similarity index 88% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DaggerSerializationException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DaggerSerializationException.java index f95dc1b4b..a87dab726 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DaggerSerializationException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DaggerSerializationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * The class Exception if failed on Serializing the protobuf message. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DataTypeNotSupportedException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DataTypeNotSupportedException.java similarity index 86% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DataTypeNotSupportedException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DataTypeNotSupportedException.java index 4aa3d5afe..96ba7d679 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DataTypeNotSupportedException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/DataTypeNotSupportedException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * The class Exception for unsupported protobuf data type. 
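For reference, a hedged usage sketch of the new FieldDescriptorCache introduced earlier in this patch; google.protobuf.Timestamp is used purely as a stand-in schema and is not something the patch itself caches here:

```java
// Usage sketch for the FieldDescriptorCache added in this patch; the Timestamp
// descriptor is only a stand-in example schema.
import com.google.protobuf.Descriptors;
import com.google.protobuf.Timestamp;
import com.gotocompany.dagger.common.core.FieldDescriptorCache;

public class FieldDescriptorCacheSketch {
    public static void main(String[] args) {
        Descriptors.Descriptor descriptor = Timestamp.getDescriptor();
        FieldDescriptorCache cache = new FieldDescriptorCache(descriptor);

        // Timestamp has two fields: seconds (index 0) and nanos (index 1).
        System.out.println(cache.getOriginalFieldCount(descriptor));                          // 2
        System.out.println(cache.containsField("google.protobuf.Timestamp.seconds"));         // true
        System.out.println(cache.getOriginalFieldIndex(descriptor.findFieldByName("nanos"))); // 1
    }
}
```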
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/EnumFieldNotFoundException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/EnumFieldNotFoundException.java similarity index 86% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/EnumFieldNotFoundException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/EnumFieldNotFoundException.java index 48f437e68..15c2fb40a 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/EnumFieldNotFoundException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/EnumFieldNotFoundException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * The class Exception if Enum field not found in proto descriptor. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidColumnMappingException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidColumnMappingException.java similarity index 92% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidColumnMappingException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidColumnMappingException.java index e84e02dd4..75224a4c9 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidColumnMappingException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidColumnMappingException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * The class Exception if there is Invalid Column Mapping. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidDataTypeException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidDataTypeException.java similarity index 85% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidDataTypeException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidDataTypeException.java index d47caa42f..4954b47cd 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidDataTypeException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidDataTypeException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * The class Exception if there is an Invalid Data type. 
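Circling back to the Configuration change near the top of this patch: the new single-argument getString delegates straight to ParameterTool.get, so a missing key yields null instead of a default. A small sketch, assuming Configuration exposes a ParameterTool-based constructor (only getParam() is visible in the hunk); the keys are illustrative:

```java
// Hedged sketch of the new Configuration#getString(String) overload; assumes a
// Configuration(ParameterTool) constructor, which is not shown in the hunk above.
import com.gotocompany.dagger.common.configuration.Configuration;
import org.apache.flink.api.java.utils.ParameterTool;

import java.util.Collections;

public class ConfigurationGetStringSketch {
    public static void main(String[] args) {
        Configuration config = new Configuration(
                ParameterTool.fromMap(Collections.singletonMap("SINK_TYPE", "log")));

        System.out.println(config.getString("SINK_TYPE"));               // log
        System.out.println(config.getString("MISSING_KEY"));             // null (no default)
        System.out.println(config.getString("MISSING_KEY", "fallback")); // fallback
    }
}
```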
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidJSONSchemaException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidJSONSchemaException.java similarity index 75% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidJSONSchemaException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidJSONSchemaException.java index 640c7935c..a3def3374 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidJSONSchemaException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/InvalidJSONSchemaException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; public class InvalidJSONSchemaException extends RuntimeException { public InvalidJSONSchemaException(Exception innerException) { diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/SimpleGroupParsingException.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/SimpleGroupParsingException.java similarity index 82% rename from dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/SimpleGroupParsingException.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/SimpleGroupParsingException.java index c651d17d4..e2b162605 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/SimpleGroupParsingException.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/exceptions/serde/SimpleGroupParsingException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.exceptions.serde; +package com.gotocompany.dagger.common.exceptions.serde; /** * This runtime exception is thrown when a field cannot be parsed from a Parquet SimpleGroup. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/aspects/AspectType.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/aspects/AspectType.java similarity index 66% rename from dagger-common/src/main/java/io/odpf/dagger/common/metrics/aspects/AspectType.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/aspects/AspectType.java index e28665128..47b19499b 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/aspects/AspectType.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/aspects/AspectType.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.metrics.aspects; +package com.gotocompany.dagger.common.metrics.aspects; /** * The enum Aspect type. diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/aspects/Aspects.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/aspects/Aspects.java similarity index 69% rename from dagger-common/src/main/java/io/odpf/dagger/common/metrics/aspects/Aspects.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/aspects/Aspects.java index 291cb0497..6d66ef558 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/aspects/Aspects.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/aspects/Aspects.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.metrics.aspects; +package com.gotocompany.dagger.common.metrics.aspects; /** * The interface for aspects. 
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/CounterStatsManager.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/CounterStatsManager.java similarity index 92% rename from dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/CounterStatsManager.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/CounterStatsManager.java index 8b924a1ae..46f9dede1 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/CounterStatsManager.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/CounterStatsManager.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.common.metrics.managers; +package com.gotocompany.dagger.common.metrics.managers; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/GaugeStatsManager.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/GaugeStatsManager.java similarity index 95% rename from dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/GaugeStatsManager.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/GaugeStatsManager.java index 79d8d32e4..ddcde1f91 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/GaugeStatsManager.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/GaugeStatsManager.java @@ -1,10 +1,9 @@ -package io.odpf.dagger.common.metrics.managers; +package com.gotocompany.dagger.common.metrics.managers; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; -import io.odpf.dagger.common.metrics.aspects.Aspects; - /** * The Gauge stats manager. 
*/ diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/MeterStatsManager.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/MeterStatsManager.java similarity index 92% rename from dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/MeterStatsManager.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/MeterStatsManager.java index 46de410d7..968267639 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/metrics/managers/MeterStatsManager.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/metrics/managers/MeterStatsManager.java @@ -1,5 +1,7 @@ -package io.odpf.dagger.common.metrics.managers; +package com.gotocompany.dagger.common.metrics.managers; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; import org.apache.flink.dropwizard.metrics.DropwizardHistogramWrapper; import org.apache.flink.dropwizard.metrics.DropwizardMeterWrapper; import org.apache.flink.metrics.Histogram; @@ -7,13 +9,11 @@ import org.apache.flink.metrics.MetricGroup; import com.codahale.metrics.SlidingTimeWindowReservoir; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; import java.util.HashMap; import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.common.core.Constants.SLIDING_TIME_WINDOW; +import static com.gotocompany.dagger.common.core.Constants.SLIDING_TIME_WINDOW; /** diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/DaggerDeserializer.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DaggerDeserializer.java similarity index 80% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/DaggerDeserializer.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DaggerDeserializer.java index 928148ae5..e24cf608e 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/DaggerDeserializer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DaggerDeserializer.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde; +package com.gotocompany.dagger.common.serde; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/DaggerInternalTypeInformation.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DaggerInternalTypeInformation.java similarity index 91% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/DaggerInternalTypeInformation.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DaggerInternalTypeInformation.java index 757c42396..2dd563df6 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/DaggerInternalTypeInformation.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DaggerInternalTypeInformation.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.serde; +package com.gotocompany.dagger.common.serde; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.Constants; +import com.gotocompany.dagger.common.core.Constants; import java.util.ArrayList; import java.util.Arrays; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/DataTypes.java 
b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DataTypes.java similarity index 51% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/DataTypes.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DataTypes.java index e1bd4f4fe..6be1bf8e2 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/DataTypes.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/DataTypes.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde; +package com.gotocompany.dagger.common.serde; public enum DataTypes { JSON, diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/json/deserialization/JsonDeserializer.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonDeserializer.java similarity index 90% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/json/deserialization/JsonDeserializer.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonDeserializer.java index 3f234424e..9bce511b6 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/json/deserialization/JsonDeserializer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonDeserializer.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.common.serde.json.deserialization; +package com.gotocompany.dagger.common.serde.json.deserialization; -import io.odpf.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.formats.json.JsonRowDeserializationSchema; import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; import org.apache.flink.types.Row; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; import org.apache.kafka.clients.consumer.ConsumerRecord; import java.io.IOException; @@ -15,7 +15,7 @@ import java.sql.Timestamp; import java.time.Instant; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; public class JsonDeserializer implements KafkaDeserializationSchema, DaggerDeserializer { private final JsonRowDeserializationSchema jsonRowDeserializationSchema; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/json/deserialization/JsonType.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonType.java similarity index 83% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/json/deserialization/JsonType.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonType.java index e2e12dd3d..53aea049b 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/json/deserialization/JsonType.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonType.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.common.serde.json.deserialization; +package com.gotocompany.dagger.common.serde.json.deserialization; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.formats.json.JsonRowSchemaConverter; import org.apache.flink.types.Row; -import io.odpf.dagger.common.serde.DaggerInternalTypeInformation; +import 
com.gotocompany.dagger.common.serde.DaggerInternalTypeInformation; import java.io.Serializable; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/parquet/SimpleGroupValidation.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/parquet/SimpleGroupValidation.java similarity index 98% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/parquet/SimpleGroupValidation.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/parquet/SimpleGroupValidation.java index 3fa8f9c57..07d5e18d4 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/parquet/SimpleGroupValidation.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/parquet/SimpleGroupValidation.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde.parquet; +package com.gotocompany.dagger.common.serde.parquet; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializer.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializer.java similarity index 80% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializer.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializer.java index b860109a5..f9a0e49e9 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializer.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.common.serde.parquet.deserialization; +package com.gotocompany.dagger.common.serde.parquet.deserialization; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoType; -import io.odpf.dagger.common.serde.typehandler.RowFactory; -import io.odpf.dagger.common.serde.typehandler.complex.TimestampHandler; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoType; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.serde.typehandler.complex.TimestampHandler; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.types.Row; import org.apache.parquet.example.data.simple.SimpleGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoDeserializer.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoDeserializer.java similarity index 76% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoDeserializer.java rename to 
dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoDeserializer.java index 66ec2ec0e..5cf61eb90 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoDeserializer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoDeserializer.java @@ -1,13 +1,14 @@ -package io.odpf.dagger.common.serde.proto.deserialization; +package com.gotocompany.dagger.common.serde.proto.deserialization; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; import org.apache.flink.types.Row; @@ -29,6 +30,8 @@ public class ProtoDeserializer implements KafkaDeserializationSchema, Dagge private final int timestampFieldIndex; private final StencilClientOrchestrator stencilClientOrchestrator; private final TypeInformation typeInformation; + private final FieldDescriptorCache fieldDescriptorCache; + private final boolean stencilAutoRefreshEnable; /** * Instantiates a new Proto deserializer. 
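The two fields added above (fieldDescriptorCache and stencilAutoRefreshEnable) drive the row-construction branches in the hunks that follow; a hedged sketch of that pattern, assuming the cache-aware RowFactory.createRow overload introduced later in this patch:

```java
// Sketch of the branching used below in createDefaultInvalidRow/addTimestampFieldToRow:
// the cache-aware overload is taken only when stencil cache auto refresh is enabled.
import com.google.protobuf.DynamicMessage;
import com.gotocompany.dagger.common.core.FieldDescriptorCache;
import com.gotocompany.dagger.common.serde.typehandler.RowFactory;
import org.apache.flink.types.Row;

class RowCreationSketch {
    static Row create(DynamicMessage proto, int extraColumns,
                      boolean stencilAutoRefreshEnable, FieldDescriptorCache cache) {
        return stencilAutoRefreshEnable
                ? RowFactory.createRow(proto, extraColumns, cache)
                : RowFactory.createRow(proto, extraColumns);
    }
}
```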
@@ -43,6 +46,8 @@ public ProtoDeserializer(String protoClassName, int timestampFieldIndex, String this.timestampFieldIndex = timestampFieldIndex; this.stencilClientOrchestrator = stencilClientOrchestrator; this.typeInformation = new ProtoType(protoClassName, rowtimeAttributeName, stencilClientOrchestrator).getRowType(); + this.fieldDescriptorCache = new FieldDescriptorCache(getProtoParser()); + this.stencilAutoRefreshEnable = stencilClientOrchestrator.createStencilConfig().getCacheAutoRefresh(); } @Override @@ -80,14 +85,25 @@ private Descriptors.Descriptor getProtoParser() { } private Row createDefaultInvalidRow(DynamicMessage defaultInstance) { - Row row = RowFactory.createRow(defaultInstance, 2); + Row row; + if (stencilAutoRefreshEnable) { + row = RowFactory.createRow(defaultInstance, 2, fieldDescriptorCache); + } else { + row = RowFactory.createRow(defaultInstance, 2); + } row.setField(row.getArity() - 2, false); row.setField(row.getArity() - 1, new Timestamp(0)); return row; } private Row addTimestampFieldToRow(DynamicMessage proto) { - Row finalRecord = RowFactory.createRow(proto, 2); + Row finalRecord; + if (stencilAutoRefreshEnable) { + finalRecord = RowFactory.createRow(proto, 2, fieldDescriptorCache); + } else { + finalRecord = RowFactory.createRow(proto, 2); + } + Descriptors.FieldDescriptor fieldDescriptor = proto.getDescriptorForType().findFieldByNumber(timestampFieldIndex); DynamicMessage timestampProto = (DynamicMessage) proto.getField(fieldDescriptor); List timestampFields = timestampProto.getDescriptorForType().getFields(); @@ -99,4 +115,6 @@ private Row addTimestampFieldToRow(DynamicMessage proto) { finalRecord.setField(finalRecord.getArity() - 1, Timestamp.from(Instant.ofEpochSecond(timestampSeconds, timestampNanos))); return finalRecord; } + + } diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoType.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoType.java similarity index 83% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoType.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoType.java index 5e084ddc6..3e154a16e 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoType.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoType.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.common.serde.proto.deserialization; +package com.gotocompany.dagger.common.serde.proto.deserialization; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.serde.typehandler.TypeInformationFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.Descriptor; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.serde.DaggerInternalTypeInformation; -import io.odpf.dagger.common.serde.typehandler.TypeInformationFactory; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerInternalTypeInformation; import java.io.Serializable; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/serialization/KafkaProtoSerializer.java 
b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/serialization/KafkaProtoSerializer.java similarity index 91% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/serialization/KafkaProtoSerializer.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/serialization/KafkaProtoSerializer.java index 1977a1092..3716b8daa 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/serialization/KafkaProtoSerializer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/serialization/KafkaProtoSerializer.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.common.serde.proto.serialization; +package com.gotocompany.dagger.common.serde.proto.serialization; +import com.gotocompany.dagger.common.exceptions.serde.DaggerSerializationException; import org.apache.flink.api.common.serialization.SerializationSchema.InitializationContext; import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; import org.apache.flink.types.Row; -import io.odpf.dagger.common.exceptions.serde.DaggerSerializationException; import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/serialization/ProtoSerializer.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/serialization/ProtoSerializer.java similarity index 89% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/serialization/ProtoSerializer.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/serialization/ProtoSerializer.java index a3881fa39..1ddcf5e77 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/proto/serialization/ProtoSerializer.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/proto/serialization/ProtoSerializer.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.common.serde.proto.serialization; +package com.gotocompany.dagger.common.serde.proto.serialization; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.exceptions.serde.DaggerSerializationException; -import io.odpf.dagger.common.exceptions.serde.InvalidColumnMappingException; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerSerializationException; +import com.gotocompany.dagger.common.exceptions.serde.InvalidColumnMappingException; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; import org.apache.flink.types.Row; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; import java.io.Serializable; import java.util.Arrays; @@ -103,7 +103,7 @@ private DynamicMessage.Builder populateBuilder(DynamicMessage.Builder builder, D if (data != null) { try { builder = typeHandler.transformToProtoBuilder(builder, data); - } catch (IllegalArgumentException e) { + } catch (RuntimeException e) { String protoType = fieldDescriptor.getType().toString(); if (fieldDescriptor.isRepeated()) { protoType = String.format("REPEATED %s", 
fieldDescriptor.getType()); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/PrimitiveTypeHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/PrimitiveTypeHandler.java similarity index 81% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/PrimitiveTypeHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/PrimitiveTypeHandler.java index 0ca4bb6da..43b904077 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/PrimitiveTypeHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/PrimitiveTypeHandler.java @@ -1,10 +1,11 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.exceptions.serde.InvalidDataTypeException; +import com.gotocompany.dagger.common.serde.typehandler.primitive.PrimitiveHandler; +import com.gotocompany.dagger.common.serde.typehandler.primitive.PrimitiveHandlerFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; -import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException; -import io.odpf.dagger.common.serde.typehandler.primitive.PrimitiveHandlerFactory; -import io.odpf.dagger.common.serde.typehandler.primitive.PrimitiveHandler; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import org.apache.parquet.example.data.simple.SimpleGroup; @@ -54,6 +55,11 @@ public Object transformFromProto(Object field) { return field; } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return field; + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { PrimitiveHandler primitiveHandler = PrimitiveHandlerFactory.getTypeHandler(fieldDescriptor); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/RowFactory.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/RowFactory.java similarity index 67% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/RowFactory.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/RowFactory.java index 99d3e92b4..790d707a7 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/RowFactory.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/RowFactory.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.common.serde.typehandler; - -import org.apache.flink.types.Row; +package com.gotocompany.dagger.common.serde.typehandler; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import org.apache.flink.types.Row; import org.apache.parquet.example.data.simple.SimpleGroup; import java.util.List; @@ -14,6 +14,8 @@ * The Factory class for Row. */ public class RowFactory { + + /** * Create row from specified input map and descriptor. * @@ -53,6 +55,32 @@ public static Row createRow(DynamicMessage proto, int extraColumns) { return row; } + + /** + * Create row from specified proto and extra columns. 
+ * + * @param proto the proto + * @param extraColumns the extra columns + * @param cache the field descriptor cache + * @return the row + */ + public static Row createRow(DynamicMessage proto, int extraColumns, FieldDescriptorCache cache) { + List<FieldDescriptor> descriptorFields = proto.getDescriptorForType().getFields(); + int fieldCount = cache.getOriginalFieldCount(proto.getDescriptorForType()); + + Row row = new Row(fieldCount + extraColumns); + for (FieldDescriptor fieldDescriptor : descriptorFields) { + + if (!cache.containsField(fieldDescriptor.getFullName())) { + continue; + } + + TypeHandler typeHandler = TypeHandlerFactory.getTypeHandler(fieldDescriptor); + row.setField(cache.getOriginalFieldIndex(fieldDescriptor), typeHandler.transformFromProtoUsingCache(proto.getField(fieldDescriptor), cache)); + } + return row; + } + + + public static Row createRow(Descriptors.Descriptor descriptor, SimpleGroup simpleGroup, int extraColumns) { + List<FieldDescriptor> descriptorFields = descriptor.getFields(); + Row row = new Row(descriptorFields.size() + extraColumns); @@ -76,4 +104,15 @@ public static Row createRow(Descriptors.Descriptor descriptor, SimpleGroup simpl public static Row createRow(DynamicMessage proto) { return createRow(proto, 0); } + + /** + * Create row from specified proto and extra columns equals to zero. + * + * @param proto the proto + * @param cache the field descriptor cache + * @return the row + */ + public static Row createRow(DynamicMessage proto, FieldDescriptorCache cache) { + return createRow(proto, 0, cache); + } + } diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandler.java similarity index 80% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandler.java index f0947df39..c7f3cdaba 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandler.java @@ -1,5 +1,6 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; import org.apache.flink.api.common.typeinfo.TypeInformation; import com.google.protobuf.DynamicMessage; @@ -41,6 +42,15 @@ public interface TypeHandler { */ Object transformFromProto(Object field); + /** + * Transform from protobuf message. + * + * @param field the field + * @param cache the field descriptor cache + * @return the object + */ + Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache); + /** + * Transform from parquet SimpleGroup.
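For handlers of scalar values the new interface method is effectively a pass-through (PrimitiveTypeHandler above and EnumHandler below simply return the field), while handlers that build nested Rows forward the cache into RowFactory. A hedged sketch of the minimal override a custom TypeHandler with no nested message fields would need; the delegation to transformFromProto is an assumption for illustration, not code from this change:

    @Override
    public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) {
        // no nested message fields, so the descriptor cache is not needed here;
        // reuse the existing proto transformation
        return transformFromProto(field);
    }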
* diff --git a/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandlerFactory.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandlerFactory.java new file mode 100644 index 000000000..2599f06bc --- /dev/null +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandlerFactory.java @@ -0,0 +1,75 @@ +package com.gotocompany.dagger.common.serde.typehandler; + +import com.google.protobuf.Descriptors; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedEnumHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedMessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedPrimitiveHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedStructMessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.EnumHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.MapHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.MessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.StructMessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.TimestampHandler; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * The factory class for Type handler. + */ +public class TypeHandlerFactory { + private static Map<String, Pair<Integer, TypeHandler>> typeHandlerMap = new ConcurrentHashMap<>(); + + /** + * Gets type handler. + * + * @param fieldDescriptor the field descriptor + * @return the type handler + */ + public static TypeHandler getTypeHandler(final Descriptors.FieldDescriptor fieldDescriptor) { + int newHashCode = fieldDescriptor.hashCode(); + + /* this means we have already created and persisted the handler corresponding to + the field descriptor in the map and hence we can directly return it */ + if (typeHandlerMap.containsKey(fieldDescriptor.getFullName()) && typeHandlerMap.get(fieldDescriptor.getFullName()).getKey() == newHashCode) { + Pair<Integer, TypeHandler> pair = typeHandlerMap.get(fieldDescriptor.getFullName()); + return pair.getValue(); + } else { + /* this means that either it is a new field not encountered before and/or same field but with an updated field descriptor object + in either case, we create a new handler and persist it in the map */ + TypeHandler handler = getSpecificHandlers(fieldDescriptor) + .stream() + .filter(TypeHandler::canHandle) + .findFirst() + .orElseGet(() -> new PrimitiveTypeHandler(fieldDescriptor)); + Pair<Integer, TypeHandler> pair = ImmutablePair.of(newHashCode, handler); + typeHandlerMap.put(fieldDescriptor.getFullName(), pair); + return handler; + } + } + + /** + * Clear type handler map.
+ */ + protected static void clearTypeHandlerMap() { + typeHandlerMap.clear(); + } + + private static List getSpecificHandlers(Descriptors.FieldDescriptor fieldDescriptor) { + return Arrays.asList( + new MapHandler(fieldDescriptor), + new TimestampHandler(fieldDescriptor), + new EnumHandler(fieldDescriptor), + new StructMessageHandler(fieldDescriptor), + new RepeatedStructMessageHandler(fieldDescriptor), + new RepeatedPrimitiveHandler(fieldDescriptor), + new RepeatedMessageHandler(fieldDescriptor), + new RepeatedEnumHandler(fieldDescriptor), + new MessageHandler(fieldDescriptor) + ); + } +} diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeInformationFactory.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeInformationFactory.java similarity index 89% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeInformationFactory.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeInformationFactory.java index 7272887a0..d429ecce8 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeInformationFactory.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/TypeInformationFactory.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/EnumHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/EnumHandler.java similarity index 86% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/EnumHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/EnumHandler.java index 1ec8ef44e..0b0621cd6 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/EnumHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/EnumHandler.java @@ -1,13 +1,14 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.exceptions.serde.EnumFieldNotFoundException; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.exceptions.serde.EnumFieldNotFoundException; import org.apache.parquet.example.data.simple.SimpleGroup; /** @@ -61,6 +62,11 @@ public Object transformFromProto(Object field) { return String.valueOf(field).trim(); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + 
return String.valueOf(field).trim(); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { String defaultEnumValue = fieldDescriptor.getEnumType().findValueByNumber(0).getName(); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/MapHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/MapHandler.java similarity index 83% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/MapHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/MapHandler.java index 6b5d05ae6..266129138 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/MapHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/MapHandler.java @@ -1,12 +1,13 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.serde.typehandler.RowFactory; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.common.serde.typehandler.TypeInformationFactory; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedMessageHandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeInformationFactory; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedMessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; @@ -17,9 +18,9 @@ import java.util.Map; import java.util.Map.Entry; -import static io.odpf.dagger.common.serde.parquet.SimpleGroupValidation.checkFieldExistsAndIsInitialized; -import static io.odpf.dagger.common.serde.parquet.SimpleGroupValidation.checkIsLegacySimpleGroupMap; -import static io.odpf.dagger.common.serde.parquet.SimpleGroupValidation.checkIsStandardSimpleGroupMap; +import static com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation.checkFieldExistsAndIsInitialized; +import static com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation.checkIsLegacySimpleGroupMap; +import static com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation.checkIsStandardSimpleGroupMap; /** * The type Map proto handler. 
@@ -89,6 +90,11 @@ public Object transformFromProto(Object field) { return repeatedMessageHandler.transformFromProto(field); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return repeatedMessageHandler.transformFromProtoUsingCache(field, cache); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { String fieldName = fieldDescriptor.getName(); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/MessageHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/MessageHandler.java similarity index 85% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/MessageHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/MessageHandler.java index 009ffcae7..c7dcd12f9 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/MessageHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/MessageHandler.java @@ -1,11 +1,12 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.common.serde.typehandler.RowFactory; -import io.odpf.dagger.common.serde.typehandler.TypeInformationFactory; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.serde.typehandler.TypeInformationFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.formats.json.JsonRowSerializationSchema; import org.apache.flink.types.Row; @@ -80,6 +81,11 @@ public Object transformFromProto(Object field) { return RowFactory.createRow((DynamicMessage) field); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return RowFactory.createRow((DynamicMessage) field, cache); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { String fieldName = fieldDescriptor.getName(); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/StructMessageHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/StructMessageHandler.java similarity index 83% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/StructMessageHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/StructMessageHandler.java index 5f307f75a..bb3833acb 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/StructMessageHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/StructMessageHandler.java @@ -1,6 +1,7 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; -import 
io.odpf.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; @@ -44,6 +45,11 @@ public Object transformFromProto(Object field) { return null; } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return null; + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { return null; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/TimestampHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/TimestampHandler.java similarity index 91% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/TimestampHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/TimestampHandler.java index 08e745f22..32f659452 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/complex/TimestampHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/complex/TimestampHandler.java @@ -1,10 +1,11 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; import com.google.protobuf.Timestamp; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.RowFactory; -import io.odpf.dagger.common.serde.typehandler.TypeInformationFactory; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.serde.typehandler.TypeInformationFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.types.Row; @@ -108,6 +109,11 @@ public Object transformFromProto(Object field) { return RowFactory.createRow((DynamicMessage) field); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return RowFactory.createRow((DynamicMessage) field, cache); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { String fieldName = fieldDescriptor.getName(); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/BooleanHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/BooleanHandler.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/BooleanHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/BooleanHandler.java index ed8de1997..46917901f 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/BooleanHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/BooleanHandler.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.common.primitives.Booleans; import 
com.google.protobuf.Descriptors; @@ -6,7 +6,7 @@ import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import java.util.List; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/ByteStringHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/ByteStringHandler.java similarity index 95% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/ByteStringHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/ByteStringHandler.java index dabc2c51e..146ace65f 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/ByteStringHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/ByteStringHandler.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/DoubleHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/DoubleHandler.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/DoubleHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/DoubleHandler.java index 37f77d520..e326c677d 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/DoubleHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/DoubleHandler.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.common.primitives.Doubles; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.FieldDescriptor.JavaType; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/FloatHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/FloatHandler.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/FloatHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/FloatHandler.java index b3f69dc8a..6083800d1 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/FloatHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/FloatHandler.java @@ -1,9 +1,9 @@ 
-package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.common.primitives.Floats; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.FieldDescriptor.JavaType; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/IntegerHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/IntegerHandler.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/IntegerHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/IntegerHandler.java index cdaf675a5..c181776dc 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/IntegerHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/IntegerHandler.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.common.primitives.Ints; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.FieldDescriptor.JavaType; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/LongHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/LongHandler.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/LongHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/LongHandler.java index a254edc90..9292f7cf0 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/LongHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/LongHandler.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.FieldDescriptor.JavaType; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandler.java similarity index 96% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandler.java rename to 
dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandler.java index fb517d702..27b353e00 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandler.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.parquet.example.data.simple.SimpleGroup; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactory.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactory.java similarity index 90% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactory.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactory.java index bccedcd94..bd4d1fb40 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactory.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactory.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; -import io.odpf.dagger.common.exceptions.serde.DataTypeNotSupportedException; +import com.gotocompany.dagger.common.exceptions.serde.DataTypeNotSupportedException; import com.google.protobuf.Descriptors; import java.util.Arrays; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/StringHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/StringHandler.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/StringHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/StringHandler.java index 37483281e..71258dbf6 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/primitive/StringHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/primitive/StringHandler.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.FieldDescriptor.JavaType; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedEnumHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedEnumHandler.java similarity index 70% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedEnumHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedEnumHandler.java index fe8d9cb08..a916d9b85 
100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedEnumHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedEnumHandler.java @@ -1,17 +1,19 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; - -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import org.apache.flink.api.common.typeinfo.TypeInformation; -import org.apache.flink.api.common.typeinfo.Types; -import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; +package com.gotocompany.dagger.common.serde.typehandler.repeated; import com.google.gson.Gson; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.exceptions.serde.EnumFieldNotFoundException; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; import org.apache.parquet.example.data.simple.SimpleGroup; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -40,9 +42,28 @@ public boolean canHandle() { @Override public DynamicMessage.Builder transformToProtoBuilder(DynamicMessage.Builder builder, Object field) { + if (!canHandle() || field == null) { + return builder; + } + List rowElements = field.getClass().isArray() ? Arrays.asList((Object[]) field) : (List) field; + + List value = rowElements.stream() + .map(this::getEnumValue) + .collect(Collectors.toList()); + + builder.setField(fieldDescriptor, value); return builder; } + private Descriptors.EnumValueDescriptor getEnumValue(Object field) { + String stringValue = String.valueOf(field).trim(); + Descriptors.EnumValueDescriptor valueByName = fieldDescriptor.getEnumType().findValueByName(stringValue); + if (valueByName == null) { + throw new EnumFieldNotFoundException("field: " + stringValue + " not found in " + fieldDescriptor.getFullName()); + } + return valueByName; + } + @Override public Object transformFromPostProcessor(Object field) { return getValue(field); @@ -53,6 +74,11 @@ public Object transformFromProto(Object field) { return getValue(field); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return getValue(field); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { String defaultEnumValue = fieldDescriptor.getEnumType().findValueByNumber(0).getName(); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedMessageHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedMessageHandler.java similarity index 86% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedMessageHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedMessageHandler.java index cfed9e93b..6adbb712a 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedMessageHandler.java +++ 
b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedMessageHandler.java @@ -1,11 +1,12 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; +package com.gotocompany.dagger.common.serde.typehandler.repeated; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.serde.parquet.SimpleGroupValidation; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.common.serde.typehandler.RowFactory; -import io.odpf.dagger.common.serde.typehandler.TypeInformationFactory; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.parquet.SimpleGroupValidation; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.serde.typehandler.TypeInformationFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.formats.json.JsonRowSerializationSchema; @@ -94,6 +95,16 @@ public Object transformFromProto(Object field) { return rows.toArray(); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + ArrayList rows = new ArrayList<>(); + if (field != null) { + List protos = (List) field; + protos.forEach(proto -> rows.add(RowFactory.createRow(proto, cache))); + } + return rows.toArray(); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { String fieldName = fieldDescriptor.getName(); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandler.java similarity index 79% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandler.java index 52b2760ed..0c194bae9 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandler.java @@ -1,11 +1,12 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; +package com.gotocompany.dagger.common.serde.typehandler.repeated; -import io.odpf.dagger.common.serde.typehandler.PrimitiveTypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.primitive.PrimitiveHandler; +import com.gotocompany.dagger.common.serde.typehandler.primitive.PrimitiveHandlerFactory; +import com.gotocompany.dagger.common.serde.typehandler.PrimitiveTypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; import org.apache.flink.api.common.typeinfo.TypeInformation; -import io.odpf.dagger.common.serde.typehandler.primitive.PrimitiveHandlerFactory; -import io.odpf.dagger.common.serde.typehandler.primitive.PrimitiveHandler; import com.google.gson.Gson; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; @@ -69,6 +70,12 @@ public 
Object transformFromProto(Object field) { return primitiveHandler.parseRepeatedObjectField(field); } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + PrimitiveHandler primitiveHandler = PrimitiveHandlerFactory.getTypeHandler(fieldDescriptor); + return primitiveHandler.parseRepeatedObjectField(field); + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { PrimitiveHandler primitiveHandler = PrimitiveHandlerFactory.getTypeHandler(fieldDescriptor); diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandler.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandler.java similarity index 83% rename from dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandler.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandler.java index 4521c3054..4c35de1f6 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandler.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandler.java @@ -1,6 +1,7 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; +package com.gotocompany.dagger.common.serde.typehandler.repeated; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; @@ -44,6 +45,11 @@ public Object transformFromProto(Object field) { return null; } + @Override + public Object transformFromProtoUsingCache(Object field, FieldDescriptorCache cache) { + return null; + } + @Override public Object transformFromParquet(SimpleGroup simpleGroup) { return null; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/AggregateUdf.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/AggregateUdf.java similarity index 80% rename from dagger-common/src/main/java/io/odpf/dagger/common/udfs/AggregateUdf.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/AggregateUdf.java index e7242c8a9..2b9938027 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/AggregateUdf.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/AggregateUdf.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.udfs; +package com.gotocompany.dagger.common.udfs; -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; import org.apache.flink.table.functions.AggregateFunction; import org.apache.flink.table.functions.FunctionContext; -import static io.odpf.dagger.common.core.Constants.GAUGE_ASPECT_NAME; -import static io.odpf.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; +import static com.gotocompany.dagger.common.core.Constants.GAUGE_ASPECT_NAME; +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; /** * This class will not publish the UDF telemetry. 
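The repeated-enum serialization added in RepeatedEnumHandler.transformToProtoBuilder above can be exercised roughly as follows. This is an illustrative sketch: outputDescriptor stands in for the output proto descriptor, and the field name service_types plus the enum value names are hypothetical.

    // values coming from a Row column of a repeated enum field
    Object column = new String[]{"GO_RIDE", "GO_SEND"};
    Descriptors.FieldDescriptor enumField = outputDescriptor.findFieldByName("service_types");
    DynamicMessage.Builder builder = DynamicMessage.newBuilder(outputDescriptor);
    builder = new RepeatedEnumHandler(enumField).transformToProtoBuilder(builder, column);
    // a name with no matching enum value, e.g. "GO_FLY", would raise
    // EnumFieldNotFoundException("field: GO_FLY not found in " + enumField.getFullName())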
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/ScalarUdf.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/ScalarUdf.java similarity index 67% rename from dagger-common/src/main/java/io/odpf/dagger/common/udfs/ScalarUdf.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/ScalarUdf.java index 83685f25e..9256d482c 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/ScalarUdf.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/ScalarUdf.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.udfs; +package com.gotocompany.dagger.common.udfs; -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; import org.apache.flink.table.functions.FunctionContext; import org.apache.flink.table.functions.ScalarFunction; -import static io.odpf.dagger.common.core.Constants.GAUGE_ASPECT_NAME; -import static io.odpf.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; +import static com.gotocompany.dagger.common.core.Constants.GAUGE_ASPECT_NAME; +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; /** * Abstract class for Scalar udf. @@ -38,4 +38,9 @@ public String getName() { public GaugeStatsManager getGaugeStatsManager() { return gaugeStatsManager; } + + // For testing purpose only + public void setGaugeStatsManager(GaugeStatsManager gaugeStatsManager) { + this.gaugeStatsManager = gaugeStatsManager; + } } diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/TableUdf.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/TableUdf.java similarity index 77% rename from dagger-common/src/main/java/io/odpf/dagger/common/udfs/TableUdf.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/TableUdf.java index ae5b3f28e..43e5770b8 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/TableUdf.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/TableUdf.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.udfs; +package com.gotocompany.dagger.common.udfs; -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; import org.apache.flink.table.functions.FunctionContext; import org.apache.flink.table.functions.TableFunction; -import static io.odpf.dagger.common.core.Constants.GAUGE_ASPECT_NAME; -import static io.odpf.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; +import static com.gotocompany.dagger.common.core.Constants.GAUGE_ASPECT_NAME; +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; /** * Abstarct class for Table udf. 
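The setGaugeStatsManager setter added to ScalarUdf above is marked as a test-only hook; a hedged sketch of how a UDF unit test might use it. The Reverse UDF class is hypothetical, and Mockito plus JUnit assertions are assumed, matching the mocking style used in the test changes later in this diff.

    // hypothetical scalar UDF used only for this illustration
    public class Reverse extends ScalarUdf {
        public String eval(String input) {
            return new StringBuilder(input).reverse().toString();
        }
    }

    // in a test: inject the gauge manager directly instead of going through open(FunctionContext)
    GaugeStatsManager gaugeStatsManager = Mockito.mock(GaugeStatsManager.class);
    Reverse reverseUdf = new Reverse();
    reverseUdf.setGaugeStatsManager(gaugeStatsManager);
    assertSame(gaugeStatsManager, reverseUdf.getGaugeStatsManager());
    assertEquals("cba", reverseUdf.eval("abc"));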
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/UdfFactory.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/UdfFactory.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/udfs/UdfFactory.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/UdfFactory.java index 13969ff8f..bed3085bb 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/udfs/UdfFactory.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/UdfFactory.java @@ -1,9 +1,8 @@ -package io.odpf.dagger.common.udfs; +package com.gotocompany.dagger.common.udfs; +import com.gotocompany.dagger.common.configuration.Configuration; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import io.odpf.dagger.common.configuration.Configuration; - import java.util.HashSet; /** diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/LastColumnWatermark.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/LastColumnWatermark.java similarity index 94% rename from dagger-common/src/main/java/io/odpf/dagger/common/watermark/LastColumnWatermark.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/LastColumnWatermark.java index 761143030..8ee9171e4 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/LastColumnWatermark.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/LastColumnWatermark.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.watermark; +package com.gotocompany.dagger.common.watermark; import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner; import org.apache.flink.api.common.eventtime.WatermarkStrategy; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/NoWatermark.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/NoWatermark.java similarity index 86% rename from dagger-common/src/main/java/io/odpf/dagger/common/watermark/NoWatermark.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/NoWatermark.java index ecbcfd285..afa6d606e 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/NoWatermark.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/NoWatermark.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.watermark; +package com.gotocompany.dagger.common.watermark; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.types.Row; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/RowtimeFieldWatermark.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/RowtimeFieldWatermark.java similarity index 95% rename from dagger-common/src/main/java/io/odpf/dagger/common/watermark/RowtimeFieldWatermark.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/RowtimeFieldWatermark.java index 28b8e0844..a4d5b558a 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/RowtimeFieldWatermark.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/RowtimeFieldWatermark.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.watermark; +package com.gotocompany.dagger.common.watermark; import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner; import org.apache.flink.api.common.eventtime.WatermarkStrategy; diff --git 
a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/StreamWatermarkAssigner.java similarity index 95% rename from dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/StreamWatermarkAssigner.java index cfdc4c5ef..feac54918 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/StreamWatermarkAssigner.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.watermark; +package com.gotocompany.dagger.common.watermark; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/WatermarkStrategyDefinition.java b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/WatermarkStrategyDefinition.java similarity index 84% rename from dagger-common/src/main/java/io/odpf/dagger/common/watermark/WatermarkStrategyDefinition.java rename to dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/WatermarkStrategyDefinition.java index d0a4eca99..1e19c8658 100644 --- a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/WatermarkStrategyDefinition.java +++ b/dagger-common/src/main/java/com/gotocompany/dagger/common/watermark/WatermarkStrategyDefinition.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.watermark; +package com.gotocompany.dagger.common.watermark; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.types.Row; diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeHandlerFactory.java b/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeHandlerFactory.java deleted file mode 100644 index ef31ff706..000000000 --- a/dagger-common/src/main/java/io/odpf/dagger/common/serde/typehandler/TypeHandlerFactory.java +++ /dev/null @@ -1,57 +0,0 @@ -package io.odpf.dagger.common.serde.typehandler; - -import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.serde.typehandler.complex.EnumHandler; -import io.odpf.dagger.common.serde.typehandler.complex.MapHandler; -import io.odpf.dagger.common.serde.typehandler.complex.MessageHandler; -import io.odpf.dagger.common.serde.typehandler.complex.StructMessageHandler; -import io.odpf.dagger.common.serde.typehandler.complex.TimestampHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedEnumHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedMessageHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedPrimitiveHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedStructMessageHandler; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -/** - * The factory class for Type handler. - */ -public class TypeHandlerFactory { - private static Map typeHandlerMap = new ConcurrentHashMap<>(); - - /** - * Gets type handler. 
- * - * @param fieldDescriptor the field descriptor - * @return the type handler - */ - public static TypeHandler getTypeHandler(final Descriptors.FieldDescriptor fieldDescriptor) { - return typeHandlerMap.computeIfAbsent(fieldDescriptor.getFullName(), - k -> getSpecificHandlers(fieldDescriptor).stream().filter(TypeHandler::canHandle) - .findFirst().orElseGet(() -> new PrimitiveTypeHandler(fieldDescriptor))); - } - - /** - * Clear type handler map. - */ - protected static void clearTypeHandlerMap() { - typeHandlerMap.clear(); - } - - private static List getSpecificHandlers(Descriptors.FieldDescriptor fieldDescriptor) { - return Arrays.asList( - new MapHandler(fieldDescriptor), - new TimestampHandler(fieldDescriptor), - new EnumHandler(fieldDescriptor), - new StructMessageHandler(fieldDescriptor), - new RepeatedStructMessageHandler(fieldDescriptor), - new RepeatedPrimitiveHandler(fieldDescriptor), - new RepeatedMessageHandler(fieldDescriptor), - new RepeatedEnumHandler(fieldDescriptor), - new MessageHandler(fieldDescriptor) - ); - } -} diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/configuration/ConfigurationTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/configuration/ConfigurationTest.java similarity index 86% rename from dagger-common/src/test/java/io/odpf/dagger/common/configuration/ConfigurationTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/configuration/ConfigurationTest.java index f9a6caa5c..2119f25cf 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/configuration/ConfigurationTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/configuration/ConfigurationTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.configuration; +package com.gotocompany.dagger.common.configuration; import org.apache.flink.api.java.utils.ParameterTool; @@ -7,6 +7,7 @@ import org.mockito.Mock; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -30,6 +31,11 @@ public void shouldGetStringFromParamTool() { assertEquals("test_value", configuration.getString("test_config", "test_default")); } + @Test + public void shouldGetNullIfParamIsNotSet() { + assertNull(configuration.getString("config_not_exist")); + } + @Test public void shouldGetIntegerFromParamTool() { when(parameterTool.getInt("test_config", 1)).thenReturn(2); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/core/DaggerContextTestBase.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/core/DaggerContextTestBase.java similarity index 94% rename from dagger-common/src/test/java/io/odpf/dagger/common/core/DaggerContextTestBase.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/core/DaggerContextTestBase.java index 19608a028..38e26f5d7 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/core/DaggerContextTestBase.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/core/DaggerContextTestBase.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.common.core; +package com.gotocompany.dagger.common.core; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; diff --git 
a/dagger-common/src/test/java/com/gotocompany/dagger/common/core/FieldDescriptorCacheTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/core/FieldDescriptorCacheTest.java new file mode 100644 index 000000000..ebfb96dcf --- /dev/null +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/core/FieldDescriptorCacheTest.java @@ -0,0 +1,54 @@ +package com.gotocompany.dagger.common.core; + +import com.gotocompany.dagger.consumer.TestApiLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.*; +import static org.mockito.MockitoAnnotations.initMocks; + +public class FieldDescriptorCacheTest { + + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldReturnTrueIfFieldPresentInMap() { + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + assertTrue(fieldDescriptorCache.containsField("com.gotocompany.dagger.consumer.TestBookingLogMessage.order_number")); + } + + @Test + public void shouldReturnFalseIfFieldNotPresentInMap() { + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + assertFalse(fieldDescriptorCache.containsField("xyz")); + } + + @Test + public void shouldReturnOriginalFieldIndex() { + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + assertEquals(1, fieldDescriptorCache.getOriginalFieldIndex(TestBookingLogMessage.getDescriptor().findFieldByName("order_number"))); + } + + @Test + public void shouldReturnOriginalFieldCount() { + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + assertEquals(49, fieldDescriptorCache.getOriginalFieldCount(TestBookingLogMessage.getDescriptor())); + } + + @Test + public void shouldThrowExceptionIfFieldNotPresentInCacheForFieldCount() { + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + assertThrows("The Proto Descriptor com.gotocompany.dagger.consumer.TestApiLogMessage was not found in the cache", IllegalArgumentException.class, () -> fieldDescriptorCache.getOriginalFieldCount(TestApiLogMessage.getDescriptor())); + } + + @Test + public void shouldThrowExceptionIfFieldNotPresentInCacheForFieldIndex() { + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + assertThrows("The Field Descriptor com.gotocompany.dagger.consumer.TestApiLogMessage.event_timestamp was not found in the cache", IllegalArgumentException.class, () -> fieldDescriptorCache.getOriginalFieldIndex(TestApiLogMessage.getDescriptor().findFieldByName("event_timestamp"))); + } +} diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/core/StencilClientOrchestratorTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/core/StencilClientOrchestratorTest.java similarity index 52% rename from dagger-common/src/test/java/io/odpf/dagger/common/core/StencilClientOrchestratorTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/core/StencilClientOrchestratorTest.java index 1cf6ed783..8c04e4d9f 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/core/StencilClientOrchestratorTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/core/StencilClientOrchestratorTest.java @@ -1,10 +1,11 @@ -package 
io.odpf.dagger.common.core; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.stencil.client.ClassLoadStencilClient; -import io.odpf.stencil.client.MultiURLStencilClient; -import io.odpf.stencil.client.StencilClient; -import io.odpf.stencil.config.StencilConfig; +package com.gotocompany.dagger.common.core; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.stencil.cache.SchemaRefreshStrategy; +import com.gotocompany.stencil.client.ClassLoadStencilClient; +import com.gotocompany.stencil.client.MultiURLStencilClient; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.stencil.config.StencilConfig; import org.apache.flink.api.java.utils.ParameterTool; import org.junit.Before; import org.junit.Test; @@ -13,7 +14,7 @@ import java.lang.reflect.Field; import java.util.*; -import static io.odpf.dagger.common.core.Constants.*; +import static com.gotocompany.dagger.common.core.Constants.*; import static org.junit.Assert.*; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -38,6 +39,9 @@ public void shouldReturnClassLoadStencilClientIfStencilDisabled() throws NoSuchF when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); when(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS, SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(configuration); stencilClient = stencilClientOrchestrator.getStencilClient(); @@ -54,6 +58,9 @@ public void shouldReturnMultiURLStencilClient() throws NoSuchFieldException, Ill + "http://localhost/events/latest," + "http://localhost/entities/release"); when(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS, SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(configuration); stencilClient = stencilClientOrchestrator.getStencilClient(); @@ -68,6 +75,9 @@ public void 
shouldEnrichStencilClient() throws NoSuchFieldException, IllegalAcce when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(true); when(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS, SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn("http://localhost/latest,"); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(configuration); StencilClient oldStencilClient = stencilClientOrchestrator.getStencilClient(); @@ -94,6 +104,9 @@ public void shouldNotEnrichIfNoNewAdditionalURLsAdded() throws NoSuchFieldExcept when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(true); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn("http://localhost/latest,"); when(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS, SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(configuration); StencilClient oldStencilClient = stencilClientOrchestrator.getStencilClient(); @@ -119,6 +132,9 @@ public void shouldReturnClassLoadStencilClientWhenStencilDisabledAndEnrichmentSt when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); when(configuration.getInteger(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS, SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + 
when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(configuration); List enrichmentStencilURLs = Collections @@ -138,7 +154,7 @@ public void shouldReturnDefaultTimeoutIfTimeoutMsConfigNotSet() { Configuration config = getConfig(configMap); StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); - assertEquals(SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS_DEFAULT, stencilConfig.getFetchTimeoutMs()); + assertEquals(Integer.valueOf(10000), stencilConfig.getFetchTimeoutMs()); } @Test @@ -152,6 +168,133 @@ public void shouldReturnConfiguredTimeoutIfTimeoutMsConfigIsSet() { assertEquals(Integer.valueOf(8000), stencilConfig.getFetchTimeoutMs()); } + @Test + public void shouldReturnTrueIfCacheAutoRefreshIsSetToTrue() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, "true"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertTrue(stencilConfig.getCacheAutoRefresh()); + } + + @Test + public void shouldReturnFalseIfCacheAutoRefreshIsSetToFalse() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, "false"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertFalse(stencilConfig.getCacheAutoRefresh()); + } + + @Test + public void shouldReturnFalseIfCacheAutoRefreshIsNotSet() { + Map configMap = new HashMap<>(); + + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertFalse(stencilConfig.getCacheAutoRefresh()); + } + + @Test + public void shouldReturnConfiguredValueIfCacheTtlMsConfigIsSet() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, "7800"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(Long.valueOf(7800), stencilConfig.getCacheTtlMs()); + } + + @Test + public void shouldReturnDefaultValueIfCacheTtlMsConfigIsNotSet() { + Map configMap = new HashMap<>(); + + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(Long.valueOf(900000), stencilConfig.getCacheTtlMs()); + } + + @Test + public void shouldReturnVersionBasedIfRefreshStrategyConfigIsSet() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, "VERSION_BASED_REFRESH"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = 
stencilClientOrchestrator.createStencilConfig(); + assertEquals(SchemaRefreshStrategy.versionBasedRefresh().getClass(), stencilConfig.getRefreshStrategy().getClass()); + } + + @Test + public void shouldReturnLongPollingIfRefreshStrategyConfigIsNotSet() { + Map configMap = new HashMap<>(); + + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(SchemaRefreshStrategy.longPollingStrategy().getClass(), stencilConfig.getRefreshStrategy().getClass()); + } + + @Test + public void shouldReturnLongPollingIfRefreshStrategyConfigIsInvalid() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, "xyz"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(SchemaRefreshStrategy.longPollingStrategy().getClass(), stencilConfig.getRefreshStrategy().getClass()); + } + + @Test + public void shouldReturnConfiguredValueIfFetchBackoffMinMsConfigIsSet() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS_KEY, "7800"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(Long.valueOf(7800), stencilConfig.getFetchBackoffMinMs()); + } + + @Test + public void shouldReturnDefaultValueIfFetchBackoffMinMsConfigIsNotSet() { + Map configMap = new HashMap<>(); + + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(Long.valueOf(60000), stencilConfig.getFetchBackoffMinMs()); + } + + @Test + public void shouldReturnConfiguredValueIfFetchRetriesConfigIsSet() { + Map configMap = new HashMap() {{ + put(SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES_KEY, "9"); + }}; + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(Integer.valueOf(9), stencilConfig.getFetchRetries()); + } + + @Test + public void shouldReturnDefaultValueIfFetchRetriesConfigIsNotSet() { + Map configMap = new HashMap<>(); + + Configuration config = getConfig(configMap); + StencilClientOrchestrator stencilClientOrchestrator = new StencilClientOrchestrator(config); + StencilConfig stencilConfig = stencilClientOrchestrator.createStencilConfig(); + assertEquals(Integer.valueOf(4), stencilConfig.getFetchRetries()); + } + @Test public void shouldReturnEmptyHeadersIfHeadersConfigIsNotSet() { Map configMap = new HashMap<>(); @@ -196,4 +339,5 @@ public void shouldReturnParsedHeaderIfHeaderStringValid() { assertEquals("key2: val2", stencilConfig.getFetchHeaders().get(1).toString()); assertEquals("key3: val3", stencilConfig.getFetchHeaders().get(2).toString()); } + } diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/CounterStatsManagerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/CounterStatsManagerTest.java similarity index 
95% rename from dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/CounterStatsManagerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/CounterStatsManagerTest.java index 4a96300d9..2a7516f8e 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/CounterStatsManagerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/CounterStatsManagerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.common.metrics.managers; +package com.gotocompany.dagger.common.metrics.managers; -import io.odpf.dagger.common.metrics.managers.utils.TestAspects; +import com.gotocompany.dagger.common.metrics.managers.utils.TestAspects; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.metrics.SimpleCounter; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/GaugeStatsManagerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/GaugeStatsManagerTest.java similarity index 93% rename from dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/GaugeStatsManagerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/GaugeStatsManagerTest.java index e88013116..697526a16 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/GaugeStatsManagerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/GaugeStatsManagerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.common.metrics.managers; +package com.gotocompany.dagger.common.metrics.managers; -import io.odpf.dagger.common.metrics.managers.utils.TestAspects; +import com.gotocompany.dagger.common.metrics.managers.utils.TestAspects; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/MeterStatsManagerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/MeterStatsManagerTest.java similarity index 95% rename from dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/MeterStatsManagerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/MeterStatsManagerTest.java index 5a5c4134e..cd6fa410d 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/MeterStatsManagerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/MeterStatsManagerTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.metrics.managers; +package com.gotocompany.dagger.common.metrics.managers; +import com.gotocompany.dagger.common.metrics.managers.utils.TestAspects; import org.apache.flink.dropwizard.metrics.DropwizardMeterWrapper; import org.apache.flink.metrics.Histogram; import org.apache.flink.metrics.Meter; import org.apache.flink.metrics.MetricGroup; -import io.odpf.dagger.common.metrics.managers.utils.TestAspects; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/utils/TestAspects.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/utils/TestAspects.java similarity index 75% rename from dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/utils/TestAspects.java rename to 
dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/utils/TestAspects.java index bc2e02d2e..a9fee7d69 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/metrics/managers/utils/TestAspects.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/metrics/managers/utils/TestAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.common.metrics.managers.utils; +package com.gotocompany.dagger.common.metrics.managers.utils; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; public enum TestAspects implements Aspects { TEST_ASPECT_ONE("test_aspect1", AspectType.Histogram), diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/json/deserialization/JsonDeserializerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonDeserializerTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/json/deserialization/JsonDeserializerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonDeserializerTest.java index c20d422d7..ad0538863 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/json/deserialization/JsonDeserializerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonDeserializerTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.serde.json.deserialization; +package com.gotocompany.dagger.common.serde.json.deserialization; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.junit.Before; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/json/deserialization/JsonTypeTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonTypeTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/json/deserialization/JsonTypeTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonTypeTest.java index 583238249..4f4764a50 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/json/deserialization/JsonTypeTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/json/deserialization/JsonTypeTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde.json.deserialization; +package com.gotocompany.dagger.common.serde.json.deserialization; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.RowTypeInfo; @@ -8,7 +8,7 @@ import org.junit.Test; import org.junit.rules.ExpectedException; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; import static org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO; import static 
org.apache.flink.api.common.typeinfo.Types.*; import static org.junit.Assert.assertArrayEquals; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/parquet/SimpleGroupValidationTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/parquet/SimpleGroupValidationTest.java similarity index 99% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/parquet/SimpleGroupValidationTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/parquet/SimpleGroupValidationTest.java index 673e70d24..786313954 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/parquet/SimpleGroupValidationTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/parquet/SimpleGroupValidationTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.serde.parquet; +package com.gotocompany.dagger.common.serde.parquet; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializerTest.java similarity index 88% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializerTest.java index 9af785c68..b14700736 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/parquet/deserialization/SimpleGroupDeserializerTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.serde.parquet.deserialization; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; -import io.odpf.dagger.consumer.TestBookingLogKey; -import io.odpf.dagger.consumer.TestPrimitiveMessage; +package com.gotocompany.dagger.common.serde.parquet.deserialization; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.consumer.TestBookingLogKey; +import com.gotocompany.dagger.consumer.TestPrimitiveMessage; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.types.Row; @@ -21,8 +21,8 @@ import java.sql.Timestamp; import java.time.Instant; -import static io.odpf.dagger.common.core.Constants.*; -import static io.odpf.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT; +import static com.gotocompany.dagger.common.core.Constants.*; +import static com.gotocompany.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT; import static org.apache.flink.api.common.typeinfo.Types.*; import static org.apache.flink.api.common.typeinfo.Types.SQL_TIMESTAMP; import static org.junit.Assert.*; @@ -41,6 +41,9 @@ public void setUp() { initMocks(this); 
when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); stencilClientOrchestrator = new StencilClientOrchestrator(configuration); } diff --git a/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoDeserializerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoDeserializerTest.java new file mode 100644 index 000000000..9a949edba --- /dev/null +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoDeserializerTest.java @@ -0,0 +1,491 @@ +package com.gotocompany.dagger.common.serde.proto.deserialization; + +import com.google.protobuf.Struct; +import com.google.protobuf.Timestamp; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.consumer.*; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.typeutils.RowTypeInfo; +import org.apache.flink.types.Row; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static com.gotocompany.dagger.common.core.Constants.*; +import static org.apache.flink.api.common.typeinfo.Types.*; +import static org.junit.Assert.*; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class ProtoDeserializerTest { + + private StencilClientOrchestrator stencilClientOrchestrator; + + + @Mock + private Configuration configuration; + + @Mock + private RowFactory rowFactory; + + @Before + public void setUp() { + initMocks(this); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + } + + @Test + public void shouldAlwaysReturnFalseForEndOfStream() { + assertFalse(new ProtoDeserializer(TestBookingLogKey.class.getTypeName(), 4, "rowtime", stencilClientOrchestrator).isEndOfStream(null)); + } + + @Test + public void shouldReturnProducedType() { + ProtoDeserializer protoDeserializer = new 
ProtoDeserializer(TestBookingLogKey.class.getTypeName(), 3, "rowtime", stencilClientOrchestrator); + TypeInformation producedType = protoDeserializer.getProducedType(); + assertArrayEquals( + new String[]{"service_type", "order_number", "order_url", "status", "event_timestamp", INTERNAL_VALIDATION_FIELD_KEY, "rowtime"}, + ((RowTypeInfo) producedType).getFieldNames()); + assertArrayEquals( + new TypeInformation[]{STRING, STRING, STRING, STRING, ROW_NAMED(new String[]{"seconds", "nanos"}, LONG, INT), BOOLEAN, SQL_TIMESTAMP}, + ((RowTypeInfo) producedType).getFieldTypes()); + } + + @Test + public void shouldDeserializeProtoAsRowWithSimpleFields() { + String expectedOrderNumber = "111"; + final int expectedIterationNumber = 10; + byte[] protoBytes = TestBookingLogMessage.newBuilder().setOrderNumber(expectedOrderNumber).setCancelReasonId(expectedIterationNumber).build().toByteArray(); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + assertEquals(expectedOrderNumber, row.getField(bookingLogFieldIndex("order_number"))); + assertEquals(expectedIterationNumber, row.getField(bookingLogFieldIndex("cancel_reason_id"))); + } + + @Test + public void shouldAddExtraFieldsToRow() { + String expectedOrderNumber = "111"; + byte[] protoBytes = TestBookingLogMessage + .newBuilder() + .setOrderNumber(expectedOrderNumber) + .setEventTimestamp(Timestamp.newBuilder().setSeconds(1595548800L).setNanos(0).build()) + .build() + .toByteArray(); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + int size = row.getArity(); + assertEquals(51, size); + assertTrue("Didn't add field at the penultimate index", (Boolean) row.getField(size - 2)); + assertEquals(1595548800000L, ((java.sql.Timestamp) row.getField(size - 1)).getTime()); + } + + @Test + public void shouldDeserializeEnumAsString() { + + byte[] protoBytes = TestBookingLogMessage.newBuilder().setServiceType(TestServiceType.Enum.GO_RIDE).setStatus(TestBookingStatus.Enum.COMPLETED).build().toByteArray(); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + assertEquals(TestServiceType.Enum.GO_RIDE.toString(), row.getField(bookingLogFieldIndex("service_type"))); + assertEquals(TestBookingStatus.Enum.COMPLETED.toString(), row.getField(bookingLogFieldIndex("status"))); + } + + @Test + public void shouldDeserializeNestedMessagesAsSubRows() { + final int expectedSeconds = 10; + final int expectedNanoSeconds = 10; + final int expectedAccuracy = 111; + final int expectedLatitude = 222; + final int accuracyFieldIndex = 7; + final int latitudeFieldIndex = 2; + Timestamp expectedTimestamp = Timestamp.newBuilder().setSeconds(expectedSeconds).setNanos(expectedNanoSeconds).build(); + TestLocation testLocation = TestLocation.newBuilder().setAccuracyMeter(expectedAccuracy).setLatitude(expectedLatitude).build(); + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .setEventTimestamp(expectedTimestamp) + .setDriverPickupLocation(testLocation).build().toByteArray(); + 
ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Row eventTimestampRow = (Row) row.getField(bookingLogFieldIndex("event_timestamp")); + assertEquals(expectedTimestamp.getSeconds(), eventTimestampRow.getField(0)); + assertEquals(expectedTimestamp.getNanos(), eventTimestampRow.getField(1)); + + Row locationRow = (Row) row.getField(bookingLogFieldIndex("driver_pickup_location")); + assertEquals(testLocation.getAccuracyMeter(), locationRow.getField(accuracyFieldIndex)); + assertEquals(testLocation.getLatitude(), locationRow.getField(latitudeFieldIndex)); + } + + @Test + public void shouldDeserializeArrayOfObjectAsSubRows() throws IOException { + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .setOrderNumber("EXAMPLE_ORDER_1") + .addRoutes(TestRoute.newBuilder().setDistanceInKms(1.0f).setRouteOrder(4).build()) + .addRoutes(TestRoute.newBuilder().setDistanceInKms(2.0f).setRouteOrder(5).build()) + .addRoutes(TestRoute.newBuilder().setDistanceInKms(3.0f).setRouteOrder(6).build()) + .build().toByteArray(); + + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Object[] routes = (Object[]) row.getField(bookingLogFieldIndex("routes")); + Row firstRouteRow = (Row) routes[0]; + assertEquals(firstRouteRow.getField(routeFieldIndex("distance_in_kms")), 1.0f); + assertEquals(firstRouteRow.getField(routeFieldIndex("route_order")), 4); + + Row secondRouteRow = (Row) routes[1]; + assertEquals(secondRouteRow.getField(routeFieldIndex("distance_in_kms")), 2.0f); + assertEquals(secondRouteRow.getField(routeFieldIndex("route_order")), 5); + + Row thirdRouteRow = (Row) routes[2]; + assertEquals(thirdRouteRow.getField(routeFieldIndex("distance_in_kms")), 3.0f); + assertEquals(thirdRouteRow.getField(routeFieldIndex("route_order")), 6); + } + + @Test + public void shouldDeserializeArrayOfString() throws IOException { + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .setOrderNumber("EXAMPLE-ID-01") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-01") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-02") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-03") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-04") + .build().toByteArray(); + + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + String[] strings = (String[]) row.getField(bookingLogFieldIndex("meta_array")); + + assertEquals("EXAMPLE-REGISTERED-DEVICE-01", strings[0]); + assertEquals("EXAMPLE-REGISTERED-DEVICE-02", strings[1]); + assertEquals("EXAMPLE-REGISTERED-DEVICE-03", strings[2]); + assertEquals("EXAMPLE-REGISTERED-DEVICE-04", strings[3]); + + } + + @Test + public void shouldDeserializeProtobufMapAsSubRows() throws IOException { + String orderNumber = "1"; + Map currentState = new HashMap(); + currentState.put("force_close", "true"); + currentState.put("image", "example.png"); + + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .putAllMetadata(currentState) + .setOrderNumber(orderNumber).build().toByteArray(); + + ProtoDeserializer protoDeserializer 
= new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Object[] currentStateRowList = (Object[]) row.getField(bookingLogFieldIndex("metadata")); + + assertTrue(currentState.keySet().contains(((Row) currentStateRowList[0]).getField(0))); + assertTrue(currentState.values().contains(((Row) currentStateRowList[0]).getField(1))); + assertTrue(currentState.keySet().contains(((Row) currentStateRowList[1]).getField(0))); + assertTrue(currentState.values().contains(((Row) currentStateRowList[1]).getField(1))); + + assertEquals(orderNumber, row.getField(bookingLogFieldIndex("order_number"))); + } + + @Test + public void shouldDeserializeProtobufMapOfNullValueAsSubRows() throws IOException { + String orderNumber = "1"; + Map metaData = new HashMap(); + metaData.put("force_close", "true"); + metaData.put("image", ""); + + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .putAllMetadata(metaData) + .setOrderNumber(orderNumber).build().toByteArray(); + + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Object[] currentStateRowList = (Object[]) row.getField(bookingLogFieldIndex("metadata")); + + assertTrue(metaData.keySet().contains(((Row) currentStateRowList[0]).getField(0))); + assertTrue(metaData.values().contains(((Row) currentStateRowList[0]).getField(1))); + assertTrue(metaData.keySet().contains(((Row) currentStateRowList[1]).getField(0))); + assertTrue(metaData.values().contains(((Row) currentStateRowList[1]).getField(1))); + + assertEquals(orderNumber, row.getField(bookingLogFieldIndex("order_number"))); + } + + @Test + public void shouldIgnoreStructWhileDeserialising() { + byte[] protoBytes = TestNestedRepeatedMessage.newBuilder() + .addMetadata(Struct.getDefaultInstance()) + .addMetadata(Struct.getDefaultInstance()) + .setNumberField(5) + .build().toByteArray(); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestNestedRepeatedMessage.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator); + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + assertNull(row.getField(4)); + assertEquals(row.getField(2), 5); + } + + @Test + public void shouldThrowExceptionIfNotAbleToDeserialise() { + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestNestedRepeatedMessage.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator); + assertThrows(DaggerDeserializationException.class, + () -> protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, null))); + } + + @Test + public void shouldReturnInvalidRow() { + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, "test".getBytes())); + assertFalse((boolean) row.getField(row.getArity() - 2)); + assertEquals(new java.sql.Timestamp(0), row.getField(row.getArity() - 1)); + } + + @Test + public void shouldDeserializeProtoAsRowWithSimpleFieldsWhenStencilAutoRefreshEnabled() { + String expectedOrderNumber = "111"; + final int expectedIterationNumber = 10; + byte[] protoBytes = 
TestBookingLogMessage.newBuilder().setOrderNumber(expectedOrderNumber).setCancelReasonId(expectedIterationNumber).build().toByteArray(); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + assertEquals(expectedOrderNumber, row.getField(bookingLogFieldIndex("order_number"))); + assertEquals(expectedIterationNumber, row.getField(bookingLogFieldIndex("cancel_reason_id"))); + } + + @Test + public void shouldAddExtraFieldsToRowWhenStencilAutoRefreshEnabled() { + String expectedOrderNumber = "111"; + byte[] protoBytes = TestBookingLogMessage + .newBuilder() + .setOrderNumber(expectedOrderNumber) + .setEventTimestamp(Timestamp.newBuilder().setSeconds(1595548800L).setNanos(0).build()) + .build() + .toByteArray(); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + int size = row.getArity(); + assertEquals(51, size); + assertTrue("Didn't add field at the penultimate index", (Boolean) row.getField(size - 2)); + assertEquals(1595548800000L, ((java.sql.Timestamp) row.getField(size - 1)).getTime()); + } + + @Test + public void shouldDeserializeEnumAsStringWhenStencilAutoRefreshEnabled() { + + byte[] protoBytes = TestBookingLogMessage.newBuilder().setServiceType(TestServiceType.Enum.GO_RIDE).setStatus(TestBookingStatus.Enum.COMPLETED).build().toByteArray(); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + assertEquals(TestServiceType.Enum.GO_RIDE.toString(), row.getField(bookingLogFieldIndex("service_type"))); + assertEquals(TestBookingStatus.Enum.COMPLETED.toString(), row.getField(bookingLogFieldIndex("status"))); + } + + @Test + public void shouldDeserializeNestedMessagesAsSubRowsWhenStencilAutoRefreshEnabled() { + final int expectedSeconds = 10; + final int expectedNanoSeconds = 10; + final int expectedAccuracy = 111; + final int expectedLatitude = 222; + final int accuracyFieldIndex = 7; + final int latitudeFieldIndex = 2; + Timestamp expectedTimestamp = Timestamp.newBuilder().setSeconds(expectedSeconds).setNanos(expectedNanoSeconds).build(); + TestLocation testLocation = TestLocation.newBuilder().setAccuracyMeter(expectedAccuracy).setLatitude(expectedLatitude).build(); + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .setEventTimestamp(expectedTimestamp) + .setDriverPickupLocation(testLocation).build().toByteArray(); 
+ + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Row eventTimestampRow = (Row) row.getField(bookingLogFieldIndex("event_timestamp")); + assertEquals(expectedTimestamp.getSeconds(), eventTimestampRow.getField(0)); + assertEquals(expectedTimestamp.getNanos(), eventTimestampRow.getField(1)); + + Row locationRow = (Row) row.getField(bookingLogFieldIndex("driver_pickup_location")); + assertEquals(testLocation.getAccuracyMeter(), locationRow.getField(accuracyFieldIndex)); + assertEquals(testLocation.getLatitude(), locationRow.getField(latitudeFieldIndex)); + } + + @Test + public void shouldDeserializeArrayOfObjectAsSubRowsWhenStencilAutoRefreshEnabled() throws IOException { + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .setOrderNumber("EXAMPLE_ORDER_1") + .addRoutes(TestRoute.newBuilder().setDistanceInKms(1.0f).setRouteOrder(4).build()) + .addRoutes(TestRoute.newBuilder().setDistanceInKms(2.0f).setRouteOrder(5).build()) + .addRoutes(TestRoute.newBuilder().setDistanceInKms(3.0f).setRouteOrder(6).build()) + .build().toByteArray(); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Object[] routes = (Object[]) row.getField(bookingLogFieldIndex("routes")); + Row firstRouteRow = (Row) routes[0]; + assertEquals(firstRouteRow.getField(routeFieldIndex("distance_in_kms")), 1.0f); + assertEquals(firstRouteRow.getField(routeFieldIndex("route_order")), 4); + + Row secondRouteRow = (Row) routes[1]; + assertEquals(secondRouteRow.getField(routeFieldIndex("distance_in_kms")), 2.0f); + assertEquals(secondRouteRow.getField(routeFieldIndex("route_order")), 5); + + Row thirdRouteRow = (Row) routes[2]; + assertEquals(thirdRouteRow.getField(routeFieldIndex("distance_in_kms")), 3.0f); + assertEquals(thirdRouteRow.getField(routeFieldIndex("route_order")), 6); + } + + @Test + public void shouldDeserializeArrayOfStringWhenStencilAutoRefreshEnabled() throws IOException { + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .setOrderNumber("EXAMPLE-ID-01") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-01") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-02") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-03") + .addMetaArray("EXAMPLE-REGISTERED-DEVICE-04") + .build().toByteArray(); + + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + String[] 
strings = (String[]) row.getField(bookingLogFieldIndex("meta_array")); + + assertEquals("EXAMPLE-REGISTERED-DEVICE-01", strings[0]); + assertEquals("EXAMPLE-REGISTERED-DEVICE-02", strings[1]); + assertEquals("EXAMPLE-REGISTERED-DEVICE-03", strings[2]); + assertEquals("EXAMPLE-REGISTERED-DEVICE-04", strings[3]); + + } + + @Test + public void shouldDeserializeProtobufMapAsSubRowsWhenStencilAutoRefreshEnabled() throws IOException { + String orderNumber = "1"; + Map currentState = new HashMap(); + currentState.put("force_close", "true"); + currentState.put("image", "example.png"); + + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .putAllMetadata(currentState) + .setOrderNumber(orderNumber).build().toByteArray(); + + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Object[] currentStateRowList = (Object[]) row.getField(bookingLogFieldIndex("metadata")); + + assertTrue(currentState.keySet().contains(((Row) currentStateRowList[0]).getField(0))); + assertTrue(currentState.values().contains(((Row) currentStateRowList[0]).getField(1))); + assertTrue(currentState.keySet().contains(((Row) currentStateRowList[1]).getField(0))); + assertTrue(currentState.values().contains(((Row) currentStateRowList[1]).getField(1))); + + assertEquals(orderNumber, row.getField(bookingLogFieldIndex("order_number"))); + } + + @Test + public void shouldDeserializeProtobufMapOfNullValueAsSubRowsWhenStencilAutoRefreshEnabled() throws IOException { + String orderNumber = "1"; + Map metaData = new HashMap<>(); + metaData.put("force_close", "true"); + metaData.put("image", ""); + + byte[] protoBytes = TestBookingLogMessage.newBuilder() + .putAllMetadata(metaData) + .setOrderNumber(orderNumber).build().toByteArray(); + + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + + Object[] currentStateRowList = (Object[]) row.getField(bookingLogFieldIndex("metadata")); + + assertTrue(metaData.keySet().contains(((Row) currentStateRowList[0]).getField(0))); + assertTrue(metaData.values().contains(((Row) currentStateRowList[0]).getField(1))); + assertTrue(metaData.keySet().contains(((Row) currentStateRowList[1]).getField(0))); + assertTrue(metaData.values().contains(((Row) currentStateRowList[1]).getField(1))); + + assertEquals(orderNumber, row.getField(bookingLogFieldIndex("order_number"))); + } + + @Test + public void shouldIgnoreStructWhileDeserialisingWhenStencilAutoRefreshEnabled() { + byte[] protoBytes = TestNestedRepeatedMessage.newBuilder() + .addMetadata(Struct.getDefaultInstance()) + .addMetadata(Struct.getDefaultInstance()) + .setNumberField(5) + .build().toByteArray(); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, 
SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestNestedRepeatedMessage.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator); + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); + assertNull(row.getField(4)); + assertEquals(row.getField(2), 5); + } + + @Test + public void shouldReturnInvalidRowWhenStencilAutoRefreshEnabled() { + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(true); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + + ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); + Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, "test".getBytes())); + assertFalse((boolean) row.getField(row.getArity() - 2)); + assertEquals(new java.sql.Timestamp(0), row.getField(row.getArity() - 1)); + } + + + @Test + public void shouldThrowDescriptorNotFoundExceptionForStringClass() { + assertThrows(DescriptorNotFoundException.class, + () -> new ProtoDeserializer(String.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator)); + } + + private int bookingLogFieldIndex(String propertyName) { + return TestBookingLogMessage.getDescriptor().findFieldByName(propertyName).getIndex(); + } + + private int routeFieldIndex(String propertyName) { + return TestRoute.getDescriptor().findFieldByName(propertyName).getIndex(); + } + + +} diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoTypeTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoTypeTest.java similarity index 81% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoTypeTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoTypeTest.java index 9f6f7273f..81aac42fe 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoTypeTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/deserialization/ProtoTypeTest.java @@ -1,24 +1,21 @@ -package io.odpf.dagger.common.serde.proto.deserialization; +package com.gotocompany.dagger.common.serde.proto.deserialization; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestNestedRepeatedMessage; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.types.Row; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestNestedRepeatedMessage; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static 
io.odpf.dagger.common.core.Constants.*; +import static com.gotocompany.dagger.common.core.Constants.*; import static org.apache.flink.api.common.typeinfo.Types.*; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; +import static org.junit.Assert.*; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -34,14 +31,17 @@ public void setup() { initMocks(this); when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); stencilClientOrchestrator = new StencilClientOrchestrator(configuration); } @Test public void shouldGiveAllColumnNamesOfProtoAlongWithRowtime() { - ProtoType feedbackKeyProtoType = new ProtoType("io.odpf.dagger.consumer.TestFeedbackLogKey", "rowtime", stencilClientOrchestrator); - ProtoType bookingKeyProtoType = new ProtoType("io.odpf.dagger.consumer.TestBookingLogKey", "rowtime", stencilClientOrchestrator); + ProtoType feedbackKeyProtoType = new ProtoType("com.gotocompany.dagger.consumer.TestFeedbackLogKey", "rowtime", stencilClientOrchestrator); + ProtoType bookingKeyProtoType = new ProtoType("com.gotocompany.dagger.consumer.TestBookingLogKey", "rowtime", stencilClientOrchestrator); assertArrayEquals( new String[]{"order_number", "event_timestamp", INTERNAL_VALIDATION_FIELD_KEY, "rowtime"}, @@ -54,7 +54,7 @@ public void shouldGiveAllColumnNamesOfProtoAlongWithRowtime() { @Test public void shouldGiveAllTypesOfFieldsAlongWithRowtime() { - ProtoType protoType = new ProtoType("io.odpf.dagger.consumer.TestBookingLogKey", "rowtime", stencilClientOrchestrator); + ProtoType protoType = new ProtoType("com.gotocompany.dagger.consumer.TestBookingLogKey", "rowtime", stencilClientOrchestrator); assertArrayEquals( new TypeInformation[]{STRING, STRING, STRING, STRING, ROW_NAMED(new String[]{"seconds", "nanos"}, LONG, INT), BOOLEAN, SQL_TIMESTAMP}, @@ -63,7 +63,7 @@ public void shouldGiveAllTypesOfFieldsAlongWithRowtime() { @Test public void shouldThrowConfigurationExceptionWhenClassNotFound() { - ProtoType protoType = new ProtoType("io.odpf.dagger.consumer.NotFoundClass", "rowtime", stencilClientOrchestrator); + ProtoType protoType = new ProtoType("com.gotocompany.dagger.consumer.NotFoundClass", "rowtime", stencilClientOrchestrator); assertThrows(DescriptorNotFoundException.class, () -> protoType.getRowType()); } diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/serialization/KafkaProtoSerializerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/serialization/KafkaProtoSerializerTest.java similarity index 94% rename from 
dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/serialization/KafkaProtoSerializerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/serialization/KafkaProtoSerializerTest.java index c6a37c794..85fa26dd7 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/serialization/KafkaProtoSerializerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/serialization/KafkaProtoSerializerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.common.serde.proto.serialization; +package com.gotocompany.dagger.common.serde.proto.serialization; -import io.odpf.dagger.common.exceptions.serde.DaggerSerializationException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerSerializationException; import org.apache.flink.types.Row; import org.apache.kafka.clients.producer.ProducerRecord; import org.junit.Assert; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/serialization/ProtoSerializerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/serialization/ProtoSerializerTest.java similarity index 82% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/serialization/ProtoSerializerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/serialization/ProtoSerializerTest.java index 367d6e5a4..6875b384e 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/serialization/ProtoSerializerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/proto/serialization/ProtoSerializerTest.java @@ -1,21 +1,14 @@ -package io.odpf.dagger.common.serde.proto.serialization; +package com.gotocompany.dagger.common.serde.proto.serialization; -import io.odpf.dagger.common.exceptions.serde.DaggerSerializationException; -import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException; -import org.apache.flink.types.Row; - -import io.odpf.stencil.StencilClientFactory; -import io.odpf.stencil.client.StencilClient; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.exceptions.serde.InvalidColumnMappingException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestEnrichedBookingLogMessage; -import io.odpf.dagger.consumer.TestProfile; -import io.odpf.dagger.consumer.TestSerDeLogKey; -import io.odpf.dagger.consumer.TestSerDeLogMessage; -import io.odpf.dagger.consumer.TestServiceType; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.exceptions.serde.DaggerSerializationException; +import com.gotocompany.dagger.common.exceptions.serde.InvalidColumnMappingException; +import com.gotocompany.dagger.consumer.*; +import com.gotocompany.stencil.StencilClientFactory; +import com.gotocompany.stencil.client.StencilClient; +import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -43,8 +36,8 @@ public void setup() { @Test public void shouldSerializeKeyForProto() throws InvalidProtocolBufferException { String[] columnNames = {"window_start_time", "window_end_time", "s2_id_level", "s2_id", "service_type"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestSerDeLogKey"; - String 
outputProtoMessage = "io.odpf.dagger.consumer.TestSerDeLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestSerDeLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestSerDeLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); long seconds = System.currentTimeMillis() / 1000; @@ -78,8 +71,8 @@ public void shouldSerializeKeyForProto() throws InvalidProtocolBufferException { public void shouldSerializeMessageProto() throws InvalidProtocolBufferException { String[] columnNames = {"window_start_time", "window_end_time", "s2_id_level", "s2_id", "service_type", "unique_customers", "event_timestamp", "string_type", "bool_type", "message_type", "repeated_message_type", "map_type"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestSerDeLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestSerDeLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestSerDeLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestSerDeLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); long seconds = System.currentTimeMillis() / 1000; @@ -137,8 +130,8 @@ public void shouldSerializeMessageProto() throws InvalidProtocolBufferException @Test public void shouldSerializeDataForOneFieldInNestedProtoWhenMappedFromQuery() throws InvalidProtocolBufferException { String[] columnNames = {"customer_profile.customer_id"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestEnrichedBookingLogMessage"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestEnrichedBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(1); @@ -155,8 +148,8 @@ public void shouldSerializeDataForOneFieldInNestedProtoWhenMappedFromQuery() thr @Test public void shouldSerializeDataForMultipleFieldsInSameNestedProtoWhenMappedFromQuery() throws InvalidProtocolBufferException { String[] columnNames = {"customer_profile.name", "customer_profile.email", "customer_profile.phone_verified"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestEnrichedBookingLogMessage"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestEnrichedBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(3); @@ -177,8 +170,8 @@ public void shouldSerializeDataForMultipleFieldsInSameNestedProtoWhenMappedFromQ @Test public void shouldSerializeDataForMultipleFieldsInDifferentNestedProtoWhenMappedFromQuery() throws InvalidProtocolBufferException { String[] columnNames = {"order_number", "service_type", "customer_price", "customer_total_fare_without_surge", "driver_pickup_location.name", "driver_pickup_location.latitude"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = 
"com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(6); @@ -205,8 +198,8 @@ public void shouldSerializeDataForMultipleFieldsInDifferentNestedProtoWhenMapped @Test public void shouldThrowExceptionWhenColumnDoesNotExists() { String[] columnNames = {"order_number", "driver_pickup_location.invalid"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(2); element.setField(0, "order_number"); @@ -214,7 +207,7 @@ public void shouldThrowExceptionWhenColumnDoesNotExists() { InvalidColumnMappingException exception = assertThrows(InvalidColumnMappingException.class, () -> serializer.serializeValue(element)); - assertEquals("column invalid doesn't exists in the proto of io.odpf.dagger.consumer.TestLocation", + assertEquals("column invalid doesn't exists in the proto of com.gotocompany.dagger.consumer.TestLocation", exception.getMessage()); } @@ -222,8 +215,8 @@ public void shouldThrowExceptionWhenColumnDoesNotExists() { @Test public void shouldMapOtherFieldsWhenOneOfTheFirstFieldIsInvalidForANestedFieldInTheQuery() throws InvalidProtocolBufferException { String[] columnNames = {"blah.invalid", "customer_email"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(2); element.setField(0, "order_number"); @@ -237,8 +230,8 @@ public void shouldMapOtherFieldsWhenOneOfTheFirstFieldIsInvalidForANestedFieldIn @Test public void shouldMapEmptyDataWhenFieldIsInvalidInTheQuery() { String[] columnNames = {"invalid"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(1); element.setField(0, "order_number"); @@ -251,8 +244,8 @@ public void shouldMapEmptyDataWhenFieldIsInvalidInTheQuery() { @Test public void shouldMapOtherFieldsWhenOneOfTheFieldIsInvalidInTheQuery() throws InvalidProtocolBufferException { String[] columnNames = {"invalid", "order_number"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; 
ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(2); element.setField(0, "some_data"); @@ -266,8 +259,8 @@ public void shouldMapOtherFieldsWhenOneOfTheFieldIsInvalidInTheQuery() throws In @Test public void shouldNotThrowExceptionWhenPrimitiveTypeCanBeCasted() throws InvalidProtocolBufferException { String[] columnNames = {"order_number"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(1); element.setField(0, 1234); @@ -279,15 +272,15 @@ public void shouldNotThrowExceptionWhenPrimitiveTypeCanBeCasted() throws Invalid @Test public void shouldThrowExceptionWhenPrimitiveTypeCanNotBeCasted() { String[] columnNames = {"customer_price"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(1); element.setField(0, "invalid_number"); - InvalidDataTypeException exception = assertThrows(InvalidDataTypeException.class, + InvalidColumnMappingException exception = assertThrows(InvalidColumnMappingException.class, () -> serializer.serializeValue(element)); - assertEquals("type mismatch of field: customer_price, expecting DOUBLE type, actual type class java.lang.String", + assertEquals("column invalid: type mismatch of column customer_price, expecting DOUBLE type. 
Actual type class java.lang.String", exception.getMessage()); } @@ -295,8 +288,8 @@ public void shouldThrowExceptionWhenPrimitiveTypeCanNotBeCasted() { public void shouldHandleRepeatedTypeWhenTypeDoesNotMatch() { String[] columnNames = {"meta_array"}; - String outputProtoKey = "io.odpf.dagger.consumer.TestBookingLogKey"; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoKey = "com.gotocompany.dagger.consumer.TestBookingLogKey"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(1); element.setField(0, 1234); @@ -310,7 +303,7 @@ public void shouldHandleRepeatedTypeWhenTypeDoesNotMatch() { @Test public void shouldSerializeMessageWhenOnlyMessageProtoProvided() throws InvalidProtocolBufferException { String[] columnNames = {"order_number", "driver_id"}; - String outputProtoMessage = "io.odpf.dagger.consumer.TestBookingLogMessage"; + String outputProtoMessage = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; ProtoSerializer serializer = new ProtoSerializer(null, outputProtoMessage, columnNames, stencilClientOrchestrator); String orderNumber = "RB-1234"; @@ -328,7 +321,7 @@ public void shouldSerializeMessageWhenOnlyMessageProtoProvided() throws InvalidP @Test public void shouldReturnNullKeyWhenOnlyMessageProtoProvided() { String[] columnNames = {"s2_id_level"}; - String protoMessage = "io.odpf.dagger.consumer.TestSerDeLogMessage"; + String protoMessage = "com.gotocompany.dagger.consumer.TestSerDeLogMessage"; ProtoSerializer serializer = new ProtoSerializer(null, protoMessage, columnNames, stencilClientOrchestrator); @@ -345,7 +338,7 @@ public void shouldReturnNullKeyWhenOnlyMessageProtoProvided() { @Test public void shouldReturnNullKeyWhenKeyIsEmptyString() { String[] columnNames = {"s2_id_level"}; - String protoMessage = "io.odpf.dagger.consumer.TestSerDeLogMessage"; + String protoMessage = "com.gotocompany.dagger.consumer.TestSerDeLogMessage"; ProtoSerializer serializer = new ProtoSerializer("", protoMessage, columnNames, stencilClientOrchestrator); Row element = new Row(1); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/PrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/PrimitiveTypeHandlerTest.java similarity index 87% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/PrimitiveTypeHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/PrimitiveTypeHandlerTest.java index 6effa7297..bad98f688 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/PrimitiveTypeHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/PrimitiveTypeHandlerTest.java @@ -1,13 +1,14 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; import com.google.protobuf.ByteString; -import io.odpf.dagger.consumer.TestMessageEnvelope; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.consumer.TestMessageEnvelope; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException; -import 
io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.common.exceptions.serde.InvalidDataTypeException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.io.api.Binary; import org.apache.parquet.schema.GroupType; @@ -103,6 +104,17 @@ public void shouldReturnSameValueForTransformFromProto() { assertEquals("123", primitiveTypeHandler.transformFromProto("123")); } + @Test + public void shouldReturnSameValueForTransformFromProtoUsingCache() { + Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor stringFieldDescriptor = descriptor.findFieldByName("order_number"); + PrimitiveTypeHandler primitiveTypeHandler = new PrimitiveTypeHandler(stringFieldDescriptor); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + assertEquals(123, primitiveTypeHandler.transformFromProtoUsingCache(123, fieldDescriptorCache)); + assertEquals("123", primitiveTypeHandler.transformFromProtoUsingCache("123", fieldDescriptorCache)); + } + @Test public void shouldReturnTypeInformation() { Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/RowFactoryTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/RowFactoryTest.java similarity index 74% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/RowFactoryTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/RowFactoryTest.java index 4976d11ae..2f44f1c68 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/RowFactoryTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/RowFactoryTest.java @@ -1,7 +1,8 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; -import io.odpf.dagger.consumer.TestPrimitiveMessage; -import io.odpf.dagger.consumer.TestReason; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.consumer.TestPrimitiveMessage; +import com.gotocompany.dagger.consumer.TestReason; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeutils.TypeSerializer; @@ -11,7 +12,7 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.io.api.Binary; import org.apache.parquet.schema.GroupType; @@ -126,6 +127,62 @@ public void shouldBeAbleToCreateAValidCopyOfTheRowCreated() throws InvalidProtoc assertEquals(copy.toString(), row.toString()); } + @Test + public void shouldCreateRowUsingCacheForDynamicMessage() throws InvalidProtocolBufferException { + TestBookingLogMessage customerLogMessage = TestBookingLogMessage.newBuilder().build(); + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), customerLogMessage.toByteArray()); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + Row row = 
RowFactory.createRow(dynamicMessage, fieldDescriptorCache); + assertNotNull(row); + assertEquals(49, row.getArity()); + } + + @Test + public void shouldCreateRowUsingCacheWithPSetFieldsForDynamicMessage() throws InvalidProtocolBufferException { + TestBookingLogMessage customerLogMessage = TestBookingLogMessage + .newBuilder() + .setCustomerId("144614") + .setCustomerUrl("https://www.abcd.com/1234") + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), customerLogMessage.toByteArray()); + Row row = RowFactory.createRow(dynamicMessage, fieldDescriptorCache); + assertEquals("144614", row.getField(5)); + assertEquals("https://www.abcd.com/1234", row.getField(6)); + } + + + @Test + public void shouldBeAbleToCreateAValidCopyOfTheRowCreatedUsingCache() throws InvalidProtocolBufferException { + TestBookingLogMessage customerLogMessage = TestBookingLogMessage + .newBuilder() + .setCustomerId("144614") + .setCustomerUrl("https://www.abcd.com/1234") + .build(); + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), customerLogMessage.toByteArray()); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + Row row = RowFactory.createRow(dynamicMessage, fieldDescriptorCache); + assertEquals("144614", row.getField(5)); + assertEquals("https://www.abcd.com/1234", row.getField(6)); + ArrayList> typeInformations = new ArrayList<>(); + ExecutionConfig config = new ExecutionConfig(); + TestBookingLogMessage.getDescriptor().getFields().forEach(fieldDescriptor -> { + typeInformations.add(TypeHandlerFactory.getTypeHandler(fieldDescriptor).getTypeInformation()); + }); + ArrayList> typeSerializers = new ArrayList<>(); + typeInformations.forEach(rowTypeInformation -> { + typeSerializers.add(rowTypeInformation.createSerializer(config)); + }); + RowSerializer rowSerializer = new RowSerializer(typeSerializers.toArray(new TypeSerializer[0])); + + Row copy = rowSerializer.copy(row); + + assertEquals(copy.toString(), row.toString()); + } + @Test public void shouldCreateRowWithPositionIndexingFromSimpleGroup() { Descriptors.Descriptor descriptor = TestPrimitiveMessage.getDescriptor(); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/TypeHandlerFactoryTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandlerFactoryTest.java similarity index 65% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/TypeHandlerFactoryTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandlerFactoryTest.java index 0cd7a5e1e..155b5b05f 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/TypeHandlerFactoryTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/TypeHandlerFactoryTest.java @@ -1,19 +1,22 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; +import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.serde.typehandler.complex.EnumHandler; -import io.odpf.dagger.common.serde.typehandler.complex.MapHandler; -import io.odpf.dagger.common.serde.typehandler.complex.MessageHandler; -import 
io.odpf.dagger.common.serde.typehandler.complex.StructMessageHandler; -import io.odpf.dagger.common.serde.typehandler.complex.TimestampHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedEnumHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedMessageHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedPrimitiveHandler; -import io.odpf.dagger.common.serde.typehandler.repeated.RepeatedStructMessageHandler; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestFeedbackLogMessage; -import io.odpf.dagger.consumer.TestNestedRepeatedMessage; -import io.odpf.dagger.consumer.TestRepeatedEnumMessage; +import com.google.protobuf.InvalidProtocolBufferException; +import com.gotocompany.dagger.common.serde.typehandler.complex.EnumHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.MapHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.MessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.StructMessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.complex.TimestampHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedEnumHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedMessageHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedPrimitiveHandler; +import com.gotocompany.dagger.common.serde.typehandler.repeated.RepeatedStructMessageHandler; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestFeedbackLogMessage; +import com.gotocompany.dagger.consumer.TestGrpcResponse; +import com.gotocompany.dagger.consumer.TestNestedRepeatedMessage; +import com.gotocompany.dagger.consumer.TestRepeatedEnumMessage; import org.junit.Before; import org.junit.Test; @@ -22,6 +25,7 @@ import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; public class TypeHandlerFactoryTest { @Before @@ -29,6 +33,34 @@ public void setup() { TypeHandlerFactory.clearTypeHandlerMap(); } + @Test + public void shouldReturnTheSameHandlerObjectWhenBothFieldDescriptorFullNameAndFieldDescriptorHashCodeIsSame() { + Descriptors.FieldDescriptor mapFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("metadata"); + TypeHandler typeHandler1 = TypeHandlerFactory.getTypeHandler(mapFieldDescriptor); + TypeHandler typeHandler2 = TypeHandlerFactory.getTypeHandler(mapFieldDescriptor); + + assertEquals(typeHandler1, typeHandler2); + } + + @Test + public void shouldReturnDifferentCopiesOfHandlerObjectWhenFieldDescriptorFullNameIsSameButHashCodeIsDifferent() throws Descriptors.DescriptorValidationException, InvalidProtocolBufferException { + /* get a field descriptor in the usual way */ + Descriptors.FieldDescriptor mapFieldDescriptor1 = TestGrpcResponse.getDescriptor().findFieldByName("success"); + + /* serialize descriptor to byte[], then deserialize to get a new object of field descriptor */ + byte[] descriptorByteArray = TestGrpcResponse.getDescriptor().getFile().toProto().toByteArray(); + DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.parseFrom(descriptorByteArray); + Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(fileDescriptorProto, new Descriptors.FileDescriptor[]{}); + Descriptors.FieldDescriptor mapFieldDescriptor2 = fileDescriptor + 
.findMessageTypeByName("TestGrpcResponse") + .findFieldByName("success"); + + TypeHandler typeHandler1 = TypeHandlerFactory.getTypeHandler(mapFieldDescriptor1); + TypeHandler typeHandler2 = TypeHandlerFactory.getTypeHandler(mapFieldDescriptor2); + assertNotEquals(mapFieldDescriptor1.hashCode(), mapFieldDescriptor2.hashCode()); + assertNotEquals(typeHandler1, typeHandler2); + } + @Test public void shouldReturnMapHandlerIfMapFieldDescriptorPassed() { Descriptors.FieldDescriptor mapFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("metadata"); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/TypeInformationFactoryTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/TypeInformationFactoryTest.java similarity index 87% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/TypeInformationFactoryTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/TypeInformationFactoryTest.java index 02f734a33..ab2b999f6 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/TypeInformationFactoryTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/TypeInformationFactoryTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.common.serde.typehandler; +package com.gotocompany.dagger.common.serde.typehandler; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.consumer.TestBookingLogKey; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.consumer.TestBookingLogKey; import org.junit.Assert; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/EnumHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/EnumHandlerTest.java similarity index 90% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/EnumHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/EnumHandlerTest.java index 750b9d064..5f7151b33 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/EnumHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/EnumHandlerTest.java @@ -1,10 +1,11 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.consumer.*; -import io.odpf.dagger.common.exceptions.serde.EnumFieldNotFoundException; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.consumer.*; +import com.gotocompany.dagger.common.exceptions.serde.EnumFieldNotFoundException; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; @@ -76,7 +77,7 @@ public void shouldThrowExceptionIfFieldNotFoundInGivenEnumFieldTypeDescriptor() DynamicMessage.Builder builder = DynamicMessage.newBuilder(enumFieldDescriptor.getContainingType()); 
EnumFieldNotFoundException exception = Assert.assertThrows(EnumFieldNotFoundException.class, () -> enumHandler.transformToProtoBuilder(builder, "test")); - assertEquals("field: test not found in io.odpf.dagger.consumer.TestBookingLogMessage.service_type", exception.getMessage()); + assertEquals("field: test not found in com.gotocompany.dagger.consumer.TestBookingLogMessage.service_type", exception.getMessage()); } @Test @@ -157,6 +158,16 @@ public void shouldTransformValueFromProto() { assertEquals("DRIVER_FOUND", enumHandler.transformFromProto("DRIVER_FOUND")); } + @Test + public void shouldTransformValueFromProtoUsingCache() { + Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName("status"); + EnumHandler enumHandler = new EnumHandler(fieldDescriptor); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + assertEquals("DRIVER_FOUND", enumHandler.transformFromProtoUsingCache("DRIVER_FOUND", fieldDescriptorCache)); + } + @Test public void shouldConvertEnumToJsonString() { Descriptors.FieldDescriptor fieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("status"); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/MapHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/MapHandlerTest.java similarity index 77% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/MapHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/MapHandlerTest.java index 5a9c018e2..fe407b80d 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/MapHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/MapHandlerTest.java @@ -1,16 +1,12 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; +import com.google.protobuf.*; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestComplexMap; +import com.gotocompany.dagger.consumer.TestMessage; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; - -import com.google.protobuf.Descriptors; -import com.google.protobuf.DynamicMessage; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.MapEntry; -import com.google.protobuf.WireFormat; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestComplexMap; -import io.odpf.dagger.consumer.TestMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.LogicalTypeAnnotation; @@ -18,16 +14,9 @@ import org.apache.parquet.schema.PrimitiveType; import org.junit.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; -import static org.apache.parquet.schema.Types.buildMessage; -import static org.apache.parquet.schema.Types.repeatedGroup; -import static org.apache.parquet.schema.Types.requiredGroup; -import static org.apache.parquet.schema.Types.requiredMap; +import static org.apache.parquet.schema.Types.*; import static 
org.junit.Assert.*; public class MapHandlerTest { @@ -310,6 +299,156 @@ public void shouldReturnArrayOfRowsHavingFieldsSetAsInputMapHavingComplexDataFie assertEquals(expected, outputValues.get(0)); } + + @Test + public void shouldReturnArrayOfRowHavingSameSizeAsInputMapForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("metadata"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + MapEntry mapEntry = MapEntry + .newDefaultInstance(mapFieldDescriptor.getMessageType(), WireFormat.FieldType.STRING, "", WireFormat.FieldType.STRING, ""); + TestBookingLogMessage driverProfileFlattenLogMessage = TestBookingLogMessage + .newBuilder() + .addRepeatedField(mapFieldDescriptor, mapEntry.toBuilder().setKey("a").setValue("123").buildPartial()) + .addRepeatedField(mapFieldDescriptor, mapEntry.toBuilder().setKey("b").setValue("456").buildPartial()) + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), driverProfileFlattenLogMessage.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + assertEquals(2, outputValues.size()); + } + + @Test + public void shouldReturnArrayOfRowHavingFieldsSetAsInputMapAndOfSizeTwoForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("metadata"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + MapEntry mapEntry = MapEntry + .newDefaultInstance(mapFieldDescriptor.getMessageType(), WireFormat.FieldType.STRING, "", WireFormat.FieldType.STRING, ""); + TestBookingLogMessage driverProfileFlattenLogMessage = TestBookingLogMessage + .newBuilder() + .addRepeatedField(mapFieldDescriptor, mapEntry.toBuilder().setKey("a").setValue("123").buildPartial()) + .addRepeatedField(mapFieldDescriptor, mapEntry.toBuilder().setKey("b").setValue("456").buildPartial()) + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), driverProfileFlattenLogMessage.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + assertEquals(Row.of("a", "123"), outputValues.get(0)); + assertEquals(Row.of("b", "456"), outputValues.get(1)); + } + + @Test + public void shouldReturnArrayOfRowHavingSameSizeAsInputMapHavingComplexDataFieldsForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestComplexMap.getDescriptor().findFieldByName("complex_map"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + Map complexMap = new HashMap<>(); + complexMap.put(1, TestMessage.newBuilder().setOrderNumber("123").setOrderDetails("abc").build()); + complexMap.put(2, TestMessage.newBuilder().setOrderNumber("456").setOrderDetails("efg").build()); + TestComplexMap testComplexMap = TestComplexMap.newBuilder().putAllComplexMap(complexMap).build(); + FieldDescriptorCache fieldDescriptorCache = new 
FieldDescriptorCache(TestComplexMap.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestComplexMap.getDescriptor(), testComplexMap.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + assertEquals(2, outputValues.size()); + } + + @Test + public void shouldReturnArrayOfRowsHavingFieldsSetAsInputMapHavingComplexDataFieldsAndOfSizeTwoForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestComplexMap.getDescriptor().findFieldByName("complex_map"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + Map complexMap = new HashMap<>(); + complexMap.put(1, TestMessage.newBuilder().setOrderNumber("123").setOrderDetails("abc").build()); + complexMap.put(2, TestMessage.newBuilder().setOrderNumber("456").setOrderDetails("efg").build()); + TestComplexMap testComplexMap = TestComplexMap.newBuilder().putAllComplexMap(complexMap).build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestComplexMap.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestComplexMap.getDescriptor(), testComplexMap.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + Row mapEntry1 = Row.of(1, Row.of("123", "", "abc")); + Row mapEntry2 = Row.of(2, Row.of("456", "", "efg")); + + assertEquals(mapEntry1, outputValues.get(0)); + assertEquals(mapEntry2, outputValues.get(1)); + } + + @Test + public void shouldReturnArrayOfRowsHavingFieldsSetAsInputMapHavingComplexDataFieldsIfKeyIsSetAsDefaultProtoValueForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestComplexMap.getDescriptor().findFieldByName("complex_map"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + Map complexMap = new HashMap<>(); + complexMap.put(0, TestMessage.newBuilder().setOrderNumber("123").setOrderDetails("abc").build()); + TestComplexMap testComplexMap = TestComplexMap.newBuilder().putAllComplexMap(complexMap).build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestComplexMap.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestComplexMap.getDescriptor(), testComplexMap.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + Row expected = Row.of(0, Row.of("123", "", "abc")); + assertEquals(expected, outputValues.get(0)); + } + + @Test + public void shouldReturnArrayOfRowsHavingFieldsSetAsInputMapHavingComplexDataFieldsIfValueIsDefaultForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestComplexMap.getDescriptor().findFieldByName("complex_map"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + Map complexMap = new HashMap<>(); + complexMap.put(1, TestMessage.getDefaultInstance()); + TestComplexMap testComplexMap = TestComplexMap.newBuilder().putAllComplexMap(complexMap).build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestComplexMap.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestComplexMap.getDescriptor(), 
testComplexMap.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + Row expected = Row.of(1, Row.of("", "", "")); + + assertEquals(expected, outputValues.get(0)); + } + + @Test + public void shouldReturnArrayOfRowsHavingFieldsSetAsInputMapHavingComplexDataFieldsIfKeyAndValueAreDefaultForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestComplexMap.getDescriptor().findFieldByName("complex_map"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + Map complexMap = new HashMap<>(); + complexMap.put(0, TestMessage.getDefaultInstance()); + TestComplexMap testComplexMap = TestComplexMap.newBuilder().putAllComplexMap(complexMap).build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestComplexMap.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestComplexMap.getDescriptor(), testComplexMap.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + Row expected = Row.of(0, Row.of("", "", "")); + + assertEquals(expected, outputValues.get(0)); + } + + @Test + public void shouldReturnArrayOfRowsHavingFieldsSetAsInputMapHavingComplexDataFieldsForDefaultInstanceForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor mapFieldDescriptor = TestComplexMap.getDescriptor().findFieldByName("complex_map"); + MapHandler mapHandler = new MapHandler(mapFieldDescriptor); + Map complexMap = new HashMap<>(); + complexMap.put(0, TestMessage.newBuilder().setOrderNumber("").setOrderDetails("").build()); + TestComplexMap testComplexMap = TestComplexMap.newBuilder().putAllComplexMap(complexMap).build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestComplexMap.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestComplexMap.getDescriptor(), testComplexMap.toByteArray()); + + List outputValues = Arrays.asList((Object[]) mapHandler.transformFromProtoUsingCache(dynamicMessage.getField(mapFieldDescriptor), fieldDescriptorCache)); + + Row expected = Row.of(0, Row.of("", "", "")); + assertEquals(expected, outputValues.get(0)); + } + @Test public void shouldReturnEmptyArrayOfRowIfNullPassedForTransformForKafka() { Descriptors.FieldDescriptor mapFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("metadata"); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/MessageHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/MessageHandlerTest.java similarity index 83% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/MessageHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/MessageHandlerTest.java index 48faff3da..1881f9e0f 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/MessageHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/MessageHandlerTest.java @@ -1,6 +1,7 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; -import 
io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; @@ -8,8 +9,8 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestPaymentOptionMetadata; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestPaymentOptionMetadata; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.PrimitiveType; @@ -268,4 +269,42 @@ public void shouldReturnRowContainingDefaultValuesForFieldsWhenTransformFromParq assertEquals("", row.getField(0)); assertEquals("", row.getField(1)); } + + @Test + public void shouldReturnRowGivenAMapForFieldDescriptorOfTypeMessageIfAllValueArePassedForTransformFromProtoMap() throws InvalidProtocolBufferException { + TestBookingLogMessage bookingLogMessage = TestBookingLogMessage + .newBuilder() + .setPaymentOptionMetadata(TestPaymentOptionMetadata.newBuilder().setMaskedCard("test1").setNetwork("test2").build()) + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestPaymentOptionMetadata.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + + Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName("payment_option_metadata"); + + Row value = (Row) new MessageHandler(fieldDescriptor).transformFromProtoUsingCache(dynamicMessage.getField(fieldDescriptor), fieldDescriptorCache); + + assertEquals("test1", value.getField(0)); + assertEquals("test2", value.getField(1)); + } + + @Test + public void shouldReturnRowGivenAMapForFieldDescriptorOfTypeMessageIfAllValueAreNotPassedForTransformFromProtoMap() throws InvalidProtocolBufferException { + TestBookingLogMessage bookingLogMessage = TestBookingLogMessage + .newBuilder() + .setPaymentOptionMetadata(TestPaymentOptionMetadata.newBuilder().setMaskedCard("test1").build()) + .build(); + + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestPaymentOptionMetadata.getDescriptor()); + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + + Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName("payment_option_metadata"); + + Row value = (Row) new MessageHandler(fieldDescriptor).transformFromProtoUsingCache(dynamicMessage.getField(fieldDescriptor), fieldDescriptorCache); + + assertEquals("test1", value.getField(0)); + assertEquals("", value.getField(1)); + } } diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/StructMessageHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/StructMessageHandlerTest.java similarity index 84% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/StructMessageHandlerTest.java rename to 
dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/StructMessageHandlerTest.java index b065a40e9..e1b0883f2 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/StructMessageHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/StructMessageHandlerTest.java @@ -1,13 +1,14 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestRepeatedEnumMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestRepeatedEnumMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; @@ -63,6 +64,15 @@ public void shouldReturnNullForTransformForKafka() { assertNull(structMessageHandler.transformFromProto("test")); } + @Test + public void shouldReturnNullForTransformFromProtoUsingCache() { + Descriptors.FieldDescriptor fieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("profile_data"); + StructMessageHandler structMessageHandler = new StructMessageHandler(fieldDescriptor); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + assertNull(structMessageHandler.transformFromProtoUsingCache("test", fieldDescriptorCache)); + } + @Test public void shouldReturnTypeInformation() { Descriptors.FieldDescriptor fieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("profile_data"); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/TimestampHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/TimestampHandlerTest.java similarity index 90% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/TimestampHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/TimestampHandlerTest.java index e2c385dd6..bd237aaa7 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/complex/TimestampHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/complex/TimestampHandlerTest.java @@ -1,7 +1,8 @@ -package io.odpf.dagger.common.serde.typehandler.complex; +package com.gotocompany.dagger.common.serde.typehandler.complex; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; @@ -9,7 +10,7 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import 
io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.MessageType; @@ -249,6 +250,38 @@ public void shouldSetDefaultValueForDynamicMessageForKafkaIfValuesNotSet() throw assertEquals(Row.of(0L, 0), row); } + + @Test + public void shouldTransformTimestampForDynamicMessageForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName("event_timestamp"); + TestBookingLogMessage bookingLogMessage = TestBookingLogMessage + .newBuilder() + .setEventTimestamp(com.google.protobuf.Timestamp.newBuilder().setSeconds(10L).setNanos(10).build()) + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + TimestampHandler timestampHandler = new TimestampHandler(fieldDescriptor); + Row row = (Row) timestampHandler.transformFromProtoUsingCache(dynamicMessage.getField(fieldDescriptor), fieldDescriptorCache); + assertEquals(Row.of(10L, 10), row); + } + + @Test + public void shouldSetDefaultValueForDynamicMessageForTransformFromProtoUsingCacheIfValuesNotSet() throws InvalidProtocolBufferException { + Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName("event_timestamp"); + TestBookingLogMessage bookingLogMessage = TestBookingLogMessage + .newBuilder() + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + TimestampHandler timestampHandler = new TimestampHandler(fieldDescriptor); + Row row = (Row) timestampHandler.transformFromProtoUsingCache(dynamicMessage.getField(fieldDescriptor), fieldDescriptorCache); + assertEquals(Row.of(0L, 0), row); + } + @Test public void shouldConvertTimestampToJsonString() { Descriptors.Descriptor descriptor = TestBookingLogMessage.getDescriptor(); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/BooleanHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/BooleanHandlerTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/BooleanHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/BooleanHandlerTest.java index 3a71862a9..862492bf5 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/BooleanHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/BooleanHandlerTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import 
org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/ByteStringHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/ByteStringHandlerTest.java similarity index 97% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/ByteStringHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/ByteStringHandlerTest.java index 88f05fa6a..8be0e7816 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/ByteStringHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/ByteStringHandlerTest.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; -import io.odpf.dagger.consumer.TestRepeatedPrimitiveMessage; +import com.gotocompany.dagger.consumer.TestRepeatedPrimitiveMessage; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestMessageEnvelope; +import com.gotocompany.dagger.consumer.TestMessageEnvelope; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.io.api.Binary; import org.apache.parquet.schema.GroupType; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/DoubleHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/DoubleHandlerTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/DoubleHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/DoubleHandlerTest.java index d6cff55d8..5d9ff3d1a 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/DoubleHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/DoubleHandlerTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.flink.api.common.typeinfo.Types; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/FloatHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/FloatHandlerTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/FloatHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/FloatHandlerTest.java index 5b9488a8a..362704966 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/FloatHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/FloatHandlerTest.java @@ -1,9 +1,9 @@ -package 
io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/IntegerHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/IntegerHandlerTest.java similarity index 97% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/IntegerHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/IntegerHandlerTest.java index ee07661a3..08d8dc2b9 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/IntegerHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/IntegerHandlerTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; -import io.odpf.dagger.consumer.TestNestedRepeatedMessage; +import com.gotocompany.dagger.consumer.TestNestedRepeatedMessage; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/LongHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/LongHandlerTest.java similarity index 97% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/LongHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/LongHandlerTest.java index 7b1f44f2b..519ccffc1 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/LongHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/LongHandlerTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; -import io.odpf.dagger.consumer.TestNestedRepeatedMessage; +import com.gotocompany.dagger.consumer.TestNestedRepeatedMessage; import org.apache.flink.api.common.typeinfo.Types; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestAggregatedSupplyMessage; +import com.gotocompany.dagger.consumer.TestAggregatedSupplyMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactoryTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactoryTest.java similarity index 91% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactoryTest.java rename to 
dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactoryTest.java index 7a30f612c..f44debdcf 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactoryTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/PrimitiveHandlerFactoryTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; -import io.odpf.dagger.common.exceptions.serde.DataTypeNotSupportedException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestMessageEnvelope; +import com.gotocompany.dagger.common.exceptions.serde.DataTypeNotSupportedException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestMessageEnvelope; import org.junit.Assert; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/StringHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/StringHandlerTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/StringHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/StringHandlerTest.java index e8cee4aac..fea398023 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/primitive/StringHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/primitive/StringHandlerTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.common.serde.typehandler.primitive; +package com.gotocompany.dagger.common.serde.typehandler.primitive; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedEnumHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedEnumHandlerTest.java similarity index 59% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedEnumHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedEnumHandlerTest.java index 537bf8711..9c8cd7651 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedEnumHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedEnumHandlerTest.java @@ -1,21 +1,25 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; - -import org.apache.flink.api.common.typeinfo.TypeInformation; -import org.apache.flink.api.common.typeinfo.Types; -import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; +package com.gotocompany.dagger.common.serde.typehandler.repeated; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.consumer.TestBookingLogMessage; 
-import io.odpf.dagger.consumer.TestEnumMessage; -import io.odpf.dagger.consumer.TestRepeatedEnumMessage; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.exceptions.serde.EnumFieldNotFoundException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestEnumMessage; +import com.gotocompany.dagger.consumer.TestRepeatedEnumMessage; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.List; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64; @@ -26,6 +30,7 @@ import static org.junit.Assert.assertTrue; public class RepeatedEnumHandlerTest { + @Test public void shouldReturnTrueIfRepeatedEnumFieldDescriptorIsPassed() { Descriptors.FieldDescriptor repeatedEnumFieldDescriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); @@ -50,15 +55,6 @@ public void shouldReturnFalseIfFieldDescriptorOtherThanRepeatedEnumTypeIsPassed( assertFalse(repeatedEnumHandler.canHandle()); } - @Test - public void shouldReturnTheSameBuilderWithoutSettingAnyValue() { - Descriptors.FieldDescriptor repeatedEnumFieldDescriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); - RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(repeatedEnumFieldDescriptor); - DynamicMessage.Builder builder = DynamicMessage.newBuilder(repeatedEnumFieldDescriptor.getContainingType()); - - assertEquals(Collections.EMPTY_LIST, repeatedEnumHandler.transformToProtoBuilder(builder, 123).getField(repeatedEnumFieldDescriptor)); - } - @Test public void shouldReturnTypeInformation() { Descriptors.FieldDescriptor repeatedEnumFieldDescriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); @@ -116,6 +112,31 @@ public void shouldTransformValueFromProtoAsEmptyStringArrayForNull() { assertEquals(0, outputValues.length); } + @Test + public void shouldTransformValueFromProtoUsingCacheAsStringArray() throws InvalidProtocolBufferException { + TestRepeatedEnumMessage testRepeatedEnumMessage = TestRepeatedEnumMessage.newBuilder().addTestEnums(TestEnumMessage.Enum.UNKNOWN).build(); + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestRepeatedEnumMessage.getDescriptor(), testRepeatedEnumMessage.toByteArray()); + + Descriptors.FieldDescriptor repeatedEnumFieldDescriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(repeatedEnumFieldDescriptor); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestRepeatedEnumMessage.getDescriptor()); + + String[] outputValues = (String[]) repeatedEnumHandler.transformFromProtoUsingCache(dynamicMessage.getField(repeatedEnumFieldDescriptor), fieldDescriptorCache); + + assertEquals("UNKNOWN", outputValues[0]); + } + + @Test + public void shouldTransformValueFromProtoUsingCacheAsEmptyStringArrayForNull() { + Descriptors.FieldDescriptor repeatedEnumFieldDescriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + 
RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(repeatedEnumFieldDescriptor); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestRepeatedEnumMessage.getDescriptor()); + + String[] outputValues = (String[]) repeatedEnumHandler.transformFromProtoUsingCache(null, fieldDescriptorCache); + + assertEquals(0, outputValues.length); + } + @Test public void shouldTransformValueForParquetAsStringArray() { Descriptors.FieldDescriptor fieldDescriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); @@ -194,4 +215,82 @@ public void shouldTransformValueForParquetAsEmptyStringArrayWhenFieldIsNotInitia assertArrayEquals(new String[0], actualEnumArray); } + + @Test + public void shouldReturnSameBuilderIfFieldIsDifferentType() { + Descriptors.FieldDescriptor otherFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("order_number"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestBookingLogMessage.getDescriptor()); + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(otherFieldDescriptor); + + repeatedEnumHandler.transformToProtoBuilder(builder, "abc"); + assertEquals("", builder.getField(otherFieldDescriptor)); + } + + @Test + public void shouldReturnSameBuilderIfFieldIsSetAsNull() { + Descriptors.FieldDescriptor descriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestRepeatedEnumMessage.getDescriptor()); + + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(descriptor); + + repeatedEnumHandler.transformToProtoBuilder(builder, null); + assertEquals(Collections.emptyList(), builder.getField(descriptor)); + } + + @Test + public void shouldSetValueIfValueIsArray() { + Descriptors.FieldDescriptor descriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestRepeatedEnumMessage.getDescriptor()); + + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(descriptor); + String[] a = {"FIRST_ENUM_VALUE"}; + + repeatedEnumHandler.transformToProtoBuilder(builder, a); + assertEquals(Collections.singletonList(TestEnumMessage.Enum.FIRST_ENUM_VALUE.getValueDescriptor()), builder.getField(descriptor)); + } + + @Test + public void shouldSetValueIfValueIsList() { + Descriptors.FieldDescriptor descriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestRepeatedEnumMessage.getDescriptor()); + + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(descriptor); + + repeatedEnumHandler.transformToProtoBuilder(builder, Collections.singletonList("FIRST_ENUM_VALUE")); + assertEquals(Collections.singletonList(TestEnumMessage.Enum.FIRST_ENUM_VALUE.getValueDescriptor()), builder.getField(descriptor)); + } + + @Test + public void shouldIncludeDataInProtoSerializedFormat() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor descriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestRepeatedEnumMessage.getDescriptor()); + + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(descriptor); + + repeatedEnumHandler.transformToProtoBuilder(builder, Arrays.asList("FIRST_ENUM_VALUE", "SECOND_ENUM_VALUE")); + byte[] byteData = builder.build().toByteArray(); + DynamicMessage message = 
DynamicMessage.parseFrom(TestRepeatedEnumMessage.getDescriptor(), byteData); + + List expected = Arrays.asList(TestEnumMessage.Enum.FIRST_ENUM_VALUE.getValueDescriptor(), TestEnumMessage.Enum.SECOND_ENUM_VALUE.getValueDescriptor()); + assertEquals(expected, message.getField(descriptor)); + } + + @Test + public void shouldThrowErrorIfEnumValueNotFound() { + Descriptors.FieldDescriptor descriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestRepeatedEnumMessage.getDescriptor()); + + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(descriptor); + EnumFieldNotFoundException exception = Assert.assertThrows(EnumFieldNotFoundException.class, () -> repeatedEnumHandler.transformToProtoBuilder(builder, Collections.singletonList("test_enum"))); + assertEquals("field: test_enum not found in com.gotocompany.dagger.consumer.TestRepeatedEnumMessage.test_enums", exception.getMessage()); + } + + @Test + public void shouldThrowErrorIfValueIsNotListOrArray() { + Descriptors.FieldDescriptor descriptor = TestRepeatedEnumMessage.getDescriptor().findFieldByName("test_enums"); + DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestRepeatedEnumMessage.getDescriptor()); + + RepeatedEnumHandler repeatedEnumHandler = new RepeatedEnumHandler(descriptor); + Assert.assertThrows(ClassCastException.class, () -> repeatedEnumHandler.transformToProtoBuilder(builder, "test_enum")); + } } diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedMessageHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedMessageHandlerTest.java similarity index 87% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedMessageHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedMessageHandlerTest.java index 971bf18d1..6c62e4de8 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedMessageHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedMessageHandlerTest.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; - -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import org.apache.flink.api.common.typeinfo.TypeInformation; -import org.apache.flink.types.Row; +package com.gotocompany.dagger.common.serde.typehandler.repeated; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestFeedbackLogMessage; -import io.odpf.dagger.consumer.TestReason; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestFeedbackLogMessage; +import com.gotocompany.dagger.consumer.TestReason; import net.minidev.json.JSONArray; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.types.Row; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.PrimitiveType; @@ -20,9 +20,7 @@ import java.util.HashMap; import java.util.List; 
-import static org.apache.flink.api.common.typeinfo.Types.OBJECT_ARRAY; -import static org.apache.flink.api.common.typeinfo.Types.ROW_NAMED; -import static org.apache.flink.api.common.typeinfo.Types.STRING; +import static org.apache.flink.api.common.typeinfo.Types.*; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64; import static org.apache.parquet.schema.Types.buildMessage; import static org.apache.parquet.schema.Types.repeatedGroup; @@ -375,4 +373,37 @@ public void shouldReturnEmptyRowArrayWhenTransformFromParquetIsCalledWithSimpleG assertEquals(0, actualRows.length); } + + @Test + public void shouldReturnEmptyArrayOfRowsIfNullPassedForTransformFromProtoUsingCache() { + Descriptors.FieldDescriptor repeatedMessageFieldDescriptor = TestFeedbackLogMessage.getDescriptor().findFieldByName("reason"); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestFeedbackLogMessage.getDescriptor()); + Object[] values = (Object[]) new RepeatedMessageHandler(repeatedMessageFieldDescriptor).transformFromProtoUsingCache(null, fieldDescriptorCache); + + + assertEquals(0, values.length); + } + + @Test + public void shouldReturnArrayOfRowsGivenAListForFieldDescriptorOfTypeRepeatedMessageOfAsDescriptorForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + TestFeedbackLogMessage logMessage = TestFeedbackLogMessage + .newBuilder() + .addReason(TestReason.newBuilder().setReasonId("reason1").setGroupId("group1").build()) + .addReason(TestReason.newBuilder().setReasonId("reason2").setGroupId("group2").build()) + .build(); + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestFeedbackLogMessage.getDescriptor(), logMessage.toByteArray()); + + Descriptors.FieldDescriptor repeatedMessageFieldDescriptor = TestFeedbackLogMessage.getDescriptor().findFieldByName("reason"); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestFeedbackLogMessage.getDescriptor()); + + Object[] values = (Object[]) new RepeatedMessageHandler(repeatedMessageFieldDescriptor).transformFromProtoUsingCache(dynamicMessage.getField(repeatedMessageFieldDescriptor), fieldDescriptorCache); + + assertEquals(repeatedMessageFieldDescriptor.getMessageType().getFields().size(), ((Row) values[0]).getArity()); + assertEquals(repeatedMessageFieldDescriptor.getMessageType().getFields().size(), ((Row) values[1]).getArity()); + assertEquals("reason1", ((Row) values[0]).getField(0)); + assertEquals("group1", ((Row) values[0]).getField(1)); + assertEquals("reason2", ((Row) values[1]).getField(0)); + assertEquals("group2", ((Row) values[1]).getField(1)); + } + } diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandlerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandlerTest.java similarity index 85% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandlerTest.java index 89ec0307e..845549fdd 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedPrimitiveHandlerTest.java @@ -1,5 +1,6 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; +package 
com.gotocompany.dagger.common.serde.typehandler.repeated; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; @@ -7,10 +8,10 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.exceptions.serde.DataTypeNotSupportedException; -import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestRepeatedEnumMessage; +import com.gotocompany.dagger.common.exceptions.serde.DataTypeNotSupportedException; +import com.gotocompany.dagger.common.exceptions.serde.InvalidDataTypeException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestRepeatedEnumMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Assert; @@ -220,6 +221,36 @@ public void shouldThrowUnsupportedDataTypeExceptionInCaseOfInCaseOfEnumForTransf assertEquals("Data type ENUM not supported in primitive type handlers", exception.getMessage()); } + @Test + public void shouldReturnAllFieldsInAListOfObjectsIfMultipleFieldsPassedWithSameTypeAsFieldDescriptorForTransformFromProtoUsingCache() throws InvalidProtocolBufferException { + Descriptors.FieldDescriptor repeatedFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("meta_array"); + RepeatedPrimitiveHandler repeatedPrimitiveHandler = new RepeatedPrimitiveHandler(repeatedFieldDescriptor); + + TestBookingLogMessage goLifeBookingLogMessage = TestBookingLogMessage + .newBuilder() + .addMetaArray("1") + .addMetaArray("2") + .addMetaArray("3") + .build(); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), goLifeBookingLogMessage.toByteArray()); + + String[] outputValues = (String[]) repeatedPrimitiveHandler.transformFromProtoUsingCache(dynamicMessage.getField(repeatedFieldDescriptor), fieldDescriptorCache); + assertArrayEquals(new String[]{"1", "2", "3"}, outputValues); + } + + @Test + public void shouldThrowUnsupportedDataTypeExceptionInCaseOfInCaseOfEnumForTransformFromProtoUsingCache() { + Descriptors.FieldDescriptor fieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("status"); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestBookingLogMessage.getDescriptor()); + + RepeatedPrimitiveHandler repeatedPrimitiveHandler = new RepeatedPrimitiveHandler(fieldDescriptor); + DataTypeNotSupportedException exception = Assert.assertThrows(DataTypeNotSupportedException.class, + () -> repeatedPrimitiveHandler.transformFromProtoUsingCache("CREATED", fieldDescriptorCache)); + assertEquals("Data type ENUM not supported in primitive type handlers", exception.getMessage()); + } + @Test public void shouldReturnTypeInformation() { Descriptors.FieldDescriptor repeatedFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("meta_array"); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandlerTest.java 
b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandlerTest.java similarity index 84% rename from dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandlerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandlerTest.java index f1ee06d40..2192f1e3c 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandlerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/serde/typehandler/repeated/RepeatedStructMessageHandlerTest.java @@ -1,13 +1,14 @@ -package io.odpf.dagger.common.serde.typehandler.repeated; +package com.gotocompany.dagger.common.serde.typehandler.repeated; +import com.gotocompany.dagger.common.core.FieldDescriptorCache; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestNestedRepeatedMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestNestedRepeatedMessage; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.junit.Test; @@ -64,6 +65,15 @@ public void shouldReturnNullForTransformForKafka() { assertNull(repeatedStructMessageHandler.transformFromProto("test")); } + @Test + public void shouldReturnNullForTransformFromProtoUsingCache() { + Descriptors.FieldDescriptor repeatedStructFieldDescriptor = TestNestedRepeatedMessage.getDescriptor().findFieldByName("metadata"); + FieldDescriptorCache fieldDescriptorCache = new FieldDescriptorCache(TestNestedRepeatedMessage.getDescriptor()); + + RepeatedStructMessageHandler repeatedStructMessageHandler = new RepeatedStructMessageHandler(repeatedStructFieldDescriptor); + assertNull(repeatedStructMessageHandler.transformFromProtoUsingCache("test", fieldDescriptorCache)); + } + @Test public void shouldReturnTypeInformation() { Descriptors.FieldDescriptor repeatedStructFieldDescriptor = TestNestedRepeatedMessage.getDescriptor().findFieldByName("metadata"); diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java b/dagger-common/src/test/java/com/gotocompany/dagger/common/watermark/StreamWatermarkAssignerTest.java similarity index 98% rename from dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java rename to dagger-common/src/test/java/com/gotocompany/dagger/common/watermark/StreamWatermarkAssignerTest.java index e98e2d6ba..2685c8c23 100644 --- a/dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java +++ b/dagger-common/src/test/java/com/gotocompany/dagger/common/watermark/StreamWatermarkAssignerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.common.watermark; +package com.gotocompany.dagger.common.watermark; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoDeserializerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoDeserializerTest.java deleted file mode 100644 
index 2f2fed61d..000000000 --- a/dagger-common/src/test/java/io/odpf/dagger/common/serde/proto/deserialization/ProtoDeserializerTest.java +++ /dev/null @@ -1,281 +0,0 @@ -package io.odpf.dagger.common.serde.proto.deserialization; - -import org.apache.flink.api.common.typeinfo.TypeInformation; -import org.apache.flink.api.java.typeutils.RowTypeInfo; -import org.apache.flink.types.Row; - -import com.google.protobuf.Struct; -import com.google.protobuf.Timestamp; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; -import io.odpf.dagger.consumer.TestBookingLogKey; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestBookingStatus; -import io.odpf.dagger.consumer.TestLocation; -import io.odpf.dagger.consumer.TestNestedRepeatedMessage; -import io.odpf.dagger.consumer.TestRoute; -import io.odpf.dagger.consumer.TestServiceType; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static io.odpf.dagger.common.core.Constants.*; -import static org.apache.flink.api.common.typeinfo.Types.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.when; -import static org.mockito.MockitoAnnotations.initMocks; - -public class ProtoDeserializerTest { - - private StencilClientOrchestrator stencilClientOrchestrator; - - - @Mock - private Configuration configuration; - - @Before - public void setUp() { - initMocks(this); - when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); - when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); - stencilClientOrchestrator = new StencilClientOrchestrator(configuration); - } - - @Test - public void shouldAlwaysReturnFalseForEndOfStream() { - assertFalse(new ProtoDeserializer(TestBookingLogKey.class.getTypeName(), 4, "rowtime", stencilClientOrchestrator).isEndOfStream(null)); - } - - @Test - public void shouldReturnProducedType() { - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogKey.class.getTypeName(), 3, "rowtime", stencilClientOrchestrator); - TypeInformation producedType = protoDeserializer.getProducedType(); - assertArrayEquals( - new String[]{"service_type", "order_number", "order_url", "status", "event_timestamp", INTERNAL_VALIDATION_FIELD_KEY, "rowtime"}, - ((RowTypeInfo) producedType).getFieldNames()); - assertArrayEquals( - new TypeInformation[]{STRING, STRING, STRING, STRING, ROW_NAMED(new String[]{"seconds", "nanos"}, LONG, INT), BOOLEAN, SQL_TIMESTAMP}, - ((RowTypeInfo) producedType).getFieldTypes()); - } - - @Test - public void shouldDeserializeProtoAsRowWithSimpleFields() { - String expectedOrderNumber = "111"; - final int expectedIterationNumber = 10; - byte[] protoBytes = TestBookingLogMessage.newBuilder().setOrderNumber(expectedOrderNumber).setCancelReasonId(expectedIterationNumber).build().toByteArray(); - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new 
ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - assertEquals(expectedOrderNumber, row.getField(bookingLogFieldIndex("order_number"))); - assertEquals(expectedIterationNumber, row.getField(bookingLogFieldIndex("cancel_reason_id"))); - } - - @Test - public void shouldAddExtraFieldsToRow() { - String expectedOrderNumber = "111"; - byte[] protoBytes = TestBookingLogMessage - .newBuilder() - .setOrderNumber(expectedOrderNumber) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(1595548800L).setNanos(0).build()) - .build() - .toByteArray(); - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - int size = row.getArity(); - assertEquals(51, size); - assertTrue("Didn't add field at the penultimate index", (Boolean) row.getField(size - 2)); - assertEquals(1595548800000L, ((java.sql.Timestamp) row.getField(size - 1)).getTime()); - } - - @Test - public void shouldDeserializeEnumAsString() { - - byte[] protoBytes = TestBookingLogMessage.newBuilder().setServiceType(TestServiceType.Enum.GO_RIDE).setStatus(TestBookingStatus.Enum.COMPLETED).build().toByteArray(); - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - assertEquals(TestServiceType.Enum.GO_RIDE.toString(), row.getField(bookingLogFieldIndex("service_type"))); - assertEquals(TestBookingStatus.Enum.COMPLETED.toString(), row.getField(bookingLogFieldIndex("status"))); - } - - @Test - public void shouldDeserializeNestedMessagesAsSubRows() { - final int expectedSeconds = 10; - final int expectedNanoSeconds = 10; - final int expectedAccuracy = 111; - final int expectedLatitude = 222; - final int accuracyFieldIndex = 7; - final int latitudeFieldIndex = 2; - Timestamp expectedTimestamp = Timestamp.newBuilder().setSeconds(expectedSeconds).setNanos(expectedNanoSeconds).build(); - TestLocation testLocation = TestLocation.newBuilder().setAccuracyMeter(expectedAccuracy).setLatitude(expectedLatitude).build(); - byte[] protoBytes = TestBookingLogMessage.newBuilder() - .setEventTimestamp(expectedTimestamp) - .setDriverPickupLocation(testLocation).build().toByteArray(); - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - Row eventTimestampRow = (Row) row.getField(bookingLogFieldIndex("event_timestamp")); - assertEquals(expectedTimestamp.getSeconds(), eventTimestampRow.getField(0)); - assertEquals(expectedTimestamp.getNanos(), eventTimestampRow.getField(1)); - - Row locationRow = (Row) row.getField(bookingLogFieldIndex("driver_pickup_location")); - assertEquals(testLocation.getAccuracyMeter(), locationRow.getField(accuracyFieldIndex)); - assertEquals(testLocation.getLatitude(), locationRow.getField(latitudeFieldIndex)); - } - - @Test - public void shouldDeserializeArrayOfObjectAsSubRows() throws IOException { - byte[] protoBytes = TestBookingLogMessage.newBuilder() - .setOrderNumber("EXAMPLE_ORDER_1") - .addRoutes(TestRoute.newBuilder().setDistanceInKms(1.0f).setRouteOrder(4).build()) - 
.addRoutes(TestRoute.newBuilder().setDistanceInKms(2.0f).setRouteOrder(5).build()) - .addRoutes(TestRoute.newBuilder().setDistanceInKms(3.0f).setRouteOrder(6).build()) - .build().toByteArray(); - - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - Object[] routes = (Object[]) row.getField(bookingLogFieldIndex("routes")); - Row firstRouteRow = (Row) routes[0]; - assertEquals(firstRouteRow.getField(routeFieldIndex("distance_in_kms")), 1.0f); - assertEquals(firstRouteRow.getField(routeFieldIndex("route_order")), 4); - - Row secondRouteRow = (Row) routes[1]; - assertEquals(secondRouteRow.getField(routeFieldIndex("distance_in_kms")), 2.0f); - assertEquals(secondRouteRow.getField(routeFieldIndex("route_order")), 5); - - Row thirdRouteRow = (Row) routes[2]; - assertEquals(thirdRouteRow.getField(routeFieldIndex("distance_in_kms")), 3.0f); - assertEquals(thirdRouteRow.getField(routeFieldIndex("route_order")), 6); - } - - @Test - public void shouldDeserializeArrayOfString() throws IOException { - byte[] protoBytes = TestBookingLogMessage.newBuilder() - .setOrderNumber("EXAMPLE-ID-01") - .addMetaArray("EXAMPLE-REGISTERED-DEVICE-01") - .addMetaArray("EXAMPLE-REGISTERED-DEVICE-02") - .addMetaArray("EXAMPLE-REGISTERED-DEVICE-03") - .addMetaArray("EXAMPLE-REGISTERED-DEVICE-04") - .build().toByteArray(); - - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - String[] strings = (String[]) row.getField(bookingLogFieldIndex("meta_array")); - - assertEquals("EXAMPLE-REGISTERED-DEVICE-01", strings[0]); - assertEquals("EXAMPLE-REGISTERED-DEVICE-02", strings[1]); - assertEquals("EXAMPLE-REGISTERED-DEVICE-03", strings[2]); - assertEquals("EXAMPLE-REGISTERED-DEVICE-04", strings[3]); - - } - - @Test - public void shouldDeserializeProtobufMapAsSubRows() throws IOException { - String orderNumber = "1"; - Map currentState = new HashMap(); - currentState.put("force_close", "true"); - currentState.put("image", "example.png"); - - byte[] protoBytes = TestBookingLogMessage.newBuilder() - .putAllMetadata(currentState) - .setOrderNumber(orderNumber).build().toByteArray(); - - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - Object[] currentStateRowList = (Object[]) row.getField(bookingLogFieldIndex("metadata")); - - assertTrue(currentState.keySet().contains(((Row) currentStateRowList[0]).getField(0))); - assertTrue(currentState.values().contains(((Row) currentStateRowList[0]).getField(1))); - assertTrue(currentState.keySet().contains(((Row) currentStateRowList[1]).getField(0))); - assertTrue(currentState.values().contains(((Row) currentStateRowList[1]).getField(1))); - - assertEquals(orderNumber, row.getField(bookingLogFieldIndex("order_number"))); - } - - @Test - public void shouldDeserializeProtobufMapOfNullValueAsSubRows() throws IOException { - String orderNumber = "1"; - Map metaData = new HashMap(); - metaData.put("force_close", "true"); - metaData.put("image", ""); - - byte[] protoBytes = 
TestBookingLogMessage.newBuilder() - .putAllMetadata(metaData) - .setOrderNumber(orderNumber).build().toByteArray(); - - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - - Object[] currentStateRowList = (Object[]) row.getField(bookingLogFieldIndex("metadata")); - - assertTrue(metaData.keySet().contains(((Row) currentStateRowList[0]).getField(0))); - assertTrue(metaData.values().contains(((Row) currentStateRowList[0]).getField(1))); - assertTrue(metaData.keySet().contains(((Row) currentStateRowList[1]).getField(0))); - assertTrue(metaData.values().contains(((Row) currentStateRowList[1]).getField(1))); - - assertEquals(orderNumber, row.getField(bookingLogFieldIndex("order_number"))); - } - - @Test - public void shouldIgnoreStructWhileDeserialising() { - byte[] protoBytes = TestNestedRepeatedMessage.newBuilder() - .addMetadata(Struct.getDefaultInstance()) - .addMetadata(Struct.getDefaultInstance()) - .setNumberField(5) - .build().toByteArray(); - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestNestedRepeatedMessage.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator); - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, protoBytes)); - assertNull(row.getField(4)); - assertEquals(row.getField(2), 5); - } - - @Test - public void shouldThrowExceptionIfNotAbleToDeserialise() { - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestNestedRepeatedMessage.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator); - assertThrows(DaggerDeserializationException.class, - () -> protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, null))); - } - - @Test - public void shouldReturnInvalidRow() { - ProtoDeserializer protoDeserializer = new ProtoDeserializer(TestBookingLogMessage.class.getTypeName(), 5, "rowtime", stencilClientOrchestrator); - Row row = protoDeserializer.deserialize(new ConsumerRecord<>("test-topic", 0, 0, null, "test".getBytes())); - assertFalse((boolean) row.getField(row.getArity() - 2)); - assertEquals(new java.sql.Timestamp(0), row.getField(row.getArity() - 1)); - } - - @Test - public void shouldThrowDescriptorNotFoundExceptionForStringClass() { - assertThrows(DescriptorNotFoundException.class, - () -> new ProtoDeserializer(String.class.getTypeName(), 6, "rowtime", stencilClientOrchestrator)); - } - - private int bookingLogFieldIndex(String propertyName) { - return TestBookingLogMessage.getDescriptor().findFieldByName(propertyName).getIndex(); - } - - private int routeFieldIndex(String propertyName) { - return TestRoute.getDescriptor().findFieldByName(propertyName).getIndex(); - } - - -} diff --git a/dagger-common/src/test/proto/TestGrpc.proto b/dagger-common/src/test/proto/TestGrpc.proto index d1f216f1b..bb9e7d3a8 100644 --- a/dagger-common/src/test/proto/TestGrpc.proto +++ b/dagger-common/src/test/proto/TestGrpc.proto @@ -1,9 +1,9 @@ syntax = "proto3"; -package io.odpf.dagger.consumer; +package com.gotocompany.dagger.consumer; option java_multiple_files = true; -option java_package = "io.odpf.dagger.consumer"; +option java_package = "com.gotocompany.dagger.consumer"; option java_outer_classname = "SampleGrpcServerProto"; service TestServer { diff --git a/dagger-common/src/test/proto/TestLogMessage.proto b/dagger-common/src/test/proto/TestLogMessage.proto index 362bcd908..216e3c6a2 100644 
--- a/dagger-common/src/test/proto/TestLogMessage.proto +++ b/dagger-common/src/test/proto/TestLogMessage.proto @@ -1,9 +1,9 @@ syntax = "proto3"; -package io.odpf.dagger.consumer; +package com.gotocompany.dagger.consumer; option java_multiple_files = true; -option java_package = "io.odpf.dagger.consumer"; +option java_package = "com.gotocompany.dagger.consumer"; option java_outer_classname = "TestLogMessageProto"; import "google/protobuf/struct.proto"; diff --git a/dagger-common/src/test/proto/TestMessage.proto b/dagger-common/src/test/proto/TestMessage.proto index 64075a06f..59978a8a5 100644 --- a/dagger-common/src/test/proto/TestMessage.proto +++ b/dagger-common/src/test/proto/TestMessage.proto @@ -1,9 +1,9 @@ syntax = "proto3"; -package io.odpf.dagger.consumer; +package com.gotocompany.dagger.consumer; option java_multiple_files = true; -option java_package = "io.odpf.dagger.consumer"; +option java_package = "com.gotocompany.dagger.consumer"; option java_outer_classname = "TestMessageProto"; import "google/protobuf/struct.proto"; diff --git a/dagger-core/build.gradle b/dagger-core/build.gradle index a4f33cedb..d7af1dae0 100644 --- a/dagger-core/build.gradle +++ b/dagger-core/build.gradle @@ -7,17 +7,20 @@ buildscript { maven { url "https://plugins.gradle.org/m2/" } + maven { + url 'https://repo1.maven.org/maven2' + } } dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:6.0.0' - classpath "org.jfrog.buildinfo:build-info-extractor-gradle:4.17.0" + classpath 'com.github.jengelman.gradle.plugins:shadow:6.1.0' + classpath "org.jfrog.buildinfo:build-info-extractor-gradle:4.33.1" } } plugins { id 'maven-publish' - id 'com.jfrog.artifactory' version '4.17.0' - id 'com.github.johnrengelman.shadow' version '6.0.0' + id 'com.jfrog.artifactory' version '4.33.1' + id 'com.github.johnrengelman.shadow' version '6.1.0' } @@ -26,18 +29,21 @@ def flinkVersion = rootProject.flinkVersion version = rootProject.file('version.txt').text.trim() def minimalVersion = version -def dependenciesVersion = '0.2.2' +def dependenciesVersion = "0.5.3" description = """dagger to the heart!""" -sourceCompatibility = 1.8 -targetCompatibility = 1.8 + +java { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 +} tasks.withType(JavaCompile) { options.encoding = 'UTF-8' } -def mainClassName = "io.odpf.dagger.core.KafkaProtoSQLProcessor" +def mainClassName = "com.gotocompany.dagger.core.KafkaProtoSQLProcessor" configurations { minimalJar @@ -61,46 +67,63 @@ configurations { dependencies { minimalJar project(path: ':dagger-common', configuration: 'minimalCommonJar') minimalJar project(path: ':dagger-functions', configuration: 'minimalFunctionsJar') - minimalJar('io.odpf:depot:0.3.1') { - exclude group: 'org.apache.httpcomponents' - exclude module: 'stencil', group: 'io.odpf' - exclude group: 'com.google.protobuf' - exclude group: 'com.datadoghq' + minimalJar('com.gotocompany:depot:0.9.2') { + exclude group: 'com.google.cloud', module: 'google-cloud-bigtable' + exclude group: 'com.google.cloud', module: 'google-cloud-bigquerystorage' + exclude group: 'com.google.cloud', module: 'google-cloud-bigquery' + exclude group: 'com.google.protobuf', module: 'protobuf-java' + exclude group: 'com.google.protobuf', module: 'protobuf-java-util' + exclude group: "io.grpc" + exclude group: 'com.squareup.okhttp3', module: 'okhttp' + } + minimalJar('com.gotocompany:stencil:0.6.0') { + exclude group: 'com.google.protobuf', module: 'protobuf-java' + exclude group: 
'com.google.protobuf', module: 'protobuf-java-util' } compileOnly 'org.projectlombok:lombok:1.18.8' annotationProcessor 'org.projectlombok:lombok:1.18.8' - implementation 'org.slf4j:slf4j-log4j12:1.7.7' + implementation group: 'org.slf4j', name: 'slf4j-reload4j', version: '2.0.7' implementation 'org.apache.flink:flink-streaming-java_2.11:' + flinkVersion implementation 'org.apache.flink:flink-clients_2.11:' + flinkVersion implementation 'org.apache.flink:flink-table:' + flinkVersion implementation 'org.apache.flink:flink-table-api-java-bridge_2.11:' + flinkVersion implementation 'org.apache.flink:flink-table-planner_2.11:' + flinkVersion + dependenciesJar 'io.grpc:grpc-all:1.55.1' dependenciesJar project(path: ':dagger-common', configuration: 'dependenciesCommonJar') dependenciesJar project(path: ':dagger-functions', configuration: 'dependenciesFunctionsJar') - + dependenciesJar(group: 'com.google.cloud', name: 'google-cloud-bigquerystorage', version: '2.39.1') { + exclude group: "io.grpc" + } + dependenciesJar(group: 'com.google.cloud', name: 'google-cloud-bigtable', version: '2.24.1') { + exclude group: "io.grpc" + } + dependenciesJar(group: 'com.google.cloud', name: 'google-cloud-bigquery', version: '2.29.0') { + exclude group: "io.grpc" + } dependenciesJar 'org.apache.flink:flink-connector-kafka_2.11:' + flinkVersion - dependenciesJar 'com.google.protobuf:protobuf-java:3.1.0' + dependenciesJar 'com.google.protobuf:protobuf-java:3.23.2' dependenciesJar 'com.google.protobuf:protobuf-java-util:3.1.0' dependenciesJar 'org.influxdb:influxdb-java:2.8' dependenciesJar 'org.elasticsearch.client:elasticsearch-rest-client:6.6.1' - dependenciesJar 'com.google.cloud.bigtable:bigtable-hbase-2.x:1.11.0' + dependenciesJar 'com.google.cloud.bigtable:bigtable-hbase-2.x:2.10.0' dependenciesJar 'org.asynchttpclient:async-http-client:2.10.1' dependenciesJar 'io.vertx:vertx-pg-client:3.9.0' dependenciesJar 'org.apache.commons:commons-pool2:2.4.3' dependenciesJar 'org.apache.parquet:parquet-protobuf:1.12.2' testImplementation project(':dagger-common').sourceSets.test.output - testImplementation 'junit:junit:4.13' + testImplementation 'junit:junit:4.13.1' testImplementation 'org.apache.flink:flink-test-utils_2.11:' + flinkVersion - testImplementation 'org.apache.kafka:kafka-clients:2.5.0' + testImplementation 'org.apache.kafka:kafka-clients:2.6.3' testImplementation 'com.github.tomakehurst:wiremock-standalone:2.27.0' testImplementation 'org.jmockit:jmockit:1.25' testImplementation 'org.mockito:mockito-core:2.25.1' + testImplementation 'io.grpc:grpc-all:1.55.1' testImplementation 'org.powermock:powermock-module-junit4:2.0.0-beta.5' testImplementation 'org.powermock:powermock-api-mockito2:2.0.0-beta.5' - testImplementation 'com.google.guava:guava:27.0.1-jre' + testImplementation 'com.google.guava:guava:30.0-jre' testImplementation 'org.grpcmock:grpcmock-junit5:0.5.0' testImplementation 'com.github.stefanbirkner:system-rules:1.19.0' } @@ -162,20 +185,7 @@ jar { } } -shadowJar { - mergeServiceFiles() - minimize { - exclude(dependency('io.vertx:vertx-pg-client:.*')) - exclude(dependency('com.google.cloud.bigtable:bigtable-hbase-2.x:.*')) - } - zip64 true - configurations = [project.configurations.minimalJar, project.configurations.dependenciesJar] - relocate('com.google.protobuf', 'shaded.com.google.protobuf') { - exclude 'combine.self="override"' - } -} - -task minimalJar(type: ShadowJar) { +tasks.register('minimalJar', ShadowJar) { manifest.attributes 'Main-Class': mainClassName archiveClassifier = 'minimal' 
zip64 true @@ -186,7 +196,7 @@ task minimalJar(type: ShadowJar) { exclude("core-site.xml") } -task dependenciesJar(type: ShadowJar) { +tasks.register('dependenciesJar', ShadowJar) { mergeServiceFiles() manifest.attributes 'Main-Class': mainClassName archiveClassifier = 'dependencies' @@ -196,7 +206,7 @@ task dependenciesJar(type: ShadowJar) { archiveVersion = dependenciesVersion } -task fatJar(type: ShadowJar) { +tasks.register('fatJar', ShadowJar) { description = "Builds a executable jar" manifest.attributes 'Main-Class': mainClassName archiveClassifier = 'fat' @@ -216,10 +226,6 @@ private Properties loadEnv() { publishing { publications { - shadow(MavenPublication) { - publication -> - project.shadow.component(publication) - } minimalArtifact(MavenPublication) { artifact file("$buildDir/libs/dagger-core-${minimalVersion}-minimal.jar") groupId project.group @@ -237,7 +243,7 @@ publishing { repositories { maven { name = "GitHubPackages" - url = "https://maven.pkg.github.com/odpf/dagger" + url = "https://maven.pkg.github.com/goto/dagger" credentials { username = System.getenv("GITHUB_ACTOR") password = System.getenv("GITHUB_TOKEN") @@ -263,9 +269,14 @@ artifactory { clientConfig.info.setBuildNumber(System.env.BUILD_NUMBER) } -task minimalAndShadowPublish(dependsOn: 'minimalJar') { - dependsOn('publishMinimalArtifactPublicationToGitHubPackagesRepository', 'publishShadowPublicationToGitHubPackagesRepository') - description('Publishes minimal and shadow jar') +task minimalPublish(dependsOn: 'minimalJar') { + dependsOn('publishMinimalArtifactPublicationToGitHubPackagesRepository') + description('Publishes minimal jar') +} + +task minimalPublishToMavenLocal(dependsOn: 'minimalJar') { + dependsOn('publishMinimalArtifactPublicationToMavenLocal') + description('Publishes minimal jar to Maven Local') } task dependenciesPublish(dependsOn: 'dependenciesJar') { @@ -273,13 +284,17 @@ task dependenciesPublish(dependsOn: 'dependenciesJar') { description('Publishes dependencies jar') } +task dependenciesPublishToMavenLocal(dependsOn: 'dependenciesJar') { + dependsOn('publishDependenciesArtifactPublicationToMavenLocal') + description('Publishes dependencies jar to Maven Local') +} + project.afterEvaluate { tasks.withType(PublishToMavenLocal) { dependsOn minimalJar, dependenciesJar } } - task runFlink(type: JavaExec, dependsOn: classes) { Properties properties = loadEnv() systemProperties['ConfigSource'] = "ENVIRONMENT" @@ -292,4 +307,3 @@ task runFlink(type: JavaExec, dependsOn: classes) { classpath = sourceSets.main.runtimeClasspath environment properties } - diff --git a/dagger-core/env/local.properties b/dagger-core/env/local.properties index 840190dc7..fe7458812 100644 --- a/dagger-core/env/local.properties +++ b/dagger-core/env/local.properties @@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT event_timestamp, is_valid, order_number from data_stream FLINK_WATERMARK_INTERVAL_MS=10000 FLINK_WATERMARK_DELAY_MS=0 # == Input Stream == -STREAMS=[ { "SOURCE_KAFKA_TOPIC_NAMES": "dagger-test-topic-v1", "INPUT_SCHEMA_TABLE": "data_stream", "INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestPrimitiveMessage", "INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX": "9", "SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS": "localhost:9092", "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE": "false", "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET": "latest", "SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID": "dagger-test-topic-cgroup-v1", "SOURCE_KAFKA_NAME": "local-kafka-stream", "SOURCE_DETAILS": [ { "SOURCE_TYPE": 
"UNBOUNDED", "SOURCE_NAME": "KAFKA_CONSUMER" } ] } ] +STREAMS=[ { "SOURCE_KAFKA_TOPIC_NAMES": "dagger-test-topic-v1", "INPUT_SCHEMA_TABLE": "data_stream", "INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestPrimitiveMessage", "INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX": "9", "SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS": "localhost:9092", "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE": "false", "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET": "latest", "SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID": "dagger-test-topic-cgroup-v1", "SOURCE_KAFKA_NAME": "local-kafka-stream", "SOURCE_DETAILS": [ { "SOURCE_TYPE": "UNBOUNDED", "SOURCE_NAME": "KAFKA_CONSUMER" } ] } ] # == Preprocessor == PROCESSOR_PREPROCESSOR_ENABLE=false @@ -25,7 +25,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000 METRIC_TELEMETRY_ENABLE=true # == Others == -FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory +FUNCTION_FACTORY_CLASSES=FunctionFactory FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime # == Python Udf == diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/DaggerSqlJobBuilder.java similarity index 71% rename from dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/DaggerSqlJobBuilder.java index 72df6b6ce..12c966be7 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/DaggerSqlJobBuilder.java @@ -1,26 +1,26 @@ -package io.odpf.dagger.core; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.DaggerContext; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.udfs.UdfFactory; -import io.odpf.dagger.common.watermark.LastColumnWatermark; -import io.odpf.dagger.common.watermark.NoWatermark; -import io.odpf.dagger.common.watermark.StreamWatermarkAssigner; -import io.odpf.dagger.common.watermark.WatermarkStrategyDefinition; -import io.odpf.dagger.core.exception.UDFFactoryClassNotDefinedException; -import io.odpf.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; -import io.odpf.dagger.core.processors.PostProcessorFactory; -import io.odpf.dagger.core.processors.PreProcessorFactory; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.processors.types.Preprocessor; -import io.odpf.dagger.core.sink.SinkOrchestrator; -import io.odpf.dagger.core.source.StreamsFactory; -import io.odpf.dagger.core.utils.Constants; -import io.odpf.dagger.functions.udfs.python.PythonUdfConfig; -import io.odpf.dagger.functions.udfs.python.PythonUdfManager; +package com.gotocompany.dagger.core; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.udfs.UdfFactory; +import com.gotocompany.dagger.common.watermark.LastColumnWatermark; +import com.gotocompany.dagger.common.watermark.NoWatermark; +import com.gotocompany.dagger.common.watermark.StreamWatermarkAssigner; +import com.gotocompany.dagger.common.watermark.WatermarkStrategyDefinition; +import 
com.gotocompany.dagger.core.exception.UDFFactoryClassNotDefinedException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.dagger.core.processors.PostProcessorFactory; +import com.gotocompany.dagger.core.processors.PreProcessorFactory; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.sink.SinkOrchestrator; +import com.gotocompany.dagger.core.source.StreamsFactory; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.functions.udfs.python.PythonUdfConfig; +import com.gotocompany.dagger.functions.udfs.python.PythonUdfManager; import org.apache.flink.streaming.api.CheckpointingMode; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; @@ -36,15 +36,11 @@ import java.time.Duration; import java.util.List; -import static io.odpf.dagger.core.utils.Constants.*; -import static io.odpf.dagger.functions.common.Constants.PYTHON_UDF_ENABLE_DEFAULT; -import static io.odpf.dagger.functions.common.Constants.PYTHON_UDF_ENABLE_KEY; +import static com.gotocompany.dagger.functions.common.Constants.PYTHON_UDF_ENABLE_DEFAULT; +import static com.gotocompany.dagger.functions.common.Constants.PYTHON_UDF_ENABLE_KEY; import static org.apache.flink.table.api.Expressions.$; -/** - * The Stream manager. - */ -public class StreamManager { +public class DaggerSqlJobBuilder implements JobBuilder { private final Configuration configuration; private final StreamExecutionEnvironment executionEnvironment; @@ -56,11 +52,11 @@ public class StreamManager { private final DaggerContext daggerContext; /** - * Instantiates a new Stream manager. + * Instantiates dagger sql job-builder. 
* - * @param daggerContext the daggerContext in form of param + * @param daggerContext the daggerContext in form of param */ - public StreamManager(DaggerContext daggerContext) { + public DaggerSqlJobBuilder(DaggerContext daggerContext) { this.daggerContext = daggerContext; this.configuration = daggerContext.getConfiguration(); this.executionEnvironment = daggerContext.getExecutionEnvironment(); @@ -72,23 +68,24 @@ public StreamManager(DaggerContext daggerContext) { * * @return the stream manager */ - public StreamManager registerConfigs() { + @Override + public JobBuilder registerConfigs() { stencilClientOrchestrator = new StencilClientOrchestrator(configuration); org.apache.flink.configuration.Configuration flinkConfiguration = (org.apache.flink.configuration.Configuration) this.executionEnvironment.getConfiguration(); daggerStatsDReporter = DaggerStatsDReporter.Provider.provide(flinkConfiguration, configuration); executionEnvironment.setMaxParallelism(configuration.getInteger(Constants.FLINK_PARALLELISM_MAX_KEY, Constants.FLINK_PARALLELISM_MAX_DEFAULT)); - executionEnvironment.setParallelism(configuration.getInteger(FLINK_PARALLELISM_KEY, FLINK_PARALLELISM_DEFAULT)); - executionEnvironment.getConfig().setAutoWatermarkInterval(configuration.getInteger(FLINK_WATERMARK_INTERVAL_MS_KEY, FLINK_WATERMARK_INTERVAL_MS_DEFAULT)); + executionEnvironment.setParallelism(configuration.getInteger(Constants.FLINK_PARALLELISM_KEY, Constants.FLINK_PARALLELISM_DEFAULT)); + executionEnvironment.getConfig().setAutoWatermarkInterval(configuration.getInteger(Constants.FLINK_WATERMARK_INTERVAL_MS_KEY, Constants.FLINK_WATERMARK_INTERVAL_MS_DEFAULT)); executionEnvironment.getCheckpointConfig().setTolerableCheckpointFailureNumber(Integer.MAX_VALUE); - executionEnvironment.enableCheckpointing(configuration.getLong(FLINK_CHECKPOINT_INTERVAL_MS_KEY, FLINK_CHECKPOINT_INTERVAL_MS_DEFAULT)); + executionEnvironment.enableCheckpointing(configuration.getLong(Constants.FLINK_CHECKPOINT_INTERVAL_MS_KEY, Constants.FLINK_CHECKPOINT_INTERVAL_MS_DEFAULT)); executionEnvironment.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE); - executionEnvironment.getCheckpointConfig().setCheckpointTimeout(configuration.getLong(FLINK_CHECKPOINT_TIMEOUT_MS_KEY, FLINK_CHECKPOINT_TIMEOUT_MS_DEFAULT)); - executionEnvironment.getCheckpointConfig().setMinPauseBetweenCheckpoints(configuration.getLong(FLINK_CHECKPOINT_MIN_PAUSE_MS_KEY, FLINK_CHECKPOINT_MIN_PAUSE_MS_DEFAULT)); - executionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(configuration.getInteger(FLINK_CHECKPOINT_MAX_CONCURRENT_KEY, FLINK_CHECKPOINT_MAX_CONCURRENT_DEFAULT)); + executionEnvironment.getCheckpointConfig().setCheckpointTimeout(configuration.getLong(Constants.FLINK_CHECKPOINT_TIMEOUT_MS_KEY, Constants.FLINK_CHECKPOINT_TIMEOUT_MS_DEFAULT)); + executionEnvironment.getCheckpointConfig().setMinPauseBetweenCheckpoints(configuration.getLong(Constants.FLINK_CHECKPOINT_MIN_PAUSE_MS_KEY, Constants.FLINK_CHECKPOINT_MIN_PAUSE_MS_DEFAULT)); + executionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(configuration.getInteger(Constants.FLINK_CHECKPOINT_MAX_CONCURRENT_KEY, Constants.FLINK_CHECKPOINT_MAX_CONCURRENT_DEFAULT)); executionEnvironment.getConfig().setGlobalJobParameters(configuration.getParam()); - tableEnvironment.getConfig().setIdleStateRetention(Duration.ofMinutes(configuration.getInteger(FLINK_RETENTION_IDLE_STATE_MINUTE_KEY, FLINK_RETENTION_IDLE_STATE_MINUTE_DEFAULT))); + 
tableEnvironment.getConfig().setIdleStateRetention(Duration.ofMinutes(configuration.getInteger(Constants.FLINK_RETENTION_IDLE_STATE_MINUTE_KEY, Constants.FLINK_RETENTION_IDLE_STATE_MINUTE_DEFAULT))); return this; } @@ -97,9 +94,10 @@ public StreamManager registerConfigs() { * * @return the stream manager */ - public StreamManager registerSourceWithPreProcessors() { - long watermarkDelay = configuration.getLong(FLINK_WATERMARK_DELAY_MS_KEY, FLINK_WATERMARK_DELAY_MS_DEFAULT); - Boolean enablePerPartitionWatermark = configuration.getBoolean(FLINK_WATERMARK_PER_PARTITION_ENABLE_KEY, FLINK_WATERMARK_PER_PARTITION_ENABLE_DEFAULT); + @Override + public JobBuilder registerSourceWithPreProcessors() { + long watermarkDelay = configuration.getLong(Constants.FLINK_WATERMARK_DELAY_MS_KEY, Constants.FLINK_WATERMARK_DELAY_MS_DEFAULT); + Boolean enablePerPartitionWatermark = configuration.getBoolean(Constants.FLINK_WATERMARK_PER_PARTITION_ENABLE_KEY, Constants.FLINK_WATERMARK_PER_PARTITION_ENABLE_DEFAULT); StreamsFactory.getStreams(configuration, stencilClientOrchestrator, daggerStatsDReporter) .forEach(stream -> { String tableName = stream.getStreamName(); @@ -125,7 +123,7 @@ private WatermarkStrategyDefinition getSourceWatermarkDefinition(Boolean enableP } private ApiExpression[] getApiExpressions(StreamInfo streamInfo) { - String rowTimeAttributeName = configuration.getString(FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); + String rowTimeAttributeName = configuration.getString(Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); String[] columnNames = streamInfo.getColumnNames(); ApiExpression[] expressions = new ApiExpression[columnNames.length]; if (columnNames.length == 0) { @@ -145,10 +143,11 @@ private ApiExpression[] getApiExpressions(StreamInfo streamInfo) { * * @return the stream manager */ - public StreamManager registerFunctions() throws IOException { + @Override + public JobBuilder registerFunctions() throws IOException { if (configuration.getBoolean(PYTHON_UDF_ENABLE_KEY, PYTHON_UDF_ENABLE_DEFAULT)) { PythonUdfConfig pythonUdfConfig = PythonUdfConfig.parse(configuration); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, configuration); pythonUdfManager.registerPythonFunctions(); } @@ -179,7 +178,8 @@ private UdfFactory getUdfFactory(String udfFactoryClassName) throws ClassNotFoun * * @return the stream manager */ - public StreamManager registerOutputStream() { + @Override + public JobBuilder registerOutputStream() { Table table = tableEnvironment.sqlQuery(configuration.getString(Constants.FLINK_SQL_QUERY_KEY, Constants.FLINK_SQL_QUERY_DEFAULT)); StreamInfo streamInfo = createStreamInfo(table); streamInfo = addPostProcessor(streamInfo); @@ -192,8 +192,9 @@ public StreamManager registerOutputStream() { * * @throws Exception the exception */ + @Override public void execute() throws Exception { - executionEnvironment.execute(configuration.getString(FLINK_JOB_ID_KEY, FLINK_JOB_ID_DEFAULT)); + executionEnvironment.execute(configuration.getString(Constants.FLINK_JOB_ID_KEY, Constants.FLINK_JOB_ID_DEFAULT)); } /** diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/ExampleStreamApiJobBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/ExampleStreamApiJobBuilder.java new file mode 100644 index 000000000..811843602 --- /dev/null +++ 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/ExampleStreamApiJobBuilder.java @@ -0,0 +1,156 @@ +package com.gotocompany.dagger.core; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.watermark.LastColumnWatermark; +import com.gotocompany.dagger.common.watermark.StreamWatermarkAssigner; +import com.gotocompany.dagger.common.watermark.WatermarkStrategyDefinition; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.dagger.core.processors.PreProcessorFactory; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.sink.SinkOrchestrator; +import com.gotocompany.dagger.core.source.StreamsFactory; +import com.gotocompany.dagger.core.utils.Constants; +import org.apache.flink.streaming.api.CheckpointingMode; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.TableSchema; +import org.apache.flink.types.Row; +import org.apache.flink.util.Preconditions; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ExampleStreamApiJobBuilder implements JobBuilder { + +// static final String KEY_PATH = "meta.customer.id"; + + private final String inputStreamName1 = "data_streams_0"; + private final String inputStreamName2 = "data_streams_1"; + private final Map dataStreams = new HashMap<>(); + + private final DaggerContext daggerContext; + private final Configuration configuration; + private final StreamExecutionEnvironment executionEnvironment; + private StencilClientOrchestrator stencilClientOrchestrator; + private DaggerStatsDReporter daggerStatsDReporter; + private final MetricsTelemetryExporter telemetryExporter = new MetricsTelemetryExporter(); + + public ExampleStreamApiJobBuilder(DaggerContext daggerContext) { + this.daggerContext = daggerContext; + this.configuration = daggerContext.getConfiguration(); + this.executionEnvironment = daggerContext.getExecutionEnvironment(); + } + + @Override + public JobBuilder registerConfigs() { + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + org.apache.flink.configuration.Configuration flinkConfiguration = (org.apache.flink.configuration.Configuration) this.executionEnvironment.getConfiguration(); + daggerStatsDReporter = DaggerStatsDReporter.Provider.provide(flinkConfiguration, configuration); + + executionEnvironment.setMaxParallelism(configuration.getInteger(Constants.FLINK_PARALLELISM_MAX_KEY, Constants.FLINK_PARALLELISM_MAX_DEFAULT)); + executionEnvironment.getCheckpointConfig().setTolerableCheckpointFailureNumber(Integer.MAX_VALUE); + executionEnvironment.enableCheckpointing(configuration.getLong(Constants.FLINK_CHECKPOINT_INTERVAL_MS_KEY, Constants.FLINK_CHECKPOINT_INTERVAL_MS_DEFAULT)); + executionEnvironment.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE); + + // goes on... 
+ executionEnvironment.getConfig().setGlobalJobParameters(configuration.getParam()); + return this; + } + + @Override + public JobBuilder registerSourceWithPreProcessors() { + long watermarkDelay = configuration.getLong(Constants.FLINK_WATERMARK_DELAY_MS_KEY, Constants.FLINK_WATERMARK_DELAY_MS_DEFAULT); + Boolean enablePerPartitionWatermark = configuration.getBoolean(Constants.FLINK_WATERMARK_PER_PARTITION_ENABLE_KEY, Constants.FLINK_WATERMARK_PER_PARTITION_ENABLE_DEFAULT); + + StreamsFactory.getStreams(configuration, stencilClientOrchestrator, daggerStatsDReporter) + .forEach(stream -> { + String tableName = stream.getStreamName(); + + WatermarkStrategyDefinition watermarkStrategyDefinition = new LastColumnWatermark(); + + DataStream dataStream = stream.registerSource(executionEnvironment, watermarkStrategyDefinition.getWatermarkStrategy(watermarkDelay)); + StreamWatermarkAssigner streamWatermarkAssigner = new StreamWatermarkAssigner(new LastColumnWatermark()); + + DataStream dataStream1 = streamWatermarkAssigner + .assignTimeStampAndWatermark(dataStream, watermarkDelay, enablePerPartitionWatermark); + + + // just some legacy objects to adopt preprocessors + TableSchema tableSchema = TableSchema.fromTypeInfo(dataStream.getType()); + StreamInfo streamInfo = new StreamInfo(dataStream1, tableSchema.getFieldNames()); + streamInfo = addPreProcessor(streamInfo, tableName); + + if (tableName.equals(inputStreamName1)) { + dataStreams.put(inputStreamName1, streamInfo); + } + if (tableName.equals(inputStreamName2)) { + dataStreams.put(inputStreamName2, streamInfo); + } + }); + return this; + } + + @Override + public JobBuilder registerFunctions() throws IOException { + return this; + } + + @Override + public JobBuilder registerOutputStream() { + // NOTE - GET THE DATASTREAM REFERENCE + StreamInfo streamInfo = dataStreams.get(inputStreamName1); + Preconditions.checkNotNull(streamInfo, "Expected page log stream to be registered with name %s", inputStreamName1); + + DataStream inputStream = streamInfo.getDataStream(); + + SinkOrchestrator sinkOrchestrator = new SinkOrchestrator(telemetryExporter); + sinkOrchestrator.addSubscriber(telemetryExporter); + + SingleOutputStreamOperator outputStream = + inputStream + + // NOTE - USE THE FLINK STREAM APIS HERE AND SINK THE OUTPUT + +// .keyBy( +// new KeySelector() { +// private KeyExtractor keyExtractor; +// +// @Override +// public Integer getKey(Row row) { +// if (keyExtractor == null) { +// keyExtractor = new KeyExtractor(row, KEY_PATH); +// } +// int userId = keyExtractor.extract(row); +// return userId % DAU_PARALLELISM; +// } +// }) +// .process(new ShardedDistinctUserCounter()) +// .keyBy(r -> 0) // move all the output to one operator to calculate aggregation of all +// .process(new UserCounterAggregator()); + .keyBy(r -> 0) + .max("someField"); + + outputStream.sinkTo(sinkOrchestrator.getSink(configuration, new String[]{"uniq_users"}, stencilClientOrchestrator, daggerStatsDReporter)); + return this; + } + + @Override + public void execute() throws Exception { + executionEnvironment.execute(configuration.getString(Constants.FLINK_JOB_ID_KEY, Constants.FLINK_JOB_ID_DEFAULT)); + } + + private StreamInfo addPreProcessor(StreamInfo streamInfo, String tableName) { + List preProcessors = PreProcessorFactory.getPreProcessors(daggerContext, tableName, telemetryExporter); + for (Preprocessor preprocessor : preProcessors) { + streamInfo = preprocessor.process(streamInfo); + } + return streamInfo; + } +} diff --git 
a/dagger-core/src/main/java/com/gotocompany/dagger/core/JobBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/JobBuilder.java new file mode 100644 index 000000000..a2aa07a08 --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/JobBuilder.java @@ -0,0 +1,29 @@ +package com.gotocompany.dagger.core; + +import java.io.IOException; + +/** + * An interface derived from the publicly exposed methods of {@code DaggerSqlJobBuilder}, + * previously referred to as StreamManager. + * <p> + * The {@code KafkaProtoSQLProcessor}, which serves as the program entry point, + * initializes an instance of the class given by the {@code JOB_BUILDER_FQCN} configuration value. + * Ensure that the job builder class is bundled with the program during any + * subsequent build stages. If it is not, the system falls back to the + * {@code DEFAULT_JOB_BUILDER_FQCN} class, i.e., {@code com.gotocompany.dagger.core.DaggerSqlJobBuilder}. + * <p>
+ * Additionally, the job builder class is expected to provide a constructor + * that accepts a single parameter of type {@code DaggerContext} + */ +public interface JobBuilder { + + JobBuilder registerConfigs(); + + JobBuilder registerSourceWithPreProcessors(); + + JobBuilder registerFunctions() throws IOException; + + JobBuilder registerOutputStream(); + + void execute() throws Exception; +} diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/KafkaProtoSQLProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/KafkaProtoSQLProcessor.java new file mode 100644 index 000000000..19798d03d --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/KafkaProtoSQLProcessor.java @@ -0,0 +1,59 @@ +package com.gotocompany.dagger.core; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.config.ConfigurationProvider; +import com.gotocompany.dagger.core.config.ConfigurationProviderFactory; +import com.gotocompany.dagger.functions.common.Constants; +import org.apache.flink.client.program.ProgramInvocationException; +import com.gotocompany.dagger.common.core.DaggerContext; + +import java.lang.reflect.Constructor; +import java.util.TimeZone; + +import static com.gotocompany.dagger.functions.common.Constants.JOB_BUILDER_FQCN_KEY; + +/** + * Main class to run Dagger. + */ +public class KafkaProtoSQLProcessor { + + /** + * The entry point of application. + * + * @param args the input arguments + * @throws ProgramInvocationException the program invocation exception + */ + public static void main(String[] args) throws ProgramInvocationException { + try { + ConfigurationProvider provider = new ConfigurationProviderFactory(args).provider(); + Configuration configuration = provider.get(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + DaggerContext daggerContext = DaggerContext.init(configuration); + + JobBuilder jobBuilder = getJobBuilderInstance(daggerContext); + jobBuilder + .registerConfigs() + .registerSourceWithPreProcessors() + .registerFunctions() + .registerOutputStream() + .execute(); + } catch (Exception | AssertionError e) { + e.printStackTrace(); + throw new ProgramInvocationException(e); + } + } + + private static JobBuilder getJobBuilderInstance(DaggerContext daggerContext) { + String className = daggerContext.getConfiguration().getString(JOB_BUILDER_FQCN_KEY, Constants.DEFAULT_JOB_BUILDER_FQCN); + try { + Class builderClazz = Class.forName(className); + Constructor builderClazzConstructor = builderClazz.getConstructor(DaggerContext.class); + return (JobBuilder) builderClazzConstructor.newInstance(daggerContext); + } catch (Exception e) { + Exception wrapperException = new Exception("Unable to instantiate job builder class: <" + className + "> \n" + + "Instantiating default job builder com.gotocompany.dagger.core.DaggerSqlJobBuilder", e); + wrapperException.printStackTrace(); + return new DaggerSqlJobBuilder(daggerContext); + } + } +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/config/CommandlineConfigurationProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/CommandlineConfigurationProvider.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/config/CommandlineConfigurationProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/config/CommandlineConfigurationProvider.java index dd1bada88..bd7972ef4 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/config/CommandlineConfigurationProvider.java 
+++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/CommandlineConfigurationProvider.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; import org.apache.flink.api.java.utils.ParameterTool; import com.google.gson.Gson; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; import java.util.Base64; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/config/ConfigurationProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/ConfigurationProvider.java similarity index 66% rename from dagger-core/src/main/java/io/odpf/dagger/core/config/ConfigurationProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/config/ConfigurationProvider.java index 0b9a22f7a..d8754c4bc 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/config/ConfigurationProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/ConfigurationProvider.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; /** * The interface for all Configuration provider class. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/config/ConfigurationProviderFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/ConfigurationProviderFactory.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/config/ConfigurationProviderFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/config/ConfigurationProviderFactory.java index a8ace5824..4594ef9df 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/config/ConfigurationProviderFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/ConfigurationProviderFactory.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; -import io.odpf.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/config/EnvironmentConfigurationProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/EnvironmentConfigurationProvider.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/config/EnvironmentConfigurationProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/config/EnvironmentConfigurationProvider.java index dbac97758..1460d00a6 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/config/EnvironmentConfigurationProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/EnvironmentConfigurationProvider.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; import org.apache.flink.api.java.utils.ParameterTool; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; import java.util.Map; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/config/FileConfigurationProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/FileConfigurationProvider.java similarity index 88% rename from 
dagger-core/src/main/java/io/odpf/dagger/core/config/FileConfigurationProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/config/FileConfigurationProvider.java index b5684e9d4..01b76130f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/config/FileConfigurationProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/FileConfigurationProvider.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; import org.apache.flink.api.java.utils.ParameterTool; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.common.configuration.Configuration; import java.io.FileReader; import java.util.HashMap; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/config/KafkaEnvironmentVariables.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/KafkaEnvironmentVariables.java similarity index 95% rename from dagger-core/src/main/java/io/odpf/dagger/core/config/KafkaEnvironmentVariables.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/config/KafkaEnvironmentVariables.java index 5d5e9b0b6..2342e29df 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/config/KafkaEnvironmentVariables.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/config/KafkaEnvironmentVariables.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; import org.apache.flink.configuration.Configuration; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/DaggerDeserializerFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerFactory.java similarity index 74% rename from dagger-core/src/main/java/io/odpf/dagger/core/deserializer/DaggerDeserializerFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerFactory.java index 3955cfbb9..b4b4750e4 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/DaggerDeserializerFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerFactory.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.deserializer; +package com.gotocompany.dagger.core.deserializer; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import io.odpf.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.source.config.StreamConfig; import org.apache.flink.types.Row; import java.util.List; diff --git 
a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/DaggerDeserializerProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerProvider.java similarity index 53% rename from dagger-core/src/main/java/io/odpf/dagger/core/deserializer/DaggerDeserializerProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerProvider.java index 405973429..d46f519a8 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/DaggerDeserializerProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerProvider.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.deserializer; +package com.gotocompany.dagger.core.deserializer; -import io.odpf.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; public interface DaggerDeserializerProvider { DaggerDeserializer getDaggerDeserializer(); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/JsonDeserializerProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/JsonDeserializerProvider.java similarity index 64% rename from dagger-core/src/main/java/io/odpf/dagger/core/deserializer/JsonDeserializerProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/JsonDeserializerProvider.java index 22efc7dda..e7cc9b3c0 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/JsonDeserializerProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/JsonDeserializerProvider.java @@ -1,23 +1,21 @@ -package io.odpf.dagger.core.deserializer; +package com.gotocompany.dagger.core.deserializer; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.DataTypes; -import io.odpf.dagger.common.serde.json.deserialization.JsonDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.DataTypes; +import com.gotocompany.dagger.common.serde.json.deserialization.JsonDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; import org.apache.flink.types.Row; import java.util.Arrays; import java.util.HashSet; -import static io.odpf.dagger.common.serde.DataTypes.JSON; -import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_SOURCE; -import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_CONSUMER; +import static com.gotocompany.dagger.common.serde.DataTypes.JSON; public class JsonDeserializerProvider implements DaggerDeserializerProvider { private final StreamConfig streamConfig; - private static final HashSet COMPATIBLE_SOURCES = new HashSet<>(Arrays.asList(KAFKA_SOURCE, KAFKA_CONSUMER)); + private static final HashSet COMPATIBLE_SOURCES = new HashSet<>(Arrays.asList(SourceName.KAFKA_SOURCE, SourceName.KAFKA_CONSUMER)); private static final DataTypes COMPATIBLE_INPUT_SCHEMA_TYPE = JSON; public JsonDeserializerProvider(StreamConfig streamConfig) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/ProtoDeserializerProvider.java 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/ProtoDeserializerProvider.java similarity index 60% rename from dagger-core/src/main/java/io/odpf/dagger/core/deserializer/ProtoDeserializerProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/ProtoDeserializerProvider.java index 152b52195..e165aeb60 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/ProtoDeserializerProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/ProtoDeserializerProvider.java @@ -1,26 +1,23 @@ -package io.odpf.dagger.core.deserializer; +package com.gotocompany.dagger.core.deserializer; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.DataTypes; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.DataTypes; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.types.Row; import java.util.Arrays; import java.util.HashSet; -import static io.odpf.dagger.common.serde.DataTypes.PROTO; -import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_CONSUMER; -import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_SOURCE; -import static io.odpf.dagger.core.utils.Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_KEY; +import static com.gotocompany.dagger.common.serde.DataTypes.PROTO; public class ProtoDeserializerProvider implements DaggerDeserializerProvider { - private static final HashSet COMPATIBLE_SOURCES = new HashSet<>(Arrays.asList(KAFKA_SOURCE, KAFKA_CONSUMER)); + private static final HashSet COMPATIBLE_SOURCES = new HashSet<>(Arrays.asList(SourceName.KAFKA_SOURCE, SourceName.KAFKA_CONSUMER)); private static final DataTypes COMPATIBLE_INPUT_SCHEMA_TYPE = PROTO; protected final StreamConfig streamConfig; protected final Configuration configuration; @@ -36,7 +33,7 @@ public ProtoDeserializerProvider(StreamConfig streamConfig, Configuration config public DaggerDeserializer getDaggerDeserializer() { int timestampFieldIndex = Integer.parseInt(streamConfig.getEventTimestampFieldIndex()); String protoClassName = streamConfig.getProtoClass(); - String rowTimeAttributeName = configuration.getString(FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); + String rowTimeAttributeName = configuration.getString(Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); return new ProtoDeserializer(protoClassName, timestampFieldIndex, rowTimeAttributeName, stencilClientOrchestrator); } diff --git 
a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/SimpleGroupDeserializerProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/SimpleGroupDeserializerProvider.java similarity index 60% rename from dagger-core/src/main/java/io/odpf/dagger/core/deserializer/SimpleGroupDeserializerProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/SimpleGroupDeserializerProvider.java index de1ee06a2..41c853701 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/deserializer/SimpleGroupDeserializerProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/deserializer/SimpleGroupDeserializerProvider.java @@ -1,25 +1,23 @@ -package io.odpf.dagger.core.deserializer; +package com.gotocompany.dagger.core.deserializer; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.DataTypes; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.DataTypes; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.types.Row; -import static io.odpf.dagger.common.serde.DataTypes.PROTO; -import static io.odpf.dagger.core.source.config.models.SourceName.PARQUET_SOURCE; -import static io.odpf.dagger.core.utils.Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_KEY; +import static com.gotocompany.dagger.common.serde.DataTypes.PROTO; public class SimpleGroupDeserializerProvider implements DaggerDeserializerProvider { protected final StreamConfig streamConfig; protected final Configuration configuration; protected final StencilClientOrchestrator stencilClientOrchestrator; - private static final SourceName COMPATIBLE_SOURCE = PARQUET_SOURCE; + private static final SourceName COMPATIBLE_SOURCE = SourceName.PARQUET_SOURCE; private static final DataTypes COMPATIBLE_INPUT_SCHEMA_TYPE = PROTO; public SimpleGroupDeserializerProvider(StreamConfig streamConfig, Configuration configuration, StencilClientOrchestrator stencilClientOrchestrator) { @@ -32,7 +30,7 @@ public SimpleGroupDeserializerProvider(StreamConfig streamConfig, Configuration public DaggerDeserializer getDaggerDeserializer() { int timestampFieldIndex = Integer.parseInt(streamConfig.getEventTimestampFieldIndex()); String protoClassName = streamConfig.getProtoClass(); - String rowTimeAttributeName = configuration.getString(FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); + String rowTimeAttributeName = configuration.getString(Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, Constants.FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); return new SimpleGroupDeserializer(protoClassName, 
timestampFieldIndex, rowTimeAttributeName, stencilClientOrchestrator); } diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/enumeration/KafkaConnectorTypesMetadata.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/enumeration/KafkaConnectorTypesMetadata.java new file mode 100644 index 000000000..9a7739df0 --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/enumeration/KafkaConnectorTypesMetadata.java @@ -0,0 +1,18 @@ +package com.gotocompany.dagger.core.enumeration; + +import java.util.regex.Pattern; + +public enum KafkaConnectorTypesMetadata { + SOURCE("SOURCE_KAFKA_CONSUMER_CONFIG_+"), SINK("SINK_KAFKA_PRODUCER_CONFIG_+"); + + KafkaConnectorTypesMetadata(String prefixPattern) { + this.prefixPattern = prefixPattern; + } + + private final String prefixPattern; + + public Pattern getConfigurationPattern() { + return Pattern.compile(String.format("^%s(.*)", prefixPattern), Pattern.CASE_INSENSITIVE); + } + +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/BigQueryWriterException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/BigQueryWriterException.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/BigQueryWriterException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/BigQueryWriterException.java index f7d931b95..5738bd827 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/BigQueryWriterException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/BigQueryWriterException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; import java.io.IOException; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ChannelNotAvailableException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ChannelNotAvailableException.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/ChannelNotAvailableException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ChannelNotAvailableException.java index 12abddd32..3904f0b02 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ChannelNotAvailableException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ChannelNotAvailableException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if Grpc Channel not available. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ConsumerLagNotZeroException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ConsumerLagNotZeroException.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/ConsumerLagNotZeroException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ConsumerLagNotZeroException.java index cbb5b50ca..2e339ab1c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ConsumerLagNotZeroException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ConsumerLagNotZeroException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if kafka consumer lag is not zero. 
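A note on the new KafkaConnectorTypesMetadata enum introduced above: getConfigurationPattern() returns a case-insensitive regex whose first capture group is whatever follows the SOURCE_KAFKA_CONSUMER_CONFIG_ or SINK_KAFKA_PRODUCER_CONFIG_ prefix in a property key. The sketch below only demonstrates that capture-group behaviour against the sample stream properties from this changeset; the demo class name and the lower-case/underscore-to-dot rewriting of the captured suffix are illustrative assumptions, not part of this diff.

import com.gotocompany.dagger.core.enumeration.KafkaConnectorTypesMetadata;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;

public class KafkaConfigPrefixDemo {

    // Keeps every key matching the SOURCE prefix pattern, takes the suffix captured by
    // group(1), and rewrites it into a Kafka-style property name. The lower-case and
    // underscore-to-dot mapping is an illustrative assumption, not taken from this diff.
    public static Properties extractConsumerProperties(Map<String, String> rawConfig) {
        Properties kafkaProps = new Properties();
        for (Map.Entry<String, String> entry : rawConfig.entrySet()) {
            Matcher matcher = KafkaConnectorTypesMetadata.SOURCE
                    .getConfigurationPattern()
                    .matcher(entry.getKey());
            if (matcher.find()) {
                String kafkaKey = matcher.group(1).toLowerCase().replaceAll("_", ".");
                kafkaProps.setProperty(kafkaKey, entry.getValue());
            }
        }
        return kafkaProps;
    }

    public static void main(String[] args) {
        Map<String, String> rawConfig = new HashMap<>();
        rawConfig.put("SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS", "localhost:9092");
        rawConfig.put("SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID", "dagger-test-topic-cgroup-v1");
        rawConfig.put("INPUT_SCHEMA_TABLE", "data_stream"); // no prefix match, ignored
        // prints e.g. {group.id=dagger-test-topic-cgroup-v1, bootstrap.servers=localhost:9092}
        System.out.println(extractConsumerProperties(rawConfig));
    }
}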
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/DaggerConfigurationException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/DaggerConfigurationException.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/DaggerConfigurationException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/DaggerConfigurationException.java index 584dae199..6cd70276e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/DaggerConfigurationException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/DaggerConfigurationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is something wrong with Dagger configuration. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/GrpcFailureException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/GrpcFailureException.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/GrpcFailureException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/GrpcFailureException.java index 88347a443..16e6ef82c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/GrpcFailureException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/GrpcFailureException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is failure in Grpc. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/HttpFailureException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/HttpFailureException.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/HttpFailureException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/HttpFailureException.java index 702bab37a..3e9159a62 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/HttpFailureException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/HttpFailureException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is failure in Http. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InfluxWriteException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InfluxWriteException.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InfluxWriteException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InfluxWriteException.java index 60d200469..52082f514 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InfluxWriteException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InfluxWriteException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; import java.io.IOException; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InputOutputMappingException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InputOutputMappingException.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InputOutputMappingException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InputOutputMappingException.java index 2c7637b70..742898445 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InputOutputMappingException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InputOutputMappingException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if error happens on input output mapping. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidConfigurationException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidConfigurationException.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidConfigurationException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidConfigurationException.java index 0594bd306..dde5e543f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidConfigurationException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidConfigurationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is an Invalid Configuration. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidDaggerSourceException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidDaggerSourceException.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidDaggerSourceException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidDaggerSourceException.java index d8ea68354..8badd85c9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidDaggerSourceException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidDaggerSourceException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; public class InvalidDaggerSourceException extends RuntimeException { public InvalidDaggerSourceException(String message) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidGrpcBodyException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidGrpcBodyException.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidGrpcBodyException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidGrpcBodyException.java index 16d360639..dd06d1eb2 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidGrpcBodyException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidGrpcBodyException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is an Invalid Grpc body. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidHttpVerbException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidHttpVerbException.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidHttpVerbException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidHttpVerbException.java index 01e5379b2..0b4405062 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidHttpVerbException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidHttpVerbException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is an Invalid Http verb. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidLongbowDurationException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidLongbowDurationException.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidLongbowDurationException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidLongbowDurationException.java index ca91374bd..4f9087faa 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidLongbowDurationException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidLongbowDurationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is an Invalid Longbow duration. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidTimeRangeException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidTimeRangeException.java similarity index 76% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidTimeRangeException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidTimeRangeException.java index 864fa196e..54b4ef911 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidTimeRangeException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/InvalidTimeRangeException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; public class InvalidTimeRangeException extends RuntimeException { public InvalidTimeRangeException(String message) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ParquetFileSourceReaderInitializationException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ParquetFileSourceReaderInitializationException.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/ParquetFileSourceReaderInitializationException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ParquetFileSourceReaderInitializationException.java index 5023f5cba..35ba8b023 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ParquetFileSourceReaderInitializationException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ParquetFileSourceReaderInitializationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /*** * This exception is thrown when the reader for Parquet FileSource could not be initialized. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/PathParserNotProvidedException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/PathParserNotProvidedException.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/PathParserNotProvidedException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/PathParserNotProvidedException.java index 307d64980..1d4a02609 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/PathParserNotProvidedException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/PathParserNotProvidedException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; public class PathParserNotProvidedException extends RuntimeException { public PathParserNotProvidedException(String message) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/RecordsNotConsumedException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/RecordsNotConsumedException.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/RecordsNotConsumedException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/RecordsNotConsumedException.java index 485d14874..9e9be1fc7 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/RecordsNotConsumedException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/RecordsNotConsumedException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is records that not consumed. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ThorCommandFailedException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ThorCommandFailedException.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/ThorCommandFailedException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ThorCommandFailedException.java index 5e91d844c..68dac2eab 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/ThorCommandFailedException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/ThorCommandFailedException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if there is failure on Thor command. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/TransformClassNotDefinedException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/TransformClassNotDefinedException.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/TransformClassNotDefinedException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/TransformClassNotDefinedException.java index 05e44f065..8570374e7 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/TransformClassNotDefinedException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/TransformClassNotDefinedException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if Transformer class is not defined in transformer configuration. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/UDFFactoryClassNotDefinedException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/UDFFactoryClassNotDefinedException.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/UDFFactoryClassNotDefinedException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/exception/UDFFactoryClassNotDefinedException.java index 1e4a343f4..beee7d41f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/UDFFactoryClassNotDefinedException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/exception/UDFFactoryClassNotDefinedException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.exception; +package com.gotocompany.dagger.core.exception; /** * The class Exception if Udf factory class is not defined in function factory configuration. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ChronologyOrderedSplitAssignerAspects.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ChronologyOrderedSplitAssignerAspects.java similarity index 79% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ChronologyOrderedSplitAssignerAspects.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ChronologyOrderedSplitAssignerAspects.java index 555abccdd..860b0f829 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ChronologyOrderedSplitAssignerAspects.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ChronologyOrderedSplitAssignerAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.metrics.aspects; +package com.gotocompany.dagger.core.metrics.aspects; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; public enum ChronologyOrderedSplitAssignerAspects implements Aspects { TOTAL_SPLITS_DISCOVERED("total_splits_discovered", AspectType.Gauge), diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ExternalSourceAspects.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ExternalSourceAspects.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ExternalSourceAspects.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ExternalSourceAspects.java index 2019c589b..8a9016b82 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ExternalSourceAspects.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ExternalSourceAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.metrics.aspects; +package com.gotocompany.dagger.core.metrics.aspects; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; /** * The enum External source aspects. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/LongbowReaderAspects.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/LongbowReaderAspects.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/LongbowReaderAspects.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/LongbowReaderAspects.java index db7d4c0d9..e515e2b8b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/LongbowReaderAspects.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/LongbowReaderAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.metrics.aspects; +package com.gotocompany.dagger.core.metrics.aspects; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; /** * The enum Longbow reader aspects. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/LongbowWriterAspects.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/LongbowWriterAspects.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/LongbowWriterAspects.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/LongbowWriterAspects.java index 90a99527f..ca7414a3c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/LongbowWriterAspects.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/LongbowWriterAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.metrics.aspects; +package com.gotocompany.dagger.core.metrics.aspects; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; /** * The enum Longbow writer aspects. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ParquetReaderAspects.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ParquetReaderAspects.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ParquetReaderAspects.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ParquetReaderAspects.java index 9a755e843..d1b6827a1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/ParquetReaderAspects.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/ParquetReaderAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.metrics.aspects; +package com.gotocompany.dagger.core.metrics.aspects; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; public enum ParquetReaderAspects implements Aspects { READER_CREATED("reader_created", AspectType.Counter), diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/TelemetryAspects.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/TelemetryAspects.java similarity index 74% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/TelemetryAspects.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/TelemetryAspects.java index 7972aad7e..e8daed1b7 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/aspects/TelemetryAspects.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/aspects/TelemetryAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.metrics.aspects; +package com.gotocompany.dagger.core.metrics.aspects; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; /** * The enum Telemetry aspects. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporter.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporter.java index 5ce90a3f1..68ea0ba2b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporter.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters; +package com.gotocompany.dagger.core.metrics.reporters; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporterFactory.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporterFactory.java index 729a54e17..653b861bb 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporterFactory.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.metrics.reporters; +package com.gotocompany.dagger.core.metrics.reporters; import org.apache.flink.metrics.MetricGroup; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.utils.Constants; /** * The Factory class for Error reporter. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorStatsReporter.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorStatsReporter.java index eda80e319..1293b5509 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/ErrorStatsReporter.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.metrics.reporters; +package com.gotocompany.dagger.core.metrics.reporters; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.utils.Constants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/NoOpErrorReporter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/NoOpErrorReporter.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/NoOpErrorReporter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/NoOpErrorReporter.java index 0bb15e38c..1d8633723 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/NoOpErrorReporter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/NoOpErrorReporter.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters; +package com.gotocompany.dagger.core.metrics.reporters; /** * The No op error reporter. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerMetricsConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerMetricsConfig.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerMetricsConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerMetricsConfig.java index 15ea4ffed..e58bad50b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerMetricsConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerMetricsConfig.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; -import io.odpf.depot.config.MetricsConfig; +import com.gotocompany.depot.config.MetricsConfig; import org.apache.flink.configuration.ConfigOption; import org.apache.flink.configuration.ConfigOptions; import org.apache.flink.configuration.Configuration; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerStatsDReporter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerStatsDReporter.java similarity index 69% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerStatsDReporter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerStatsDReporter.java index 733cacdf3..a05484a23 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerStatsDReporter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerStatsDReporter.java @@ -1,23 +1,23 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.GlobalTags; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; -import io.odpf.depot.metrics.StatsDReporterBuilder; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.GlobalTags; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; +import com.gotocompany.depot.metrics.StatsDReporterBuilder; import org.apache.flink.configuration.Configuration; import java.io.IOException; import java.util.Arrays; -import static io.odpf.dagger.core.utils.Constants.FLINK_JOB_ID_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.FLINK_JOB_ID_KEY; +import static com.gotocompany.dagger.core.utils.Constants.FLINK_JOB_ID_DEFAULT; +import static com.gotocompany.dagger.core.utils.Constants.FLINK_JOB_ID_KEY; public class DaggerStatsDReporter implements SerializedStatsDReporterSupplier { private static StatsDReporter statsDReporter; private final Configuration flinkConfiguration; - private final io.odpf.dagger.common.configuration.Configuration daggerConfiguration; + private final com.gotocompany.dagger.common.configuration.Configuration daggerConfiguration; - private DaggerStatsDReporter(Configuration flinkConfiguration, io.odpf.dagger.common.configuration.Configuration daggerConfiguration) { + private DaggerStatsDReporter(Configuration flinkConfiguration, com.gotocompany.dagger.common.configuration.Configuration daggerConfiguration) { this.flinkConfiguration = flinkConfiguration; 
this.daggerConfiguration = daggerConfiguration; } @@ -52,7 +52,7 @@ protected static void close() throws IOException { } public static class Provider { - public static DaggerStatsDReporter provide(Configuration flinkConfiguration, io.odpf.dagger.common.configuration.Configuration daggerConfiguration) { + public static DaggerStatsDReporter provide(Configuration flinkConfiguration, com.gotocompany.dagger.common.configuration.Configuration daggerConfiguration) { return new DaggerStatsDReporter(flinkConfiguration, daggerConfiguration); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/SerializedStatsDReporterSupplier.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/SerializedStatsDReporterSupplier.java similarity index 83% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/SerializedStatsDReporterSupplier.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/SerializedStatsDReporterSupplier.java index 7991bf293..df19811c4 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/SerializedStatsDReporterSupplier.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/SerializedStatsDReporterSupplier.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.depot.metrics.StatsDReporter; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/StatsDErrorReporter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/StatsDErrorReporter.java similarity index 76% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/StatsDErrorReporter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/StatsDErrorReporter.java index 53856695d..8b6cb67cd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/StatsDErrorReporter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/StatsDErrorReporter.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; import java.io.Serializable; -import static io.odpf.dagger.core.utils.Constants.FATAL_EXCEPTION_METRIC_GROUP_KEY; -import static io.odpf.dagger.core.utils.Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY; +import static com.gotocompany.dagger.core.utils.Constants.FATAL_EXCEPTION_METRIC_GROUP_KEY; +import static com.gotocompany.dagger.core.utils.Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY; public class StatsDErrorReporter implements ErrorReporter, Serializable { private static final String FATAL_EXCEPTION_TAG_KEY = "fatal_exception_type"; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManager.java 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManager.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManager.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManager.java index 1110beeae..2cfa8ad1b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManager.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; -import io.odpf.dagger.common.metrics.aspects.Aspects; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.measurement.Counter; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.measurement.Counter; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; import java.util.ArrayList; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManager.java similarity index 65% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManager.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManager.java index 10c7c6771..357b4f2a8 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManager.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; -import io.odpf.dagger.common.metrics.aspects.Aspects; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.measurement.Gauge; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.measurement.Gauge; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; import java.util.ArrayList; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManager.java similarity index 65% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManager.java rename to 
dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManager.java index 93b8b5065..1c07dceae 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManager.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; -import io.odpf.dagger.common.metrics.aspects.Aspects; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.measurement.Histogram; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.measurement.Histogram; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; import java.util.ArrayList; diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/MeasurementManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/MeasurementManager.java new file mode 100644 index 000000000..609261927 --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/MeasurementManager.java @@ -0,0 +1,9 @@ +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; + +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; + +import java.io.Serializable; + +public interface MeasurementManager extends Serializable { + void register(StatsDTag[] tags); +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Counter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Counter.java similarity index 64% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Counter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Counter.java index 0b0668aac..f85695be1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Counter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Counter.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.measurement; +package com.gotocompany.dagger.core.metrics.reporters.statsd.measurement; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; import java.io.Serializable; diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Gauge.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Gauge.java new file mode 100644 index 000000000..b9078d20b --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Gauge.java @@ -0,0 +1,9 @@ +package com.gotocompany.dagger.core.metrics.reporters.statsd.measurement; + +import com.gotocompany.dagger.common.metrics.aspects.Aspects; + +import 
java.io.Serializable; + +public interface Gauge extends Serializable { + void markValue(Aspects aspect, int gaugeValue); +} diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Histogram.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Histogram.java new file mode 100644 index 000000000..d4d3d101a --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/measurement/Histogram.java @@ -0,0 +1,9 @@ +package com.gotocompany.dagger.core.metrics.reporters.statsd.measurement; + +import com.gotocompany.dagger.common.metrics.aspects.Aspects; + +import java.io.Serializable; + +public interface Histogram extends Serializable { + void recordValue(Aspects aspect, long value); +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/ComponentTags.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/ComponentTags.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/ComponentTags.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/ComponentTags.java index b73d64c1b..7c47a42e5 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/ComponentTags.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/ComponentTags.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.tags; +package com.gotocompany.dagger.core.metrics.reporters.statsd.tags; public class ComponentTags { private static StatsDTag[] parquetReaderTags; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/GlobalTags.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/GlobalTags.java similarity index 54% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/GlobalTags.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/GlobalTags.java index 2b3d9b143..40b93f210 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/GlobalTags.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/GlobalTags.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.tags; +package com.gotocompany.dagger.core.metrics.reporters.statsd.tags; public class GlobalTags { public static final String JOB_ID = "job_id"; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/StatsDTag.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/StatsDTag.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/StatsDTag.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/StatsDTag.java index d1c40b22e..b0394a353 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/StatsDTag.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/StatsDTag.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.tags; +package com.gotocompany.dagger.core.metrics.reporters.statsd.tags; import org.apache.flink.util.Preconditions; diff --git 
a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetryPublisher.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetryPublisher.java similarity index 95% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetryPublisher.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetryPublisher.java index 039ce438f..512902f4e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetryPublisher.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetryPublisher.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.telemetry; +package com.gotocompany.dagger.core.metrics.telemetry; import java.util.ArrayList; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetrySubscriber.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetrySubscriber.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetrySubscriber.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetrySubscriber.java index f4a1159ff..141289e5c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetrySubscriber.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetrySubscriber.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.telemetry; +package com.gotocompany.dagger.core.metrics.telemetry; /** * The interface Telemetry subscriber. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetryTypes.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetryTypes.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetryTypes.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetryTypes.java index d7ccc45e4..6e68c7394 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/telemetry/TelemetryTypes.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/metrics/telemetry/TelemetryTypes.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.telemetry; +package com.gotocompany.dagger.core.metrics.telemetry; /** * The enum Telemetry types. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/ColumnNameManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/ColumnNameManager.java similarity index 95% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/ColumnNameManager.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/ColumnNameManager.java index 7abe66d3f..d313a6389 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/ColumnNameManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/ColumnNameManager.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; import java.io.Serializable; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/ParentPostProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/ParentPostProcessor.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/ParentPostProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/ParentPostProcessor.java index 0373155e2..3b834063a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/ParentPostProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/ParentPostProcessor.java @@ -1,22 +1,22 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.core.processors.common.FetchOutputDecorator; +import com.gotocompany.dagger.core.processors.common.InitializationDecorator; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.common.FetchOutputDecorator; -import io.odpf.dagger.core.processors.common.InitializationDecorator; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.external.ExternalPostProcessor; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.internal.InternalPostProcessor; -import io.odpf.dagger.core.processors.transformers.TransformProcessor; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; +import com.gotocompany.dagger.core.processors.external.ExternalPostProcessor; +import com.gotocompany.dagger.core.processors.internal.InternalPostProcessor; +import com.gotocompany.dagger.core.processors.transformers.TransformProcessor; +import com.gotocompany.dagger.core.utils.Constants; import 
java.util.ArrayList; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PostProcessorConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PostProcessorConfig.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/PostProcessorConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PostProcessorConfig.java index 2e6de8414..c1328b35f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PostProcessorConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PostProcessorConfig.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.core.processors.external.ExternalSourceConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.transformers.TransformConfig; +import com.gotocompany.dagger.core.processors.external.ExternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.transformers.TransformConfig; import com.google.common.reflect.TypeToken; import com.google.gson.FieldNamingPolicy; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; import com.jayway.jsonpath.InvalidJsonException; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; import java.io.Serializable; import java.lang.reflect.Type; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PostProcessorFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PostProcessorFactory.java similarity index 76% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/PostProcessorFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PostProcessorFactory.java index b7480b282..150189fc5 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PostProcessorFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PostProcessorFactory.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.DaggerContext; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.processors.longbow.LongbowFactory; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.telemetry.TelemetryProcessor; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.processors.longbow.LongbowFactory; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.telemetry.TelemetryProcessor; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.utils.Constants; import 
java.util.ArrayList; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorConfig.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorConfig.java index 87e7f8b25..da0e55c62 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorConfig.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.core.processors.transformers.TableTransformConfig; +import com.gotocompany.dagger.core.processors.transformers.TableTransformConfig; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorFactory.java similarity index 72% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorFactory.java index ed411f1c1..9b6c20cc2 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorFactory.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.common.core.DaggerContext; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import java.util.Collections; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorOrchestrator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorOrchestrator.java similarity index 84% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorOrchestrator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorOrchestrator.java index 4adb63f56..56b12b49f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/PreProcessorOrchestrator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/PreProcessorOrchestrator.java @@ -1,19 +1,19 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; import com.google.gson.FieldNamingPolicy; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; +import com.gotocompany.dagger.core.processors.common.ValidRecordsDecorator; import com.jayway.jsonpath.InvalidJsonException; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.DaggerContext; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.common.ValidRecordsDecorator; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import 
io.odpf.dagger.core.processors.transformers.TransformProcessor; -import io.odpf.dagger.core.processors.types.Preprocessor; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.transformers.TransformProcessor; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.utils.Constants; import java.util.ArrayList; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/DescriptorManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/DescriptorManager.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/DescriptorManager.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/DescriptorManager.java index 20b0ca2e6..3325bdc07 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/DescriptorManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/DescriptorManager.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.stencil.client.StencilClient; import com.google.protobuf.Descriptors; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/EndpointHandler.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/EndpointHandler.java index 6e818467e..68b308576 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/EndpointHandler.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.utils.Constants.ExternalPostProcessorVariableType; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.utils.Constants.ExternalPostProcessorVariableType; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import 
io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; import com.google.protobuf.Descriptors; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/FetchOutputDecorator.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/FetchOutputDecorator.java index e0b7abbd9..281df539f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/FetchOutputDecorator.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.core.processors.types.MapDecorator; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.core.processors.types.MapDecorator; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.java.typeutils.RowTypeInfo; @@ -14,7 +14,7 @@ import java.time.LocalDateTime; import java.util.Arrays; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; /** * The Fetch output decorator. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/InitializationDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/InitializationDecorator.java similarity index 79% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/InitializationDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/InitializationDecorator.java index 88ff66670..173dcf46c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/InitializationDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/InitializationDecorator.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.types.MapDecorator; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.types.MapDecorator; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/OutputMapping.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/OutputMapping.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/OutputMapping.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/OutputMapping.java index d872b784d..dd6a9d3aa 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/OutputMapping.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/OutputMapping.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.core.processors.types.Validator; +import com.gotocompany.dagger.core.processors.types.Validator; import java.io.Serializable; import java.util.HashMap; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/PostResponseTelemetry.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/PostResponseTelemetry.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/PostResponseTelemetry.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/PostResponseTelemetry.java index e8e479327..3fe04b010 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/PostResponseTelemetry.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/PostResponseTelemetry.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; import java.time.Instant; -import static io.odpf.dagger.core.utils.Constants.*; +import static com.gotocompany.dagger.core.utils.Constants.*; import static java.time.Duration.between; import static org.apache.http.HttpStatus.SC_NOT_FOUND; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/RowManager.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/RowManager.java similarity index 95% rename from 
dagger-core/src/main/java/io/odpf/dagger/core/processors/common/RowManager.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/RowManager.java index 29cf19d71..3a164b018 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/RowManager.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/RowManager.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.core.exception.InputOutputMappingException; +import com.gotocompany.dagger.core.exception.InputOutputMappingException; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/SchemaConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/SchemaConfig.java similarity index 83% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/SchemaConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/SchemaConfig.java index 8b07be7b8..eac3558de 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/SchemaConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/SchemaConfig.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; import com.google.gson.Gson; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.processors.ColumnNameManager; import java.io.Serializable; import java.util.ArrayList; import java.util.Map; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; -import static io.odpf.dagger.core.utils.Constants.SINK_KAFKA_PROTO_MESSAGE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; +import static com.gotocompany.dagger.core.utils.Constants.SINK_KAFKA_PROTO_MESSAGE_KEY; /** * The Schema config. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/ValidRecordsDecorator.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/ValidRecordsDecorator.java index c4d4a2385..7a86fbeab 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/common/ValidRecordsDecorator.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; import org.apache.flink.api.common.functions.RichFilterFunction; import org.apache.flink.types.Row; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.Constants; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.processors.types.FilterDecorator; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.Constants; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.processors.types.FilterDecorator; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/AsyncConnector.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/AsyncConnector.java index 50144d98d..bbead41d8 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/AsyncConnector.java @@ -1,24 +1,24 @@ -package io.odpf.dagger.core.processors.external; - -import io.odpf.dagger.core.processors.common.SchemaConfig; +package com.gotocompany.dagger.core.processors.external; + +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.core.processors.common.EndpointHandler; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.types.SourceConfig; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; -import 
io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.common.EndpointHandler; -import io.odpf.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalMetricConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalMetricConfig.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalMetricConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalMetricConfig.java index 44b8ae3c1..99183e998 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalMetricConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalMetricConfig.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external; +package com.gotocompany.dagger.core.processors.external; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.utils.Constants; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalPostProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalPostProcessor.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalPostProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalPostProcessor.java index bde862aef..fd91e74f1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalPostProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalPostProcessor.java @@ -1,20 +1,20 @@ -package io.odpf.dagger.core.processors.external; - -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.processors.types.SourceConfig; -import io.odpf.dagger.core.processors.types.StreamDecorator; -import io.odpf.dagger.core.processors.types.Validator; -import io.odpf.dagger.core.processors.external.es.EsSourceConfig; -import io.odpf.dagger.core.processors.external.es.EsStreamDecorator; -import 
io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; -import io.odpf.dagger.core.processors.external.grpc.GrpcStreamDecorator; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; -import io.odpf.dagger.core.processors.external.http.HttpStreamDecorator; -import io.odpf.dagger.core.processors.external.pg.PgSourceConfig; -import io.odpf.dagger.core.processors.external.pg.PgStreamDecorator; +package com.gotocompany.dagger.core.processors.external; + +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.es.EsSourceConfig; +import com.gotocompany.dagger.core.processors.external.es.EsStreamDecorator; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcStreamDecorator; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpStreamDecorator; +import com.gotocompany.dagger.core.processors.external.pg.PgSourceConfig; +import com.gotocompany.dagger.core.processors.external.pg.PgStreamDecorator; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.types.Validator; import org.apache.commons.lang3.StringUtils; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalSourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalSourceConfig.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalSourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalSourceConfig.java index 13670629a..0c6e372b7 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/ExternalSourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/ExternalSourceConfig.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.external; +package com.gotocompany.dagger.core.processors.external; -import io.odpf.dagger.core.processors.types.SourceConfig; -import io.odpf.dagger.core.processors.external.es.EsSourceConfig; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; -import io.odpf.dagger.core.processors.external.pg.PgSourceConfig; +import com.gotocompany.dagger.core.processors.external.es.EsSourceConfig; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.pg.PgSourceConfig; +import com.gotocompany.dagger.core.processors.types.SourceConfig; import java.util.ArrayList; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsAsyncConnector.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsAsyncConnector.java similarity index 85% rename from 
dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsAsyncConnector.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsAsyncConnector.java index 97c3a3c3f..fe65e0eed 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsAsyncConnector.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsAsyncConnector.java @@ -1,16 +1,15 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.external.AsyncConnector; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; +import com.gotocompany.dagger.core.processors.external.AsyncConnector; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.core.utils.Constants; -import io.odpf.dagger.core.utils.Constants.ExternalPostProcessorVariableType; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; @@ -79,7 +78,7 @@ protected void createClient() { protected void process(Row input, ResultFuture resultFuture) { RowManager rowManager = new RowManager(input); Object[] endpointVariablesValues = getEndpointHandler() - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.ENDPOINT_VARIABLE, esSourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.ENDPOINT_VARIABLE, esSourceConfig.getVariables(), resultFuture); if (getEndpointHandler().isQueryInvalid(resultFuture, rowManager, esSourceConfig.getVariables(), endpointVariablesValues)) { return; } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsResponseHandler.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsResponseHandler.java index 09c0e613a..8b87d66fe 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsResponseHandler.java @@ -1,5 +1,11 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; +import com.gotocompany.dagger.core.exception.HttpFailureException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import 
com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; @@ -7,15 +13,9 @@ import com.google.protobuf.Descriptors.Descriptor; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.PathNotFoundException; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.exception.HttpFailureException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; import org.apache.http.ParseException; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -import static io.odpf.dagger.common.serde.typehandler.RowFactory.createRow; +import static com.gotocompany.dagger.common.serde.typehandler.RowFactory.createRow; import static java.util.Collections.singleton; import static org.apache.http.HttpStatus.SC_OK; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsSourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfig.java similarity index 97% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsSourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfig.java index c23648acc..13d2fa2cf 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsSourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfig.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.types.SourceConfig; import com.google.gson.annotations.SerializedName; import org.apache.commons.lang3.StringUtils; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsSourceConfigBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfigBuilder.java similarity index 96% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsSourceConfigBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfigBuilder.java index a7e214ce0..6f6f59dfd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsSourceConfigBuilder.java +++ 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfigBuilder.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; -import io.odpf.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.OutputMapping; import java.util.Map; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsStreamDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsStreamDecorator.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsStreamDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsStreamDecorator.java index 32fb01a22..179f1480f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsStreamDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/es/EsStreamDecorator.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.apache.flink.streaming.api.datastream.AsyncDataStream; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcAsyncConnector.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcAsyncConnector.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcAsyncConnector.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcAsyncConnector.java index db28e5b23..b200c30f6 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcAsyncConnector.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcAsyncConnector.java @@ -1,26 +1,25 @@ -package io.odpf.dagger.core.processors.external.grpc; - -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.external.AsyncConnector; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.external.grpc.client.GrpcRequestHandler; +package com.gotocompany.dagger.core.processors.external.grpc; + +import com.gotocompany.dagger.core.exception.ChannelNotAvailableException; +import com.gotocompany.dagger.core.exception.InvalidGrpcBodyException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import 
com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.grpc.client.GrpcClient; +import com.gotocompany.dagger.core.processors.external.grpc.client.GrpcRequestHandler; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.external.AsyncConnector; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.core.exception.ChannelNotAvailableException; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.core.exception.InvalidGrpcBodyException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.external.grpc.client.GrpcClient; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.core.utils.Constants; -import io.odpf.dagger.core.utils.Constants.ExternalPostProcessorVariableType; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -91,7 +90,7 @@ protected void process(Row input, ResultFuture resultFuture) throws Excepti RowManager rowManager = new RowManager(input); Object[] requestVariablesValues = getEndpointHandler() - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, grpcSourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, grpcSourceConfig.getVariables(), resultFuture); if (getEndpointHandler().isQueryInvalid(resultFuture, rowManager, grpcSourceConfig.getVariables(), requestVariablesValues)) { return; } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcResponseHandler.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcResponseHandler.java index 12441ecb1..d88b39ca1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcResponseHandler.java @@ -1,25 +1,26 @@ -package io.odpf.dagger.core.processors.external.grpc; - -import io.odpf.dagger.core.exception.GrpcFailureException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; +package com.gotocompany.dagger.core.processors.external.grpc; + +import com.gotocompany.dagger.core.exception.GrpcFailureException; +import 
com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; +import com.gotocompany.dagger.core.utils.DescriptorsUtil; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.PathNotFoundException; import io.grpc.stub.StreamObserver; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -111,10 +112,14 @@ private void setField(String key, Object value, int fieldIndex) { } private void setFieldUsingType(String key, Object value, int fieldIndex) { - Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName(key); - if (fieldDescriptor == null) { - IllegalArgumentException illegalArgumentException = new IllegalArgumentException("Field Descriptor not found for field: " + key); - reportAndThrowError(illegalArgumentException); + Descriptors.FieldDescriptor fieldDescriptor = null; + try { + fieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, key); + if (fieldDescriptor == null) { + throw new IllegalArgumentException("Field Descriptor not found for field: " + key); + } + } catch (RuntimeException exception) { + reportAndThrowError(exception); } TypeHandler typeHandler = TypeHandlerFactory.getTypeHandler(fieldDescriptor); rowManager.setInOutput(fieldIndex, typeHandler.transformFromPostProcessor(value)); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfig.java similarity index 72% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfig.java index 1ca92709f..2a88745ae 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfig.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.grpc; +package com.gotocompany.dagger.core.processors.external.grpc; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.types.SourceConfig; import com.google.gson.annotations.SerializedName; import org.apache.commons.lang3.StringUtils; @@ -24,6 +24,8 @@ public class GrpcSourceConfig implements Serializable, SourceConfig { private String grpcMethodUrl; private String requestPattern; private String requestVariables; + private String 
grpcArgKeepaliveTimeMs; + private String grpcArgKeepaliveTimeoutMs; private String streamTimeout; private String connectTimeout; private boolean failOnErrors; @@ -63,30 +65,34 @@ public GrpcSourceConfig(String endpoint, int servicePort, String grpcRequestProt /** * Instantiates a new Grpc source config with specified grpc stencil url. * - * @param endpoint the endpoint - * @param servicePort the service port - * @param grpcRequestProtoSchema the grpc request proto schema - * @param grpcResponseProtoSchema the grpc response proto schema - * @param grpcMethodUrl the grpc method url - * @param requestPattern the request pattern - * @param requestVariables the request variables - * @param streamTimeout the stream timeout - * @param connectTimeout the connect timeout - * @param failOnErrors the fail on errors - * @param grpcStencilUrl the grpc stencil url - * @param type the type - * @param retainResponseType the retain response type - * @param headers the headers - * @param outputMapping the output mapping - * @param metricId the metric id - * @param capacity the capacity + * @param endpoint the endpoint + * @param servicePort the service port + * @param grpcRequestProtoSchema the grpc request proto schema + * @param grpcResponseProtoSchema the grpc response proto schema + * @param grpcMethodUrl the grpc method url + * @param requestPattern the request pattern + * @param grpcArgKeepaliveTimeMs the grpc Keepalive Time ms + * @param grpcArgKeepaliveTimeoutMs the grpc Keepalive Timeout ms + * @param requestVariables the request variables + * @param streamTimeout the stream timeout + * @param connectTimeout the connect timeout + * @param failOnErrors the fail on errors + * @param grpcStencilUrl the grpc stencil url + * @param type the type + * @param retainResponseType the retain response type + * @param headers the headers + * @param outputMapping the output mapping + * @param metricId the metric id + * @param capacity the capacity */ - public GrpcSourceConfig(String endpoint, int servicePort, String grpcRequestProtoSchema, String grpcResponseProtoSchema, String grpcMethodUrl, String requestPattern, String requestVariables, String streamTimeout, String connectTimeout, boolean failOnErrors, String grpcStencilUrl, String type, boolean retainResponseType, Map headers, Map outputMapping, String metricId, int capacity) { + public GrpcSourceConfig(String endpoint, int servicePort, String grpcRequestProtoSchema, String grpcResponseProtoSchema, String grpcMethodUrl, String grpcArgKeepaliveTimeMs, String grpcArgKeepaliveTimeoutMs, String requestPattern, String requestVariables, String streamTimeout, String connectTimeout, boolean failOnErrors, String grpcStencilUrl, String type, boolean retainResponseType, Map headers, Map outputMapping, String metricId, int capacity) { this.endpoint = endpoint; this.servicePort = servicePort; this.grpcRequestProtoSchema = grpcRequestProtoSchema; this.grpcResponseProtoSchema = grpcResponseProtoSchema; this.grpcMethodUrl = grpcMethodUrl; + this.grpcArgKeepaliveTimeMs = grpcArgKeepaliveTimeMs; + this.grpcArgKeepaliveTimeoutMs = grpcArgKeepaliveTimeoutMs; this.requestPattern = requestPattern; this.requestVariables = requestVariables; this.streamTimeout = streamTimeout; @@ -209,6 +215,42 @@ public String getGrpcMethodUrl() { return grpcMethodUrl; } + /** + * Gets grpc arg keepalive time ms. + * + * @return grpc arg keepalive time ms + */ + public String getGrpcArgKeepaliveTimeMs() { + return grpcArgKeepaliveTimeMs; + } + + /** + * Gets grpc arg keepalive timeout ms. 
+ * + * @return grpc arg keepalive timeout ms + */ + public String getGrpcArgKeepaliveTimeoutMs() { + return grpcArgKeepaliveTimeoutMs; + } + + /** + * Sets grpc arg keepalive time ms. + * + * @param grpcArgKeepaliveTimeMs the grpc arg keepalive time ms + */ + public void setGrpcArgKeepaliveTimeMs(String grpcArgKeepaliveTimeMs) { + this.grpcArgKeepaliveTimeMs = grpcArgKeepaliveTimeMs; + } + + /** + * Sets grpc arg keepalive timeout ms. + * + * @param grpcArgKeepaliveTimeoutMs the grpc arg keepalive timeout ms + */ + public void setGrpcArgKeepaliveTimeoutMs(String grpcArgKeepaliveTimeoutMs) { + this.grpcArgKeepaliveTimeoutMs = grpcArgKeepaliveTimeoutMs; + } + /** * Gets service port. * diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfigBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfigBuilder.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfigBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfigBuilder.java index 8e3507f70..da9393926 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfigBuilder.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfigBuilder.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.external.grpc; +package com.gotocompany.dagger.core.processors.external.grpc; -import io.odpf.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.OutputMapping; import java.util.Map; @@ -10,6 +10,8 @@ public class GrpcSourceConfigBuilder { private String grpcRequestProtoSchema; private String grpcResponseProtoSchema; private String grpcMethodUrl; + private String grpcArgKeepaliveTimeMs; + private String grpcArgKeepaliveTimeoutMs; private String requestPattern; private String requestVariables; private Map outputMapping; @@ -108,8 +110,18 @@ public GrpcSourceConfigBuilder setCapacity(int capacity) { return this; } + public GrpcSourceConfigBuilder setGrpcArgKeepaliveTimeMs(String grpcArgKeepaliveTimeMs) { + this.grpcArgKeepaliveTimeMs = grpcArgKeepaliveTimeMs; + return this; + } + + public GrpcSourceConfigBuilder setGrpcArgKeepaliveTimeoutMs(String grpcArgKeepaliveTimeoutMs) { + this.grpcArgKeepaliveTimeoutMs = grpcArgKeepaliveTimeoutMs; + return this; + } + public GrpcSourceConfig createGrpcSourceConfig() { - return new GrpcSourceConfig(endpoint, servicePort, grpcRequestProtoSchema, grpcResponseProtoSchema, grpcMethodUrl, requestPattern, requestVariables, + return new GrpcSourceConfig(endpoint, servicePort, grpcRequestProtoSchema, grpcResponseProtoSchema, grpcMethodUrl, grpcArgKeepaliveTimeMs, grpcArgKeepaliveTimeoutMs, requestPattern, requestVariables, streamTimeout, connectTimeout, failOnErrors, grpcStencilUrl, type, retainResponseType, headers, outputMapping, metricId, capacity); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcStreamDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcStreamDecorator.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcStreamDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcStreamDecorator.java index 39fd9dde1..4fd636c34 100644 --- 
a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcStreamDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcStreamDecorator.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external.grpc; +package com.gotocompany.dagger.core.processors.external.grpc; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.apache.flink.streaming.api.datastream.AsyncDataStream; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/DynamicMessageMarshaller.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/DynamicMessageMarshaller.java similarity index 94% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/DynamicMessageMarshaller.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/DynamicMessageMarshaller.java index 26a409b4c..1147cf0c9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/DynamicMessageMarshaller.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/DynamicMessageMarshaller.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.external.grpc.client; +package com.gotocompany.dagger.core.processors.external.grpc.client; import java.io.IOException; import java.io.InputStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcClient.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcClient.java similarity index 60% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcClient.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcClient.java index ada0a424d..8b6b70764 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcClient.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcClient.java @@ -1,22 +1,24 @@ -package io.odpf.dagger.core.processors.external.grpc.client; +package com.gotocompany.dagger.core.processors.external.grpc.client; -import io.odpf.dagger.core.exception.ChannelNotAvailableException; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; +import com.gotocompany.dagger.core.exception.ChannelNotAvailableException; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.DynamicMessage; -import io.grpc.Channel; -import io.grpc.ManagedChannelBuilder; -import io.grpc.ClientInterceptors; -import io.grpc.Metadata; import io.grpc.CallOptions; import io.grpc.ClientCall; +import io.grpc.Channel; import io.grpc.MethodDescriptor; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.Metadata; import io.grpc.stub.ClientCalls; import io.grpc.stub.MetadataUtils; import 
io.grpc.stub.StreamObserver; +import org.apache.commons.lang3.StringUtils; import java.util.Map; +import java.util.concurrent.TimeUnit; /** * The Grpc client. @@ -24,7 +26,11 @@ public class GrpcClient { private final GrpcSourceConfig grpcConfig; - private Channel decoratedChannel; + private ManagedChannel decoratedChannel; + + private final long defaultKeepAliveTimeout = 20000L; + + private final long defaultKeepAliveInterval = Long.MAX_VALUE; /** * Instantiates a new Grpc client. @@ -39,19 +45,27 @@ public GrpcClient(GrpcSourceConfig grpcConfig) { * Add channel. */ public void addChannel() { - Channel channel = ManagedChannelBuilder.forAddress(grpcConfig.getEndpoint(), grpcConfig.getServicePort()).usePlaintext().build(); + ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress(grpcConfig.getEndpoint(), grpcConfig.getServicePort()).usePlaintext(); + channelBuilder = decorateManagedChannelBuilder(channelBuilder); + decoratedChannel = channelBuilder.build(); + } + + protected ManagedChannelBuilder decorateManagedChannelBuilder(ManagedChannelBuilder channelBuilder) { - Metadata metadata = new Metadata(); + long keepAliveInterval = StringUtils.isNotEmpty(grpcConfig.getGrpcArgKeepaliveTimeMs()) ? Long.parseLong(grpcConfig.getGrpcArgKeepaliveTimeMs()) : defaultKeepAliveInterval; + long keepAliveTimeout = StringUtils.isNotEmpty(grpcConfig.getGrpcArgKeepaliveTimeoutMs()) ? Long.parseLong(grpcConfig.getGrpcArgKeepaliveTimeoutMs()) : defaultKeepAliveTimeout; + + channelBuilder = channelBuilder.keepAliveTime(keepAliveInterval, TimeUnit.MILLISECONDS).keepAliveTimeout(keepAliveTimeout, TimeUnit.MILLISECONDS); if (grpcConfig.getHeaders() != null && !grpcConfig.getHeaders().isEmpty()) { + Metadata metadata = new Metadata(); for (Map.Entry header : grpcConfig.getHeaders().entrySet()) { metadata.put(Metadata.Key.of(header.getKey(), Metadata.ASCII_STRING_MARSHALLER), header.getValue()); } + channelBuilder.intercept(MetadataUtils.newAttachHeadersInterceptor(metadata)); } - decoratedChannel = ClientInterceptors.intercept(channel, - MetadataUtils.newAttachHeadersInterceptor(metadata)); - + return channelBuilder; } /** @@ -89,6 +103,9 @@ private ClientCall createCall(CallOptions callOp * Close channel. 
*/ public void close() { + if (decoratedChannel != null && !decoratedChannel.isShutdown()) { + decoratedChannel.shutdown(); + } this.decoratedChannel = null; } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcRequestHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcRequestHandler.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcRequestHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcRequestHandler.java index 69f4a5d37..1e09e82ee 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcRequestHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcRequestHandler.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external.grpc.client; +package com.gotocompany.dagger.core.processors.external.grpc.client; -import io.odpf.dagger.core.exception.InvalidGrpcBodyException; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidGrpcBodyException; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; import com.google.protobuf.DynamicMessage; import com.google.protobuf.util.JsonFormat; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpAsyncConnector.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpAsyncConnector.java similarity index 60% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpAsyncConnector.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpAsyncConnector.java index 591f1c927..f7b1592cd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpAsyncConnector.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpAsyncConnector.java @@ -1,26 +1,34 @@ -package io.odpf.dagger.core.processors.external.http; - -import io.odpf.dagger.core.exception.InvalidHttpVerbException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.external.AsyncConnector; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.external.http.request.HttpRequestFactory; +package com.gotocompany.dagger.core.processors.external.http; + +import com.gotocompany.dagger.core.exception.InvalidHttpVerbException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; 
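// Illustrative sketch only, not part of the diff above: how the new keepalive options added to
// GrpcSourceConfigBuilder and applied in GrpcClient#decorateManagedChannelBuilder fit together.
// The endpoint/port setters and their values are assumptions for the example; only the two new
// keepalive setters are taken from this change.
import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig;
import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfigBuilder;
import com.gotocompany.dagger.core.processors.external.grpc.client.GrpcClient;

class GrpcKeepaliveSketch {
    public static void main(String[] args) {
        GrpcSourceConfig grpcSourceConfig = new GrpcSourceConfigBuilder()
                .setEndpoint("localhost")              // assumed builder setter, hypothetical host
                .setServicePort(8080)                  // assumed builder setter, hypothetical port
                .setGrpcArgKeepaliveTimeMs("60000")    // send a keepalive ping after 60 s of inactivity
                .setGrpcArgKeepaliveTimeoutMs("5000")  // close the connection if the ping is not acked within 5 s
                .createGrpcSourceConfig();

        GrpcClient grpcClient = new GrpcClient(grpcSourceConfig);
        grpcClient.addChannel();  // decorateManagedChannelBuilder applies keepAliveTime/keepAliveTimeout here
        // If either option is left empty, the defaults from this change apply:
        // keepAliveTime = Long.MAX_VALUE (keepalive effectively disabled), keepAliveTimeout = 20000 ms.
        grpcClient.close();       // close() now also shuts down the underlying ManagedChannel
    }
}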
+import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.http.request.HttpRequestFactory; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.processors.external.AsyncConnector; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; +import io.netty.util.internal.StringUtil; +import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.core.utils.Constants; -import io.odpf.dagger.core.utils.Constants.ExternalPostProcessorVariableType; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + import static org.asynchttpclient.Dsl.asyncHttpClient; import static org.asynchttpclient.Dsl.config; @@ -32,6 +40,7 @@ public class HttpAsyncConnector extends AsyncConnector { private static final Logger LOGGER = LoggerFactory.getLogger(HttpAsyncConnector.class.getName()); private AsyncHttpClient httpClient; private HttpSourceConfig httpSourceConfig; + private Set failOnErrorsExclusionSet; /** * Instantiates a new Http async connector with specified http client. @@ -81,6 +90,12 @@ protected void createClient() { } } + @Override + public void open(Configuration configuration) throws Exception { + super.open(configuration); + setFailOnErrorsExclusionSet(httpSourceConfig.getExcludeFailOnErrorsCodeRange()); + } + @Override public void close() throws Exception { httpClient.close(); @@ -95,17 +110,16 @@ protected void process(Row input, ResultFuture resultFuture) { RowManager rowManager = new RowManager(input); Object[] requestVariablesValues = getEndpointHandler() - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, httpSourceConfig.getRequestVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, httpSourceConfig.getRequestVariables(), resultFuture); Object[] dynamicHeaderVariablesValues = getEndpointHandler() - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.HEADER_VARIABLES, httpSourceConfig.getHeaderVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.HEADER_VARIABLES, httpSourceConfig.getHeaderVariables(), resultFuture); Object[] endpointVariablesValues = getEndpointHandler() - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.ENDPOINT_VARIABLE, httpSourceConfig.getEndpointVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.ENDPOINT_VARIABLE, httpSourceConfig.getEndpointVariables(), resultFuture); if (getEndpointHandler().isQueryInvalid(resultFuture, rowManager, httpSourceConfig.getRequestVariables(), requestVariablesValues) || getEndpointHandler().isQueryInvalid(resultFuture, rowManager, httpSourceConfig.getHeaderVariables(), dynamicHeaderVariablesValues)) { return; } - BoundRequestBuilder request = HttpRequestFactory.createRequest(httpSourceConfig, httpClient, requestVariablesValues, dynamicHeaderVariablesValues, endpointVariablesValues); - HttpResponseHandler httpResponseHandler = new 
HttpResponseHandler(httpSourceConfig, getMeterStatsManager(), + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, getFailOnErrorsExclusionSet(), getMeterStatsManager(), rowManager, getColumnNameManager(), getOutputDescriptor(resultFuture), resultFuture, getErrorReporter(), new PostResponseTelemetry()); httpResponseHandler.startTimer(); request.execute(httpResponseHandler); @@ -115,4 +129,19 @@ protected void process(Row input, ResultFuture resultFuture) { } } + + protected Set getFailOnErrorsExclusionSet() { + return failOnErrorsExclusionSet; + } + + private void setFailOnErrorsExclusionSet(String excludeFailOnErrorsCodeRange) { + failOnErrorsExclusionSet = new HashSet(); + if (!StringUtil.isNullOrEmpty(excludeFailOnErrorsCodeRange)) { + String[] ranges = excludeFailOnErrorsCodeRange.split(","); + Arrays.stream(ranges).forEach(range -> { + List rangeList = Arrays.stream(range.split("-")).map(Integer::parseInt).collect(Collectors.toList()); + IntStream.rangeClosed(rangeList.get(0), rangeList.get(rangeList.size() - 1)).forEach(statusCode -> failOnErrorsExclusionSet.add(statusCode)); + }); + } + } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpResponseHandler.java similarity index 66% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpResponseHandler.java index 2691b9f77..64a3abd91 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpResponseHandler.java @@ -1,21 +1,21 @@ -package io.odpf.dagger.core.processors.external.http; - -import io.odpf.dagger.core.exception.HttpFailureException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; +package com.gotocompany.dagger.core.processors.external.http; + import com.google.protobuf.Descriptors; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.core.exception.HttpFailureException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.utils.DescriptorsUtil; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.PathNotFoundException; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; - 
-import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; import org.asynchttpclient.AsyncCompletionHandler; import org.asynchttpclient.Response; import org.slf4j.Logger; @@ -23,8 +23,9 @@ import java.time.Instant; import java.util.ArrayList; -import java.util.Collections; import java.util.Map; +import java.util.Collections; +import java.util.Set; import java.util.regex.Pattern; /** @@ -39,6 +40,7 @@ public class HttpResponseHandler extends AsyncCompletionHandler { private Descriptors.Descriptor descriptor; private ResultFuture resultFuture; private HttpSourceConfig httpSourceConfig; + private Set failOnErrorsExclusionSet; private MeterStatsManager meterStatsManager; private Instant startTime; private ErrorReporter errorReporter; @@ -48,20 +50,22 @@ public class HttpResponseHandler extends AsyncCompletionHandler { /** * Instantiates a new Http response handler. * - * @param httpSourceConfig the http source config - * @param meterStatsManager the meter stats manager - * @param rowManager the row manager - * @param columnNameManager the column name manager - * @param descriptor the descriptor - * @param resultFuture the result future - * @param errorReporter the error reporter - * @param postResponseTelemetry the post response telemetry + * @param httpSourceConfig the http source config + * @param failOnErrorsExclusionSet the fail on error exclusion set + * @param meterStatsManager the meter stats manager + * @param rowManager the row manager + * @param columnNameManager the column name manager + * @param descriptor the descriptor + * @param resultFuture the result future + * @param errorReporter the error reporter + * @param postResponseTelemetry the post response telemetry */ - public HttpResponseHandler(HttpSourceConfig httpSourceConfig, MeterStatsManager meterStatsManager, RowManager rowManager, + public HttpResponseHandler(HttpSourceConfig httpSourceConfig, Set failOnErrorsExclusionSet, MeterStatsManager meterStatsManager, RowManager rowManager, ColumnNameManager columnNameManager, Descriptors.Descriptor descriptor, ResultFuture resultFuture, ErrorReporter errorReporter, PostResponseTelemetry postResponseTelemetry) { this.httpSourceConfig = httpSourceConfig; + this.failOnErrorsExclusionSet = failOnErrorsExclusionSet; this.meterStatsManager = meterStatsManager; this.rowManager = rowManager; this.columnNameManager = columnNameManager; @@ -86,15 +90,16 @@ public Object onCompleted(Response response) { successHandler(response); } else { postResponseTelemetry.validateResponseCode(meterStatsManager, statusCode); - failureHandler("Received status code : " + statusCode); + failureHandler("Received status code : " + statusCode, statusCode); } return response; } @Override public void onThrowable(Throwable t) { + t.printStackTrace(); meterStatsManager.markEvent(ExternalSourceAspects.OTHER_ERRORS); - failureHandler(t.getMessage()); + failureHandler(t.getMessage(), 0); } private void successHandler(Response response) { @@ -123,12 +128,13 @@ private void successHandler(Response response) { * Failure handler. 
* * @param logMessage the log message + * @param statusCode the status code */ - public void failureHandler(String logMessage) { + public void failureHandler(String logMessage, Integer statusCode) { postResponseTelemetry.sendFailureTelemetry(meterStatsManager, startTime); LOGGER.error(logMessage); Exception httpFailureException = new HttpFailureException(logMessage); - if (httpSourceConfig.isFailOnErrors()) { + if (shouldFailOnError(statusCode)) { reportAndThrowError(httpFailureException); } else { errorReporter.reportNonFatalException(httpFailureException); @@ -136,6 +142,13 @@ public void failureHandler(String logMessage) { resultFuture.complete(Collections.singleton(rowManager.getAll())); } + private boolean shouldFailOnError(Integer statusCode) { + if (httpSourceConfig.isFailOnErrors() && (statusCode == 0 || !failOnErrorsExclusionSet.contains(statusCode))) { + return true; + } + return false; + } + private void setField(String key, Object value, int fieldIndex) { if (!httpSourceConfig.isRetainResponseType() || httpSourceConfig.hasType()) { setFieldUsingType(key, value, fieldIndex); @@ -145,10 +158,14 @@ private void setField(String key, Object value, int fieldIndex) { } private void setFieldUsingType(String key, Object value, Integer fieldIndex) { - Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName(key); - if (fieldDescriptor == null) { - IllegalArgumentException illegalArgumentException = new IllegalArgumentException("Field Descriptor not found for field: " + key); - reportAndThrowError(illegalArgumentException); + Descriptors.FieldDescriptor fieldDescriptor = null; + try { + fieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, key); + if (fieldDescriptor == null) { + throw new IllegalArgumentException("Field Descriptor not found for field: " + key); + } + } catch (RuntimeException exception) { + reportAndThrowError(exception); } TypeHandler typeHandler = TypeHandlerFactory.getTypeHandler(fieldDescriptor); rowManager.setInOutput(fieldIndex, typeHandler.transformFromPostProcessor(value)); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpSourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpSourceConfig.java similarity index 78% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpSourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpSourceConfig.java index 9d66cbdb4..a775452fe 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpSourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpSourceConfig.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.http; +package com.gotocompany.dagger.core.processors.external.http; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.types.SourceConfig; import com.google.gson.annotations.SerializedName; import org.apache.commons.lang3.StringUtils; @@ -26,6 +26,7 @@ public class HttpSourceConfig implements Serializable, SourceConfig { private String streamTimeout; private String connectTimeout; private boolean failOnErrors; + private String excludeFailOnErrorsCodeRange; @SerializedName(value = "type", alternate = {"Type", "TYPE"}) private String type; private 
String capacity; @@ -49,6 +50,7 @@ public class HttpSourceConfig implements Serializable, SourceConfig { * @param streamTimeout the stream timeout * @param connectTimeout the connect timeout * @param failOnErrors the fail on errors + * @param excludeFailOnErrorsCodeRange the exclude fail on errors code range * @param type the type * @param capacity the capacity * @param headers the static headers @@ -56,7 +58,7 @@ public class HttpSourceConfig implements Serializable, SourceConfig { * @param metricId the metric id * @param retainResponseType the retain response type */ - public HttpSourceConfig(String endpoint, String endpointVariables, String verb, String requestPattern, String requestVariables, String headerPattern, String headerVariables, String streamTimeout, String connectTimeout, boolean failOnErrors, String type, String capacity, Map headers, Map outputMapping, String metricId, boolean retainResponseType) { + public HttpSourceConfig(String endpoint, String endpointVariables, String verb, String requestPattern, String requestVariables, String headerPattern, String headerVariables, String streamTimeout, String connectTimeout, boolean failOnErrors, String excludeFailOnErrorsCodeRange, String type, String capacity, Map headers, Map outputMapping, String metricId, boolean retainResponseType) { this.endpoint = endpoint; this.endpointVariables = endpointVariables; this.verb = verb; @@ -67,6 +69,7 @@ public HttpSourceConfig(String endpoint, String endpointVariables, String verb, this.streamTimeout = streamTimeout; this.connectTimeout = connectTimeout; this.failOnErrors = failOnErrors; + this.excludeFailOnErrorsCodeRange = excludeFailOnErrorsCodeRange; this.type = type; this.capacity = capacity; this.headers = headers; @@ -162,6 +165,16 @@ public boolean isFailOnErrors() { return failOnErrors; } + /** + * Gets failOnErrorsCodeRange Variable. 
+ * + * @return the failOnErrorsCodeRange Variable + */ + public String getExcludeFailOnErrorsCodeRange() { + return excludeFailOnErrorsCodeRange; + } + + @Override public String getMetricId() { return metricId; @@ -245,11 +258,11 @@ public boolean equals(Object o) { return false; } HttpSourceConfig that = (HttpSourceConfig) o; - return failOnErrors == that.failOnErrors && retainResponseType == that.retainResponseType && Objects.equals(endpoint, that.endpoint) && Objects.equals(verb, that.verb) && Objects.equals(requestPattern, that.requestPattern) && Objects.equals(requestVariables, that.requestVariables) && Objects.equals(headerPattern, that.headerPattern) && Objects.equals(headerVariables, that.headerVariables) && Objects.equals(streamTimeout, that.streamTimeout) && Objects.equals(connectTimeout, that.connectTimeout) && Objects.equals(type, that.type) && Objects.equals(capacity, that.capacity) && Objects.equals(headers, that.headers) && Objects.equals(outputMapping, that.outputMapping) && Objects.equals(metricId, that.metricId); + return failOnErrors == that.failOnErrors && excludeFailOnErrorsCodeRange == that.excludeFailOnErrorsCodeRange && retainResponseType == that.retainResponseType && Objects.equals(endpoint, that.endpoint) && Objects.equals(verb, that.verb) && Objects.equals(requestPattern, that.requestPattern) && Objects.equals(requestVariables, that.requestVariables) && Objects.equals(headerPattern, that.headerPattern) && Objects.equals(headerVariables, that.headerVariables) && Objects.equals(streamTimeout, that.streamTimeout) && Objects.equals(connectTimeout, that.connectTimeout) && Objects.equals(type, that.type) && Objects.equals(capacity, that.capacity) && Objects.equals(headers, that.headers) && Objects.equals(outputMapping, that.outputMapping) && Objects.equals(metricId, that.metricId); } @Override public int hashCode() { - return Objects.hash(endpoint, endpointVariables, verb, requestPattern, requestVariables, headerPattern, headerVariables, streamTimeout, connectTimeout, failOnErrors, type, capacity, headers, outputMapping, metricId, retainResponseType); + return Objects.hash(endpoint, endpointVariables, verb, requestPattern, requestVariables, headerPattern, headerVariables, streamTimeout, connectTimeout, failOnErrors, excludeFailOnErrorsCodeRange, type, capacity, headers, outputMapping, metricId, retainResponseType); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpStreamDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpStreamDecorator.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpStreamDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpStreamDecorator.java index 22e8acbf9..78d8c6820 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpStreamDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/HttpStreamDecorator.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external.http; +package com.gotocompany.dagger.core.processors.external.http; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.types.StreamDecorator; 
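// Illustrative sketch only, not part of the diff above: how the new excludeFailOnErrorsCodeRange
// value is expanded, mirroring HttpAsyncConnector#setFailOnErrorsExclusionSet. The value
// "404,500-503" is a hypothetical example; with failOnErrors enabled, responses whose status codes
// fall in the expanded set are reported as non-fatal by HttpResponseHandler#shouldFailOnError
// instead of failing the job.
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

class ExcludeFailOnErrorsRangeSketch {
    public static void main(String[] args) {
        String excludeFailOnErrorsCodeRange = "404,500-503";
        Set<Integer> exclusionSet = new HashSet<>();
        for (String range : excludeFailOnErrorsCodeRange.split(",")) {
            // single values ("404") and ranges ("500-503") both expand via rangeClosed(first, last)
            List<Integer> bounds = Arrays.stream(range.split("-"))
                    .map(Integer::parseInt)
                    .collect(Collectors.toList());
            IntStream.rangeClosed(bounds.get(0), bounds.get(bounds.size() - 1))
                    .forEach(exclusionSet::add);
        }
        System.out.println(exclusionSet.contains(502)); // true  -> reported as non-fatal, row still completes
        System.out.println(exclusionSet.contains(400)); // false -> still fatal when failOnErrors is set
    }
}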
+import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.apache.flink.streaming.api.datastream.AsyncDataStream; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpGetRequestHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpGetRequestHandler.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpGetRequestHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpGetRequestHandler.java index da9d8889b..4e5cb064d 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpGetRequestHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpGetRequestHandler.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; import com.google.gson.Gson; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; import io.netty.util.internal.StringUtil; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpPostRequestHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPostRequestHandler.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpPostRequestHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPostRequestHandler.java index 357373415..b675b786e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpPostRequestHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPostRequestHandler.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; import com.google.gson.Gson; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; import io.netty.util.internal.StringUtil; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpPutRequestHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPutRequestHandler.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpPutRequestHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPutRequestHandler.java index a7603181c..16988dff4 100644 --- 
a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpPutRequestHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPutRequestHandler.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; import com.google.gson.Gson; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; import io.netty.util.internal.StringUtil; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestFactory.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestFactory.java index 6f713cbef..54e88bccf 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestFactory.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; -import io.odpf.dagger.core.exception.InvalidHttpVerbException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidHttpVerbException; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestHandler.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestHandler.java index 122e6b687..7e22ac0cc 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestHandler.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; import org.asynchttpclient.BoundRequestBuilder; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgAsyncConnector.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgAsyncConnector.java similarity index 83% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgAsyncConnector.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgAsyncConnector.java index c1b83d502..050656f56 100644 --- 
a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgAsyncConnector.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgAsyncConnector.java @@ -1,16 +1,15 @@ -package io.odpf.dagger.core.processors.external.pg; - -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.external.AsyncConnector; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.utils.Constants; -import io.odpf.dagger.core.utils.Constants.ExternalPostProcessorVariableType; +package com.gotocompany.dagger.core.processors.external.pg; + +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; +import com.gotocompany.dagger.core.processors.external.AsyncConnector; import io.vertx.core.Vertx; import io.vertx.core.VertxOptions; import io.vertx.pgclient.PgConnectOptions; @@ -26,8 +25,6 @@ import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.core.utils.Constants.MAX_EVENT_LOOP_EXECUTE_TIME_DEFAULT; - /** * The Postgre async connector. 
*/ @@ -90,7 +87,7 @@ public void process(Row input, ResultFuture resultFuture) { RowManager rowManager = new RowManager(input); Object[] queryVariablesValues = getEndpointHandler() - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.QUERY_VARIABLES, pgSourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.QUERY_VARIABLES, pgSourceConfig.getVariables(), resultFuture); if (getEndpointHandler().isQueryInvalid(resultFuture, rowManager, pgSourceConfig.getVariables(), queryVariablesValues)) { return; } @@ -123,7 +120,7 @@ private PgPool pool(PgConnectOptions connectOptions, PoolOptions poolOptions) { throw new IllegalStateException("Running in a Vertx context => use PgPool#pool(Vertx, PgConnectOptions, PoolOptions) instead"); } VertxOptions vertxOptions = new VertxOptions(); - vertxOptions.setMaxEventLoopExecuteTime(MAX_EVENT_LOOP_EXECUTE_TIME_DEFAULT); + vertxOptions.setMaxEventLoopExecuteTime(Constants.MAX_EVENT_LOOP_EXECUTE_TIME_DEFAULT); vertxOptions.setMaxEventLoopExecuteTimeUnit(TimeUnit.MILLISECONDS); if (connectOptions.isUsingDomainSocket()) { vertxOptions.setPreferNativeTransport(true); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgResponseHandler.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgResponseHandler.java index bcfd9cec8..7c319fad6 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgResponseHandler.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; -import io.odpf.dagger.core.exception.HttpFailureException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.common.serde.typehandler.TypeHandler; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.core.exception.HttpFailureException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandler; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; import com.google.protobuf.Descriptors; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.sqlclient.RowSet; @@ -24,7 +24,7 @@ import java.util.List; import java.util.Map; -import static io.odpf.dagger.common.serde.typehandler.RowFactory.createRow; +import static 
com.gotocompany.dagger.common.serde.typehandler.RowFactory.createRow; /** * The Postgre response handler. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfig.java similarity index 98% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfig.java index 9d8f775d4..8b8a665bd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfig.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; -import io.odpf.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.core.processors.types.SourceConfig; import com.google.gson.annotations.SerializedName; import org.apache.commons.lang3.StringUtils; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfigBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfigBuilder.java similarity index 98% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfigBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfigBuilder.java index 5b1d4d937..c348eafef 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfigBuilder.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfigBuilder.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; import java.util.Map; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgStreamDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgStreamDecorator.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgStreamDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgStreamDecorator.java index 6f030e625..453aa5bbb 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgStreamDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/external/pg/PgStreamDecorator.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.apache.flink.streaming.api.datastream.AsyncDataStream; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalDecorator.java similarity index 82% rename from 
dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalDecorator.java index 6336d93df..eb65bf14e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalDecorator.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal; +package com.gotocompany.dagger.core.processors.internal; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.types.MapDecorator; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.types.MapDecorator; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalPostProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalPostProcessor.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalPostProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalPostProcessor.java index 3a168b732..2912d2405 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalPostProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalPostProcessor.java @@ -1,18 +1,18 @@ -package io.odpf.dagger.core.processors.internal; +package com.gotocompany.dagger.core.processors.internal; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.processors.types.StreamDecorator; -import io.odpf.dagger.core.processors.types.Validator; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigHandlerFactory; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.processors.types.StreamDecorator; +import com.gotocompany.dagger.core.processors.types.Validator; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigHandlerFactory; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.common.core.StreamInfo; /** * The Internal post processor. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalSourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalSourceConfig.java similarity index 94% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalSourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalSourceConfig.java index aed062302..7a9ee0a87 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/InternalSourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/InternalSourceConfig.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.internal; +package com.gotocompany.dagger.core.processors.internal; -import io.odpf.dagger.core.processors.types.Validator; +import com.gotocompany.dagger.core.processors.types.Validator; import java.io.Serializable; import java.util.HashMap; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigHandlerFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigHandlerFactory.java similarity index 67% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigHandlerFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigHandlerFactory.java index cc4a9e198..7f39d0c7d 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigHandlerFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigHandlerFactory.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.processors.internal.processor; +package com.gotocompany.dagger.core.processors.internal.processor; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; -import io.odpf.dagger.core.processors.internal.processor.constant.ConstantInternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.function.FunctionInternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.invalid.InvalidInternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.sql.fields.SqlInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.internal.processor.constant.ConstantInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.function.FunctionInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.invalid.InvalidInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.internal.processor.sql.fields.SqlInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; import java.util.Arrays; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigProcessor.java 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigProcessor.java similarity index 74% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigProcessor.java index 5361154f1..78c57bf77 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigProcessor.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.internal.processor; +package com.gotocompany.dagger.core.processors.internal.processor; -import io.odpf.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.RowManager; /** * The interface for Internal config processor. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessor.java similarity index 76% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessor.java index f54652874..c7aeec1ea 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessor.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.constant; +package com.gotocompany.dagger.core.processors.internal.processor.constant; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessor.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessor.java index f4c388e47..6891b2e7d 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessor.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.internal.processor.function; +package 
com.gotocompany.dagger.core.processors.internal.processor.function; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessor.java similarity index 71% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessor.java index ac505e82a..e06d47909 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessor.java @@ -1,5 +1,5 @@ -package io.odpf.dagger.core.processors.internal.processor.function; -import io.odpf.dagger.core.processors.common.RowManager; +package com.gotocompany.dagger.core.processors.internal.processor.function; +import com.gotocompany.dagger.core.processors.common.RowManager; public interface FunctionProcessor { /** diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessorFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessorFactory.java similarity index 70% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessorFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessorFactory.java index c1abc343e..48da06ea2 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessorFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessorFactory.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.internal.processor.function; +package com.gotocompany.dagger.core.processors.internal.processor.function; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.function.functions.CurrentTimestampFunction; -import io.odpf.dagger.core.processors.internal.processor.function.functions.JsonPayloadFunction; -import io.odpf.dagger.core.processors.internal.processor.function.functions.InvalidFunction; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import 
com.gotocompany.dagger.core.processors.internal.processor.function.functions.CurrentTimestampFunction; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.JsonPayloadFunction; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.InvalidFunction; import java.time.Clock; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunction.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunction.java similarity index 74% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunction.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunction.java index adb2f079d..b177ab8db 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunction.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunction.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.internal.processor.function.functions; +package com.gotocompany.dagger.core.processors.internal.processor.function.functions; -import io.odpf.dagger.core.processors.internal.processor.function.FunctionProcessor; -import io.odpf.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.processor.function.FunctionProcessor; import java.sql.Timestamp; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/InvalidFunction.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/InvalidFunction.java similarity index 69% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/InvalidFunction.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/InvalidFunction.java index 0276ed3f8..596c8d004 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/InvalidFunction.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/InvalidFunction.java @@ -1,10 +1,11 @@ -package io.odpf.dagger.core.processors.internal.processor.function.functions; +package com.gotocompany.dagger.core.processors.internal.processor.function.functions; -import io.odpf.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.processor.function.FunctionProcessor; + +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.function.FunctionProcessor; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.common.RowManager; import java.io.Serializable; public class InvalidFunction implements FunctionProcessor, Serializable { private InternalSourceConfig internalSourceConfig; diff --git 
a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunction.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunction.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunction.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunction.java index 65ffc8f74..c93ca69bd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunction.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunction.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.core.processors.internal.processor.function.functions; +package com.gotocompany.dagger.core.processors.internal.processor.function.functions; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.function.FunctionProcessor; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.common.serde.typehandler.TypeInformationFactory; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.internal.processor.function.FunctionProcessor; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.common.serde.typehandler.TypeInformationFactory; import com.google.protobuf.Descriptors; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.stencil.client.StencilClient; import org.apache.flink.formats.json.JsonRowSerializationSchema; import java.util.Map; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessor.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessor.java index 13d133d93..e71953063 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessor.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.invalid; +package com.gotocompany.dagger.core.processors.internal.processor.invalid; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import 
com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; import org.apache.commons.lang3.StringUtils; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParser.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParser.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParser.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParser.java index 9e1c2edf8..d9dfd7448 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParser.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParser.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.internal.processor.sql; +package com.gotocompany.dagger.core.processors.internal.processor.sql; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlInternalFieldConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlInternalFieldConfig.java similarity index 64% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlInternalFieldConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlInternalFieldConfig.java index 1c91e460e..7297d8ec9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlInternalFieldConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlInternalFieldConfig.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.internal.processor.sql; +package com.gotocompany.dagger.core.processors.internal.processor.sql; -import io.odpf.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.RowManager; /** * The interface for Sql internal field config. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImport.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImport.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImport.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImport.java index 8af047fe5..cc6ed6413 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImport.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImport.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; /** * The Sql internal auto field import. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessor.java similarity index 71% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessor.java index 08a57c38f..235c54def 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessor.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; - -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; + +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; import java.io.Serializable; diff --git 
a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactory.java similarity index 69% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactory.java index b9423f02e..84555233a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactory.java @@ -1,11 +1,10 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; - -import static io.odpf.dagger.core.utils.Constants.SQL_PATH_SELECT_ALL_CONFIG_VALUE; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; /** * The factory class for Sql internal field processor. 
@@ -42,6 +41,6 @@ public SqlInternalFieldConfig getSqlInternalFieldConfig() { } private boolean selectAllFromInputColumns() { - return SQL_PATH_SELECT_ALL_CONFIG_VALUE.equals(internalSourceConfig.getOutputField()); + return Constants.SQL_PATH_SELECT_ALL_CONFIG_VALUE.equals(internalSourceConfig.getOutputField()); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImport.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImport.java similarity index 71% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImport.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImport.java index 95ae11cd8..6718d9d1f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImport.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImport.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; /** * The Sql internal field import. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/AsyncProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/AsyncProcessor.java similarity index 94% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/AsyncProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/AsyncProcessor.java index de2d6c412..57f5692ec 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/AsyncProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/AsyncProcessor.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; import org.apache.flink.streaming.api.datastream.AsyncDataStream; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowFactory.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowFactory.java index 7c1a7f86c..ea35c6222 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowFactory.java @@ -1,40 +1,40 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; import com.google.gson.Gson; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.processors.longbow.columnmodifier.LongbowReadColumnModifier; -import io.odpf.dagger.core.processors.longbow.columnmodifier.LongbowWriteColumnModifier; -import io.odpf.dagger.core.processors.longbow.columnmodifier.NoOpColumnModifier; -import io.odpf.dagger.core.processors.longbow.data.LongbowProtoData; -import io.odpf.dagger.core.processors.longbow.data.LongbowTableData; -import io.odpf.dagger.core.processors.longbow.outputRow.OutputIdentity; -import io.odpf.dagger.core.processors.longbow.outputRow.OutputSynchronizer; -import io.odpf.dagger.core.processors.longbow.outputRow.ReaderOutputLongbowData; -import io.odpf.dagger.core.processors.longbow.outputRow.ReaderOutputProtoData; -import io.odpf.dagger.core.processors.longbow.processor.LongbowReader; -import io.odpf.dagger.core.processors.longbow.processor.LongbowWriter; -import io.odpf.dagger.core.processors.longbow.range.LongbowRange; -import io.odpf.dagger.core.processors.longbow.range.LongbowRangeFactory; -import io.odpf.dagger.core.processors.longbow.request.PutRequestFactory; -import io.odpf.dagger.core.processors.longbow.request.ScanRequestFactory; -import io.odpf.dagger.core.processors.longbow.validator.LongbowType; -import io.odpf.dagger.core.processors.longbow.validator.LongbowValidator; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; +import 
com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.LongbowReadColumnModifier; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.LongbowWriteColumnModifier; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.NoOpColumnModifier; +import com.gotocompany.dagger.core.processors.longbow.data.LongbowProtoData; +import com.gotocompany.dagger.core.processors.longbow.data.LongbowTableData; +import com.gotocompany.dagger.core.processors.longbow.outputRow.OutputIdentity; +import com.gotocompany.dagger.core.processors.longbow.outputRow.OutputSynchronizer; +import com.gotocompany.dagger.core.processors.longbow.outputRow.ReaderOutputLongbowData; +import com.gotocompany.dagger.core.processors.longbow.outputRow.ReaderOutputProtoData; +import com.gotocompany.dagger.core.processors.longbow.processor.LongbowReader; +import com.gotocompany.dagger.core.processors.longbow.processor.LongbowWriter; +import com.gotocompany.dagger.core.processors.longbow.range.LongbowRange; +import com.gotocompany.dagger.core.processors.longbow.range.LongbowRangeFactory; +import com.gotocompany.dagger.core.processors.longbow.request.PutRequestFactory; +import com.gotocompany.dagger.core.processors.longbow.request.ScanRequestFactory; +import com.gotocompany.dagger.core.processors.longbow.validator.LongbowType; +import com.gotocompany.dagger.core.processors.longbow.validator.LongbowValidator; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import java.util.ArrayList; import java.util.Map; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; -import static io.odpf.dagger.core.utils.Constants.DAGGER_NAME_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.DAGGER_NAME_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_LONGBOW_GCP_TABLE_ID_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; +import static com.gotocompany.dagger.core.utils.Constants.DAGGER_NAME_DEFAULT; +import static com.gotocompany.dagger.core.utils.Constants.DAGGER_NAME_KEY; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_LONGBOW_GCP_TABLE_ID_KEY; /** * The factory class for Longbow. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowProcessor.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowProcessor.java index 401161bf0..e5edbfd23 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowProcessor.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.ColumnModifier; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.longbow.columnmodifier.ColumnModifier; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StreamInfo; import java.util.ArrayList; import java.util.concurrent.TimeUnit; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowSchema.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowSchema.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowSchema.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowSchema.java index 3a68526c9..f486278a8 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/LongbowSchema.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/LongbowSchema.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.exception.InvalidLongbowDurationException; +import com.gotocompany.dagger.core.processors.longbow.validator.LongbowType; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.types.Row; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.exception.InvalidLongbowDurationException; -import io.odpf.dagger.core.processors.longbow.validator.LongbowType; -import io.odpf.dagger.core.utils.Constants; import org.apache.hadoop.hbase.util.Bytes; import java.io.Serializable; @@ -19,7 +19,7 @@ import java.util.function.Predicate; import java.util.stream.Collectors; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; /** * A class that holds the Longbow schema. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/ColumnModifier.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/ColumnModifier.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/ColumnModifier.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/ColumnModifier.java index faba1c0a2..7721b56cc 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/ColumnModifier.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/ColumnModifier.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.columnmodifier; +package com.gotocompany.dagger.core.processors.longbow.columnmodifier; /** * The interface Column modifier. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/LongbowReadColumnModifier.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/LongbowReadColumnModifier.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/LongbowReadColumnModifier.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/LongbowReadColumnModifier.java index fed11fa7e..7c75cb988 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/LongbowReadColumnModifier.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/LongbowReadColumnModifier.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.columnmodifier; +package com.gotocompany.dagger.core.processors.longbow.columnmodifier; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; import java.util.ArrayList; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/LongbowWriteColumnModifier.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/LongbowWriteColumnModifier.java similarity index 83% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/LongbowWriteColumnModifier.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/LongbowWriteColumnModifier.java index 10eb8399e..156aa003c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/LongbowWriteColumnModifier.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/LongbowWriteColumnModifier.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.columnmodifier; +package com.gotocompany.dagger.core.processors.longbow.columnmodifier; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; import java.util.ArrayList; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/NoOpColumnModifier.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/NoOpColumnModifier.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/NoOpColumnModifier.java rename to 
dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/NoOpColumnModifier.java index e4f74f93c..a93b40a0c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/columnmodifier/NoOpColumnModifier.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/columnmodifier/NoOpColumnModifier.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.columnmodifier; +package com.gotocompany.dagger.core.processors.longbow.columnmodifier; /** * The No op column modifier. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowData.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowData.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowData.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowData.java index 0ba6634b7..4815550f9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowData.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowData.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; import org.apache.hadoop.hbase.client.Result; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowDataFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowDataFactory.java similarity index 83% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowDataFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowDataFactory.java index f8179a86e..ac750eb0b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowDataFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowDataFactory.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; /** * The factory class for Longbow data. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowProtoData.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowProtoData.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowProtoData.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowProtoData.java index 2525451b0..7de943df4 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowProtoData.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowProtoData.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowTableData.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowTableData.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowTableData.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowTableData.java index 524af0c17..f131e7587 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/data/LongbowTableData.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowTableData.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/exceptions/LongbowReaderException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/exceptions/LongbowReaderException.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/exceptions/LongbowReaderException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/exceptions/LongbowReaderException.java index 2d1c7a5a8..d1a210429 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/exceptions/LongbowReaderException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/exceptions/LongbowReaderException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.exceptions; +package com.gotocompany.dagger.core.processors.longbow.exceptions; /** * The Exception for Longbow reader. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/exceptions/LongbowWriterException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/exceptions/LongbowWriterException.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/exceptions/LongbowWriterException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/exceptions/LongbowWriterException.java index 8fd5c294b..f1ec76105 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/exceptions/LongbowWriterException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/exceptions/LongbowWriterException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.exceptions; +package com.gotocompany.dagger.core.processors.longbow.exceptions; /** * The Exception for Longbow writer. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputIdentity.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputIdentity.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputIdentity.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputIdentity.java index 24880041c..0d09e19ec 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputIdentity.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputIdentity.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputSynchronizer.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputSynchronizer.java similarity index 76% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputSynchronizer.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputSynchronizer.java index bb835a6fb..ac021e2ac 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputSynchronizer.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputSynchronizer.java @@ -1,14 +1,13 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.validator.LongbowType; +import com.gotocompany.dagger.core.processors.longbow.validator.LongbowType; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import java.util.stream.IntStream; -import static io.odpf.dagger.core.utils.Constants.LONGBOW_OUTPUT_ADDITIONAL_ARITY; - /** * The Output synchronizer. 
*/ @@ -32,7 +31,7 @@ public OutputSynchronizer(LongbowSchema longbowSchema, String tableId, String in @Override public Row get(Row input) { - int outputArity = input.getArity() + LONGBOW_OUTPUT_ADDITIONAL_ARITY; + int outputArity = input.getArity() + Constants.LONGBOW_OUTPUT_ADDITIONAL_ARITY; int inputArity = input.getArity(); Row output = new Row(outputArity); IntStream.range(0, inputArity).forEach(i -> output.setField(i, input.getField(i))); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputLongbowData.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputLongbowData.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputLongbowData.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputLongbowData.java index 560ea08da..000919a2e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputLongbowData.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputLongbowData.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputProtoData.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputProtoData.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputProtoData.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputProtoData.java index 6833e1627..3fcca44e3 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputProtoData.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputProtoData.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputRow.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputRow.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputRow.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputRow.java index 455cd9ef8..36a07e38c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/ReaderOutputRow.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/ReaderOutputRow.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package 
com.gotocompany.dagger.core.processors.longbow.outputRow; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/WriterOutputRow.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/WriterOutputRow.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/WriterOutputRow.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/WriterOutputRow.java index ee41f731e..f4d24711f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/outputRow/WriterOutputRow.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/outputRow/WriterOutputRow.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowReader.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowReader.java index 381abf73a..3b2ea681e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowReader.java @@ -1,25 +1,25 @@ -package io.odpf.dagger.core.processors.longbow.processor; - +package com.gotocompany.dagger.core.processors.longbow.processor; + +import com.gotocompany.dagger.core.metrics.aspects.LongbowReaderAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.processors.longbow.exceptions.LongbowReaderException; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.LongbowReaderAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.data.LongbowData; -import io.odpf.dagger.core.processors.longbow.exceptions.LongbowReaderException; -import io.odpf.dagger.core.processors.longbow.outputRow.ReaderOutputRow; -import io.odpf.dagger.core.processors.longbow.range.LongbowRange; -import io.odpf.dagger.core.processors.longbow.request.ScanRequestFactory; -import io.odpf.dagger.core.processors.longbow.storage.LongbowStore; -import io.odpf.dagger.core.processors.longbow.storage.ScanRequest; -import io.odpf.dagger.core.utils.Constants; 
+import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.data.LongbowData; +import com.gotocompany.dagger.core.processors.longbow.outputRow.ReaderOutputRow; +import com.gotocompany.dagger.core.processors.longbow.range.LongbowRange; +import com.gotocompany.dagger.core.processors.longbow.request.ScanRequestFactory; +import com.gotocompany.dagger.core.processors.longbow.storage.LongbowStore; +import com.gotocompany.dagger.core.processors.longbow.storage.ScanRequest; import org.apache.hadoop.hbase.client.Result; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowWriter.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowWriter.java index bd02ac9c9..e8a608c71 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowWriter.java @@ -1,23 +1,23 @@ -package io.odpf.dagger.core.processors.longbow.processor; - +package com.gotocompany.dagger.core.processors.longbow.processor; + +import com.gotocompany.dagger.core.metrics.aspects.LongbowWriterAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.processors.longbow.exceptions.LongbowWriterException; +import com.gotocompany.dagger.core.processors.longbow.outputRow.WriterOutputRow; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.LongbowWriterAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.exceptions.LongbowWriterException; -import io.odpf.dagger.core.processors.longbow.outputRow.WriterOutputRow; -import io.odpf.dagger.core.processors.longbow.request.PutRequestFactory; -import io.odpf.dagger.core.processors.longbow.storage.LongbowStore; -import io.odpf.dagger.core.processors.longbow.storage.PutRequest; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import 
com.gotocompany.dagger.core.processors.longbow.request.PutRequestFactory; +import com.gotocompany.dagger.core.processors.longbow.storage.LongbowStore; +import com.gotocompany.dagger.core.processors.longbow.storage.PutRequest; import org.apache.hadoop.hbase.util.Bytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowAbsoluteRange.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowAbsoluteRange.java similarity index 83% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowAbsoluteRange.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowAbsoluteRange.java index b0256ccd9..01476e38a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowAbsoluteRange.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowAbsoluteRange.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.range; +package com.gotocompany.dagger.core.processors.longbow.range; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowDurationRange.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowDurationRange.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowDurationRange.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowDurationRange.java index fc0ba897d..6eb4d2ee1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowDurationRange.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowDurationRange.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.range; +package com.gotocompany.dagger.core.processors.longbow.range; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowRange.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowRange.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowRange.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowRange.java index 2ac113713..79b7397d1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowRange.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowRange.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.range; +package com.gotocompany.dagger.core.processors.longbow.range; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowRangeFactory.java 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowRangeFactory.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowRangeFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowRangeFactory.java index 13c92379b..f06732a33 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/range/LongbowRangeFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowRangeFactory.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.longbow.range; +package com.gotocompany.dagger.core.processors.longbow.range; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; /** * The factor class for Longbow range. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoBytePutRequest.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoBytePutRequest.java index 71f7631c0..b76e52a06 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoBytePutRequest.java @@ -1,18 +1,18 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.processors.longbow.storage.PutRequest; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.types.Row; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.storage.PutRequest; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; import java.sql.Timestamp; import java.time.LocalDateTime; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; /** * Create PutRequest in form of proto byte. 
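In the OutputSynchronizer hunk earlier in this diff, the only change is swapping the static import of LONGBOW_OUTPUT_ADDITIONAL_ARITY for a qualified Constants reference; the row expansion logic itself is untouched. A minimal standalone sketch of that pattern with Flink's Row API (the extra-arity value of 3 is assumed for illustration, not taken from Dagger's Constants):

```java
import java.util.stream.IntStream;
import org.apache.flink.types.Row;

public class RowExpansionExample {
    // Assumed value for illustration; the real constant lives in Dagger's Constants class.
    private static final int ADDITIONAL_ARITY = 3;

    // Copies every field of the input row into a wider output row, leaving the
    // trailing ADDITIONAL_ARITY fields to be filled in later.
    public static Row expand(Row input) {
        int inputArity = input.getArity();
        Row output = new Row(inputArity + ADDITIONAL_ARITY);
        IntStream.range(0, inputArity).forEach(i -> output.setField(i, input.getField(i)));
        return output;
    }
}
```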
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoByteScanRequest.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoByteScanRequest.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoByteScanRequest.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoByteScanRequest.java index 427683b90..0c475dffb 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoByteScanRequest.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoByteScanRequest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.core.processors.longbow.storage.ScanRequest; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.storage.ScanRequest; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/PutRequestFactory.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/PutRequestFactory.java index 32f5e8d6c..7a129eefa 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/PutRequestFactory.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; import org.apache.flink.types.Row; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.storage.PutRequest; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.storage.PutRequest; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ScanRequestFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ScanRequestFactory.java similarity index 79% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ScanRequestFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ScanRequestFactory.java index b068323a3..cae40431a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ScanRequestFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/ScanRequestFactory.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.range.LongbowRange; -import 
io.odpf.dagger.core.processors.longbow.storage.ScanRequest; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.storage.ScanRequest; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.range.LongbowRange; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/TablePutRequest.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/TablePutRequest.java similarity index 84% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/TablePutRequest.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/TablePutRequest.java index 47324e2d8..e8203c6a2 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/TablePutRequest.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/TablePutRequest.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.types.Row; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.storage.PutRequest; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.storage.PutRequest; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; import java.sql.Timestamp; import java.time.LocalDateTime; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; /** * Create PutRequest in form of table. 
LONGBOW_KEY as range key, diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/TableScanRequest.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/TableScanRequest.java similarity index 84% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/TableScanRequest.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/TableScanRequest.java index feceda178..1ba069be9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/TableScanRequest.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/request/TableScanRequest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.storage.ScanRequest; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.storage.ScanRequest; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/LongbowStore.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/LongbowStore.java similarity index 95% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/LongbowStore.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/LongbowStore.java index aca99ab92..59652386e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/LongbowStore.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/LongbowStore.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.processors.longbow.storage; +package com.gotocompany.dagger.core.processors.longbow.storage; import com.google.cloud.bigtable.admin.v2.BigtableTableAdminClient; import com.google.cloud.bigtable.admin.v2.models.CreateTableRequest; import com.google.cloud.bigtable.hbase.BigtableConfiguration; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.AdvancedScanResultConsumer; import org.apache.hadoop.hbase.client.AsyncTable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/PutRequest.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/PutRequest.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/PutRequest.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/PutRequest.java index 0e12eafea..49594b9dd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/PutRequest.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/PutRequest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.storage; +package com.gotocompany.dagger.core.processors.longbow.storage; import 
org.apache.hadoop.hbase.client.Put; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/ScanRequest.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/ScanRequest.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/ScanRequest.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/ScanRequest.java index 717bc2bf7..87c5df764 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/storage/ScanRequest.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/storage/ScanRequest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.storage; +package com.gotocompany.dagger.core.processors.longbow.storage; import org.apache.hadoop.hbase.client.Scan; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/validator/LongbowType.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowType.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/validator/LongbowType.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowType.java index e341284c2..126475aef 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/validator/LongbowType.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowType.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.longbow.validator; +package com.gotocompany.dagger.core.processors.longbow.validator; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.utils.Constants; -import static io.odpf.dagger.common.core.Constants.ROWTIME; +import static com.gotocompany.dagger.common.core.Constants.ROWTIME; /** * The enum Longbow type. 
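The scan request classes renamed above (ProtoByteScanRequest, TableScanRequest) build HBase Scan objects over a row-key range; only their packages change here. A generic illustration of that HBase 2.x Scan API, with made-up start and stop keys rather than Dagger's longbow key format:

```java
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class RangeScanExample {
    // Builds a Scan bounded by a start and stop row key; the key contents are illustrative only.
    public static Scan rangeScan(String startKey, String stopKey) {
        return new Scan()
                .withStartRow(Bytes.toBytes(startKey))
                .withStopRow(Bytes.toBytes(stopKey));
    }
}
```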
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/validator/LongbowValidator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowValidator.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/validator/LongbowValidator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowValidator.java index 43e500137..78e341a43 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/validator/LongbowValidator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowValidator.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.validator; +package com.gotocompany.dagger.core.processors.longbow.validator; -import io.odpf.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; import org.apache.commons.lang3.StringUtils; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/telemetry/TelemetryProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/telemetry/TelemetryProcessor.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/telemetry/TelemetryProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/telemetry/TelemetryProcessor.java index d77f1c358..19ccd9e83 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/telemetry/TelemetryProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/telemetry/TelemetryProcessor.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.telemetry; +package com.gotocompany.dagger.core.processors.telemetry; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/telemetry/processor/MetricsTelemetryExporter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/telemetry/processor/MetricsTelemetryExporter.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/telemetry/processor/MetricsTelemetryExporter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/telemetry/processor/MetricsTelemetryExporter.java index 73f5c97c7..0f293190a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/telemetry/processor/MetricsTelemetryExporter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/telemetry/processor/MetricsTelemetryExporter.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.processors.telemetry.processor; +package com.gotocompany.dagger.core.processors.telemetry.processor; +import com.gotocompany.dagger.core.metrics.aspects.TelemetryAspects; +import 
com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; import org.apache.flink.api.common.functions.RichMapFunction; import org.apache.flink.configuration.Configuration; import org.apache.flink.types.Row; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; -import io.odpf.dagger.core.metrics.aspects.TelemetryAspects; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TableTransformConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TableTransformConfig.java similarity index 94% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TableTransformConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TableTransformConfig.java index aa80ea1c2..11f76b2d3 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TableTransformConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TableTransformConfig.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.transformers; +package com.gotocompany.dagger.core.processors.transformers; import java.util.ArrayList; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformConfig.java similarity index 94% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformConfig.java index 7ef9271c3..7eb6d9678 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformConfig.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.transformers; +package com.gotocompany.dagger.core.processors.transformers; -import io.odpf.dagger.core.processors.types.Validator; +import com.gotocompany.dagger.core.processors.types.Validator; import java.io.Serializable; import java.util.HashMap; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformProcessor.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformProcessor.java index ba5cff334..acca01125 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformProcessor.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.core.processors.transformers; +package com.gotocompany.dagger.core.processors.transformers; -import io.odpf.dagger.common.core.DaggerContext; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import 
io.odpf.dagger.core.exception.TransformClassNotDefinedException; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.PreProcessorConfig; -import io.odpf.dagger.core.processors.types.PostProcessor; -import io.odpf.dagger.core.processors.types.Preprocessor; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; +import com.gotocompany.dagger.core.exception.TransformClassNotDefinedException; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.PreProcessorConfig; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.utils.Constants; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; @@ -25,6 +25,7 @@ public class TransformProcessor implements Preprocessor, PostProcessor, TelemetryPublisher { protected final List transformConfigs; + /** * Gets table name. * diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformerUtils.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformerUtils.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformerUtils.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformerUtils.java index d2723ae8d..bb10c8409 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/transformers/TransformerUtils.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/transformers/TransformerUtils.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.transformers; +package com.gotocompany.dagger.core.processors.transformers; /** * The utils of the Transformer. 
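The TransformProcessor imports above (Constructor, InvocationTargetException, TransformClassNotDefinedException) point at reflective loading of transformer classes by name. A rough sketch of that general pattern, not the actual TransformProcessor logic (the wrapping exception here is a plain stand-in):

```java
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

public class ReflectiveLoaderExample {
    // Instantiates a class by its fully qualified name using its no-arg constructor.
    // Error handling is simplified; the real processor wraps failures in its own exception type.
    public static Object instantiate(String className) {
        try {
            Class<?> clazz = Class.forName(className);
            Constructor<?> constructor = clazz.getConstructor();
            return constructor.newInstance();
        } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException
                | IllegalAccessException | InvocationTargetException e) {
            throw new IllegalArgumentException("Could not load transformer class: " + className, e);
        }
    }
}
```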
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/FilterDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/FilterDecorator.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/FilterDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/FilterDecorator.java index 50780558f..384a21979 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/FilterDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/FilterDecorator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; import org.apache.flink.api.common.functions.FilterFunction; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/MapDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/MapDecorator.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/MapDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/MapDecorator.java index 7037cfb90..5e17a4d29 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/MapDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/MapDecorator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/PostProcessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/PostProcessor.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/PostProcessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/PostProcessor.java index 32483e63b..552f2a37b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/PostProcessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/PostProcessor.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; /** * The interface Post processor. 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/Preprocessor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/Preprocessor.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/Preprocessor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/Preprocessor.java index d77da7dd7..9e262567c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/Preprocessor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/Preprocessor.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PreProcessorConfig; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PreProcessorConfig; /** * The interface Preprocessor. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/SourceConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/SourceConfig.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/SourceConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/SourceConfig.java index 6434d964b..e212b6531 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/SourceConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/SourceConfig.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/StreamDecorator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/StreamDecorator.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/StreamDecorator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/StreamDecorator.java index 1283b54e6..f0cea7612 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/StreamDecorator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/StreamDecorator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/Validator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/Validator.java similarity index 96% rename from dagger-core/src/main/java/io/odpf/dagger/core/processors/types/Validator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/Validator.java index 5d416b0d9..593b2cb21 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/types/Validator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/processors/types/Validator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.types; +package com.gotocompany.dagger.core.processors.types; import org.apache.commons.lang3.StringUtils; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/SinkOrchestrator.java similarity index 58% rename 
from dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/SinkOrchestrator.java index fd14c5e80..05997e445 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/SinkOrchestrator.java @@ -1,33 +1,36 @@ -package io.odpf.dagger.core.sink; - -import io.odpf.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; -import io.odpf.dagger.core.sink.bigquery.BigQuerySinkBuilder; +package com.gotocompany.dagger.core.sink; + +import com.gotocompany.dagger.core.enumeration.KafkaConnectorTypesMetadata; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.sink.bigquery.BigQuerySinkBuilder; +import com.gotocompany.dagger.core.sink.influx.ErrorHandler; +import com.gotocompany.dagger.core.sink.influx.InfluxDBFactoryWrapper; +import com.gotocompany.dagger.core.sink.influx.InfluxDBSink; +import com.gotocompany.dagger.core.utils.KafkaConfigUtil; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Sink; +import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.connector.base.DeliveryGuarantee; import org.apache.flink.connector.kafka.sink.KafkaSink; import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducerBase; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.sink.influx.ErrorHandler; -import io.odpf.dagger.core.sink.influx.InfluxDBFactoryWrapper; -import io.odpf.dagger.core.sink.influx.InfluxDBSink; -import io.odpf.dagger.core.sink.kafka.KafkaSerializationSchemaFactory; -import io.odpf.dagger.core.sink.kafka.KafkaSerializerBuilder; -import io.odpf.dagger.core.sink.log.LogSink; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.sink.kafka.KafkaSerializationSchemaFactory; +import com.gotocompany.dagger.core.sink.kafka.KafkaSerializerBuilder; +import com.gotocompany.dagger.core.sink.log.LogSink; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Properties; -import static io.odpf.dagger.core.utils.Constants.*; - /** * The Sink orchestrator. * Responsible for handling the sink type. 
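Beyond the package moves, the SinkOrchestrator hunks that follow add real behavior: the Kafka producer's linger.ms value is validated as an integer before being applied, and additional producer properties are merged in from the job configuration. A small standalone sketch of that validate-then-set pattern (the default handling and property key are illustrative; "linger.ms" is the standard Kafka producer key, but Dagger's own constant names are not reproduced here):

```java
import java.util.Properties;

public class LingerMsExample {
    // Validates that the configured linger value parses as an integer before
    // handing it to the Kafka producer properties; invalid values fail fast.
    public static Properties withLingerMs(Properties producerConfig, String lingerMs) {
        try {
            Integer.parseInt(lingerMs);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                    "Provided value for Linger Ms : " + lingerMs + " is not a valid integer", e);
        }
        producerConfig.setProperty("linger.ms", lingerMs);
        return producerConfig;
    }
}
```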
@@ -55,7 +58,7 @@ public Sink getSink(Configuration configuration, String[] columnNames, StencilCl Sink sink; switch (sinkType) { case "kafka": - String outputBootStrapServers = configuration.getString(SINK_KAFKA_BROKERS_KEY, ""); + String outputBootStrapServers = configuration.getString(Constants.SINK_KAFKA_BROKERS_KEY, ""); KafkaSerializerBuilder serializationSchema = KafkaSerializationSchemaFactory .getSerializationSchema(configuration, stencilClientOrchestrator, columnNames); @@ -101,15 +104,30 @@ private void reportTelemetry(KafkaSerializerBuilder kafkaSchemaBuilder) { * @return the producer properties */ protected Properties getProducerProperties(Configuration configuration) { - String outputBrokerList = configuration.getString(SINK_KAFKA_BROKERS_KEY, ""); + String outputBrokerList = configuration.getString(Constants.SINK_KAFKA_BROKERS_KEY, ""); Properties kafkaProducerConfigs = FlinkKafkaProducerBase.getPropertiesFromBrokerList(outputBrokerList); - if (configuration.getBoolean(SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY, SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_DEFAULT)) { - kafkaProducerConfigs.setProperty(SINK_KAFKA_COMPRESSION_TYPE_KEY, SINK_KAFKA_COMPRESSION_TYPE_DEFAULT); - kafkaProducerConfigs.setProperty(SINK_KAFKA_MAX_REQUEST_SIZE_KEY, SINK_KAFKA_MAX_REQUEST_SIZE_DEFAULT); + if (configuration.getBoolean(Constants.SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY, Constants.SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_DEFAULT)) { + kafkaProducerConfigs.setProperty(Constants.SINK_KAFKA_COMPRESSION_TYPE_KEY, Constants.SINK_KAFKA_COMPRESSION_TYPE_DEFAULT); + kafkaProducerConfigs.setProperty(Constants.SINK_KAFKA_MAX_REQUEST_SIZE_KEY, Constants.SINK_KAFKA_MAX_REQUEST_SIZE_DEFAULT); } + String lingerMs = configuration.getString(Constants.SINK_KAFKA_LINGER_MS_KEY, Constants.SINK_KAFKA_LINGER_MS_DEFAULT); + validateLingerMs(lingerMs); + kafkaProducerConfigs.setProperty(Constants.SINK_KAFKA_LINGER_MS_CONFIG_KEY, lingerMs); + Properties dynamicProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SINK, Optional.ofNullable(configuration.getParam()) + .map(ParameterTool::getProperties) + .orElseGet(Properties::new)); + kafkaProducerConfigs.putAll(dynamicProperties); return kafkaProducerConfigs; } + private void validateLingerMs(String lingerMs) { + try { + Integer.parseInt(lingerMs); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Provided value for Linger Ms : " + lingerMs + " is not a valid integer , Error: " + e.getMessage()); + } + } + @Override public Map> getTelemetry() { return metrics; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySink.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySink.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySink.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySink.java index bf8fb23dd..37d8cc0e5 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySink.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySink.java @@ -1,16 +1,15 @@ -package io.odpf.dagger.core.sink.bigquery; +package com.gotocompany.dagger.core.sink.bigquery; import com.google.common.base.Splitter; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import 
io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; -import io.odpf.dagger.core.utils.Constants; -import io.odpf.depot.OdpfSink; -import io.odpf.depot.bigquery.BigQuerySinkFactory; -import io.odpf.depot.config.BigQuerySinkConfig; -import io.odpf.depot.error.ErrorType; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.depot.bigquery.BigQuerySinkFactory; +import com.gotocompany.depot.config.BigQuerySinkConfig; +import com.gotocompany.depot.error.ErrorType; import org.aeonbits.owner.ConfigFactory; import org.apache.flink.api.connector.sink.Committer; import org.apache.flink.api.connector.sink.GlobalCommitter; @@ -58,7 +57,7 @@ public SinkWriter createWriter(InitContext context, List throw e; } } - OdpfSink odpfSink = sinkFactory.create(); + com.gotocompany.depot.Sink sink = sinkFactory.create(); int batchSize = configuration.getInteger( Constants.SINK_BIGQUERY_BATCH_SIZE, Constants.SINK_BIGQUERY_BATCH_SIZE_DEFAULT); @@ -69,7 +68,7 @@ public SinkWriter createWriter(InitContext context, List for (String s : Splitter.on(",").omitEmptyStrings().split(errorsForFailing)) { errorTypesForFailing.add(ErrorType.valueOf(s.trim())); } - return new BigQuerySinkWriter(protoSerializer, odpfSink, batchSize, errorReporter, errorTypesForFailing); + return new BigQuerySinkWriter(protoSerializer, sink, batchSize, errorReporter, errorTypesForFailing); } @Override diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkBuilder.java similarity index 86% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkBuilder.java index c403e3340..2ae1322f3 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkBuilder.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkBuilder.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.sink.bigquery; +package com.gotocompany.dagger.core.sink.bigquery; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; -import io.odpf.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; import org.apache.flink.api.java.utils.ParameterTool; import java.util.HashMap; @@ -44,6 +44,7 @@ private Configuration setDefaultValues(Configuration inputConf) { configMap.put("SCHEMA_REGISTRY_STENCIL_FETCH_HEADERS", ""); configMap.put("SINK_METRICS_APPLICATION_PREFIX", "dagger_"); configMap.put("SINK_BIGQUERY_ROW_INSERT_ID_ENABLE", "false"); + 
configMap.put("SINK_BIGQUERY_STORAGE_API_ENABLE", "true"); return new Configuration(ParameterTool.fromMap(configMap)); } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkWriter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkWriter.java similarity index 71% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkWriter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkWriter.java index 471590d80..5e64a3f93 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkWriter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkWriter.java @@ -1,37 +1,38 @@ -package io.odpf.dagger.core.sink.bigquery; +package com.gotocompany.dagger.core.sink.bigquery; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; -import io.odpf.dagger.core.exception.BigQueryWriterException; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.depot.OdpfSink; -import io.odpf.depot.OdpfSinkResponse; -import io.odpf.depot.error.ErrorInfo; -import io.odpf.depot.error.ErrorType; -import io.odpf.depot.exception.OdpfSinkException; -import io.odpf.depot.message.OdpfMessage; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.exception.BigQueryWriterException; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.depot.Sink; +import com.gotocompany.depot.SinkResponse; +import com.gotocompany.depot.error.ErrorInfo; +import com.gotocompany.depot.error.ErrorType; +import com.gotocompany.depot.exception.SinkException; +import com.gotocompany.depot.message.Message; import lombok.extern.slf4j.Slf4j; import org.apache.flink.api.connector.sink.SinkWriter; import org.apache.flink.types.Row; import java.io.IOException; + import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.Collections; import java.util.stream.Collectors; @Slf4j public class BigQuerySinkWriter implements SinkWriter { private final ProtoSerializer protoSerializer; - private final OdpfSink bigquerySink; + private final Sink bigquerySink; private final int batchSize; private final ErrorReporter errorReporter; private final Set errorTypesForFailing; - private final List messages = new ArrayList<>(); + private final List messages = new ArrayList<>(); private int currentBatchSize; - public BigQuerySinkWriter(ProtoSerializer protoSerializer, OdpfSink bigquerySink, int batchSize, ErrorReporter errorReporter, Set errorTypesForFailing) { + public BigQuerySinkWriter(ProtoSerializer protoSerializer, Sink bigquerySink, int batchSize, ErrorReporter errorReporter, Set errorTypesForFailing) { this.protoSerializer = protoSerializer; this.bigquerySink = bigquerySink; this.batchSize = batchSize; @@ -44,7 +45,7 @@ public void write(Row element, Context context) throws IOException { log.info("adding row to BQ batch : " + element); byte[] key = protoSerializer.serializeKey(element); byte[] value = protoSerializer.serializeValue(element); - OdpfMessage message = new OdpfMessage(key, value); + Message message = new Message(key, value); if (currentBatchSize < batchSize) { messages.add(message); currentBatchSize++; @@ -56,22 +57,22 @@ public void write(Row element, Context context) throws IOException { } } - private void pushToBq() throws 
OdpfSinkException, BigQueryWriterException { + private void pushToBq() throws SinkException, BigQueryWriterException { log.info("Pushing " + currentBatchSize + " records to bq"); - OdpfSinkResponse odpfSinkResponse; + SinkResponse sinkResponse; try { - odpfSinkResponse = bigquerySink.pushToSink(messages); + sinkResponse = bigquerySink.pushToSink(messages); } catch (Exception e) { errorReporter.reportFatalException(e); throw e; } - if (odpfSinkResponse.hasErrors()) { - logErrors(odpfSinkResponse, messages); - checkAndThrow(odpfSinkResponse); + if (sinkResponse.hasErrors()) { + logErrors(sinkResponse, messages); + checkAndThrow(sinkResponse); } } - protected void checkAndThrow(OdpfSinkResponse sinkResponse) throws BigQueryWriterException { + protected void checkAndThrow(SinkResponse sinkResponse) throws BigQueryWriterException { Map> failedErrorTypes = sinkResponse.getErrors().values().stream().collect( Collectors.partitioningBy(errorInfo -> errorTypesForFailing.contains(errorInfo.getErrorType()))); failedErrorTypes.get(Boolean.FALSE).forEach(errorInfo -> { @@ -85,10 +86,10 @@ protected void checkAndThrow(OdpfSinkResponse sinkResponse) throws BigQueryWrite } } - protected void logErrors(OdpfSinkResponse sinkResponse, List sentMessages) { + protected void logErrors(SinkResponse sinkResponse, List sentMessages) { log.error("Failed to push " + sinkResponse.getErrors().size() + " records to BigQuerySink"); sinkResponse.getErrors().forEach((index, errorInfo) -> { - OdpfMessage message = sentMessages.get(index.intValue()); + Message message = sentMessages.get(index.intValue()); log.error("Failed to pushed message with metadata {}. The exception was {}. The ErrorType was {}", message.getMetadataString(), errorInfo.getException().getMessage(), diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/ErrorHandler.java similarity index 78% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/ErrorHandler.java index 6064328d7..4f00d1964 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/ErrorHandler.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.sink.influx; +package com.gotocompany.dagger.core.sink.influx; import org.apache.flink.api.connector.sink.Sink.InitContext; -import io.odpf.dagger.core.sink.influx.errors.InfluxError; -import io.odpf.dagger.core.sink.influx.errors.LateRecordDropError; -import io.odpf.dagger.core.sink.influx.errors.NoError; -import io.odpf.dagger.core.sink.influx.errors.ValidError; -import io.odpf.dagger.core.sink.influx.errors.ValidException; +import com.gotocompany.dagger.core.sink.influx.errors.InfluxError; +import com.gotocompany.dagger.core.sink.influx.errors.LateRecordDropError; +import com.gotocompany.dagger.core.sink.influx.errors.NoError; +import com.gotocompany.dagger.core.sink.influx.errors.ValidError; +import com.gotocompany.dagger.core.sink.influx.errors.ValidException; import org.influxdb.dto.Point; import java.io.Serializable; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBFactoryWrapper.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBFactoryWrapper.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBFactoryWrapper.java rename to 
dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBFactoryWrapper.java index a8bf0eea0..c13ea9a90 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBFactoryWrapper.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBFactoryWrapper.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.influx; +package com.gotocompany.dagger.core.sink.influx; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBSink.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBSink.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBSink.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBSink.java index 3071a187b..9d961fa9d 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBSink.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBSink.java @@ -1,5 +1,8 @@ -package io.odpf.dagger.core.sink.influx; +package com.gotocompany.dagger.core.sink.influx; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Committer; import org.apache.flink.api.connector.sink.GlobalCommitter; import org.apache.flink.api.connector.sink.Sink; @@ -7,9 +10,7 @@ import org.apache.flink.core.io.SimpleVersionedSerializer; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.common.configuration.Configuration; import org.influxdb.InfluxDB; import java.io.IOException; @@ -18,8 +19,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.core.utils.Constants.*; - public class InfluxDBSink implements Sink { private InfluxDBFactoryWrapper influxDBFactory; private Configuration configuration; @@ -36,12 +35,12 @@ public InfluxDBSink(InfluxDBFactoryWrapper influxDBFactory, Configuration config @Override public SinkWriter createWriter(InitContext context, List states) throws IOException { - InfluxDB influxDB = influxDBFactory.connect(configuration.getString(SINK_INFLUX_URL_KEY, SINK_INFLUX_URL_DEFAULT), - configuration.getString(SINK_INFLUX_USERNAME_KEY, SINK_INFLUX_USERNAME_DEFAULT), - configuration.getString(SINK_INFLUX_PASSWORD_KEY, SINK_INFLUX_PASSWORD_DEFAULT)); + InfluxDB influxDB = influxDBFactory.connect(configuration.getString(Constants.SINK_INFLUX_URL_KEY, Constants.SINK_INFLUX_URL_DEFAULT), + configuration.getString(Constants.SINK_INFLUX_USERNAME_KEY, Constants.SINK_INFLUX_USERNAME_DEFAULT), + configuration.getString(Constants.SINK_INFLUX_PASSWORD_KEY, Constants.SINK_INFLUX_PASSWORD_DEFAULT)); errorHandler.init(context); - influxDB.enableBatch(configuration.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT), - configuration.getInteger(SINK_INFLUX_FLUSH_DURATION_MS_KEY, SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT), + influxDB.enableBatch(configuration.getInteger(Constants.SINK_INFLUX_BATCH_SIZE_KEY, Constants.SINK_INFLUX_BATCH_SIZE_DEFAULT), + configuration.getInteger(Constants.SINK_INFLUX_FLUSH_DURATION_MS_KEY, 
Constants.SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT), TimeUnit.MILLISECONDS, Executors.defaultThreadFactory(), errorHandler.getExceptionHandler()); if (errorReporter == null) { errorReporter = ErrorReporterFactory.getErrorReporter(context.metricGroup(), configuration); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBWriter.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBWriter.java index 691fbd83c..49b5e6b60 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/InfluxDBWriter.java @@ -1,11 +1,12 @@ -package io.odpf.dagger.core.sink.influx; +package com.gotocompany.dagger.core.sink.influx; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.SinkWriter; import org.apache.flink.types.Row; import com.google.common.base.Strings; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.common.configuration.Configuration; import org.influxdb.InfluxDB; import org.influxdb.dto.Point; import org.influxdb.dto.Point.Builder; @@ -22,8 +23,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.core.utils.Constants.*; - public class InfluxDBWriter implements SinkWriter { private static final Logger LOGGER = LoggerFactory.getLogger(InfluxDBWriter.class.getName()); private final String databaseName; @@ -35,9 +34,9 @@ public class InfluxDBWriter implements SinkWriter { private ErrorReporter errorReporter; public InfluxDBWriter(Configuration configuration, InfluxDB influxDB, String[] columnNames, ErrorHandler errorHandler, ErrorReporter errorReporter) { - databaseName = configuration.getString(SINK_INFLUX_DB_NAME_KEY, SINK_INFLUX_DB_NAME_DEFAULT); - retentionPolicy = configuration.getString(SINK_INFLUX_RETENTION_POLICY_KEY, SINK_INFLUX_RETENTION_POLICY_DEFAULT); - measurementName = configuration.getString(SINK_INFLUX_MEASUREMENT_NAME_KEY, SINK_INFLUX_MEASUREMENT_NAME_DEFAULT); + databaseName = configuration.getString(Constants.SINK_INFLUX_DB_NAME_KEY, Constants.SINK_INFLUX_DB_NAME_DEFAULT); + retentionPolicy = configuration.getString(Constants.SINK_INFLUX_RETENTION_POLICY_KEY, Constants.SINK_INFLUX_RETENTION_POLICY_DEFAULT); + measurementName = configuration.getString(Constants.SINK_INFLUX_MEASUREMENT_NAME_KEY, Constants.SINK_INFLUX_MEASUREMENT_NAME_DEFAULT); this.influxDB = influxDB; this.columnNames = columnNames; this.errorHandler = errorHandler; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/InfluxError.java similarity index 94% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/InfluxError.java index 298842900..289fd1017 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/InfluxError.java @@ -1,4 +1,4 @@ -package 
io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; import org.influxdb.dto.Point; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/LateRecordDropError.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/LateRecordDropError.java index aa021311f..86adf8b1f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/LateRecordDropError.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorStatsReporter; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Sink.InitContext; import org.apache.flink.metrics.Counter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorStatsReporter; -import io.odpf.dagger.core.utils.Constants; import org.influxdb.InfluxDBException; import org.influxdb.dto.Point; import org.slf4j.Logger; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/NoError.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/NoError.java index d78a7927c..875fc9832 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/NoError.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; import org.influxdb.dto.Point; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/ValidError.java similarity index 81% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/ValidError.java index eaa5ef91a..05f5b8e77 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/ValidError.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; -import io.odpf.dagger.core.exception.InfluxWriteException; -import io.odpf.dagger.core.sink.influx.InfluxDBSink; +import com.gotocompany.dagger.core.exception.InfluxWriteException; +import com.gotocompany.dagger.core.sink.influx.InfluxDBSink; import org.influxdb.dto.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/ValidException.java similarity index 86% rename from 
dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/ValidException.java index db0474c1b..3c8bed822 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/influx/errors/ValidException.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; -import io.odpf.dagger.core.exception.InfluxWriteException; +import com.gotocompany.dagger.core.exception.InfluxWriteException; import org.influxdb.dto.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/KafkaSerializationSchemaFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializationSchemaFactory.java similarity index 55% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/KafkaSerializationSchemaFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializationSchemaFactory.java index a754329d0..6462800d0 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/KafkaSerializationSchemaFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializationSchemaFactory.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.sink.kafka; +package com.gotocompany.dagger.core.sink.kafka; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DataTypes; -import io.odpf.dagger.core.sink.kafka.builder.KafkaJsonSerializerBuilder; -import io.odpf.dagger.core.sink.kafka.builder.KafkaProtoSerializerBuilder; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DataTypes; +import com.gotocompany.dagger.core.sink.kafka.builder.KafkaJsonSerializerBuilder; +import com.gotocompany.dagger.core.sink.kafka.builder.KafkaProtoSerializerBuilder; +import com.gotocompany.dagger.core.utils.Constants; public class KafkaSerializationSchemaFactory { public static KafkaSerializerBuilder getSerializationSchema(Configuration configuration, StencilClientOrchestrator stencilClientOrchestrator, String[] columnNames) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/KafkaSerializerBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializerBuilder.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/KafkaSerializerBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializerBuilder.java index e1cdbc064..eab94a916 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/KafkaSerializerBuilder.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializerBuilder.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.kafka; +package com.gotocompany.dagger.core.sink.kafka; import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilder.java 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilder.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilder.java index 4d1973c54..73e5399cd 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilder.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilder.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.sink.kafka.builder; +package com.gotocompany.dagger.core.sink.kafka.builder; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; import org.apache.flink.formats.json.JsonRowSchemaConverter; import org.apache.flink.formats.json.JsonRowSerializationSchema; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.exceptions.serde.InvalidJSONSchemaException; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.sink.kafka.KafkaSerializerBuilder; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.exceptions.serde.InvalidJSONSchemaException; +import com.gotocompany.dagger.core.sink.kafka.KafkaSerializerBuilder; import java.util.ArrayList; import java.util.HashMap; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilder.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilder.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilder.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilder.java index a48d2a17d..b97cf7e71 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilder.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilder.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.sink.kafka.builder; +package com.gotocompany.dagger.core.sink.kafka.builder; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.proto.serialization.KafkaProtoSerializer; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.sink.kafka.KafkaSerializerBuilder; -import 
io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.proto.serialization.KafkaProtoSerializer; +import com.gotocompany.dagger.core.sink.kafka.KafkaSerializerBuilder; import java.util.ArrayList; import java.util.HashMap; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/log/LogSink.java similarity index 96% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/log/LogSink.java index d2bb3854d..60a5c5537 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/log/LogSink.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.log; +package com.gotocompany.dagger.core.sink.log; import org.apache.flink.api.connector.sink.Committer; import org.apache.flink.api.connector.sink.GlobalCommitter; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSinkWriter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/log/LogSinkWriter.java similarity index 95% rename from dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSinkWriter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/sink/log/LogSinkWriter.java index bd358ca52..c643e4dcc 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSinkWriter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/sink/log/LogSinkWriter.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.log; +package com.gotocompany.dagger.core.sink.log; import org.apache.flink.api.connector.sink.SinkWriter; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/DaggerSource.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/DaggerSource.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/DaggerSource.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/DaggerSource.java index 885385330..13980f761 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/DaggerSource.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/DaggerSource.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source; +package com.gotocompany.dagger.core.source; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/DaggerSourceFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/DaggerSourceFactory.java similarity index 73% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/DaggerSourceFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/DaggerSourceFactory.java index b99735342..c940b4e71 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/DaggerSourceFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/DaggerSourceFactory.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.core.source; +package com.gotocompany.dagger.core.source; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import 
io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.core.exception.InvalidDaggerSourceException; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; -import io.odpf.dagger.core.source.kafka.KafkaDaggerSource; -import io.odpf.dagger.core.source.parquet.ParquetDaggerSource; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.exception.InvalidDaggerSourceException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; +import com.gotocompany.dagger.core.source.kafka.KafkaDaggerSource; +import com.gotocompany.dagger.core.source.parquet.ParquetDaggerSource; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; import org.apache.flink.types.Row; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/Stream.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/Stream.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/Stream.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/Stream.java index 3f5266b0b..78cbd2459 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/Stream.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/Stream.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.source; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.core.deserializer.DaggerDeserializerFactory; -import io.odpf.dagger.core.source.config.StreamConfig; +package com.gotocompany.dagger.core.source; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.core.deserializer.DaggerDeserializerFactory; import lombok.Getter; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.streaming.api.datastream.DataStream; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/StreamsFactory.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/StreamsFactory.java similarity index 66% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/StreamsFactory.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/StreamsFactory.java index 034e92c09..9b6b8daf3 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/StreamsFactory.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/StreamsFactory.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.source; +package com.gotocompany.dagger.core.source; -import io.odpf.dagger.common.configuration.Configuration; -import 
io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.StreamConfig; import java.util.ArrayList; import java.util.List; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/StreamConfig.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/StreamConfig.java similarity index 62% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/StreamConfig.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/StreamConfig.java index a44dc5e0a..6ade4a723 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/StreamConfig.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/StreamConfig.java @@ -1,37 +1,43 @@ -package io.odpf.dagger.core.source.config; +package com.gotocompany.dagger.core.source.config; import com.google.gson.annotations.JsonAdapter; -import io.odpf.dagger.core.source.config.adapter.DaggerSASLMechanismAdaptor; -import io.odpf.dagger.core.source.config.adapter.DaggerSecurityProtocolAdaptor; -import io.odpf.dagger.core.source.config.adapter.FileDateRangeAdaptor; -import io.odpf.dagger.core.source.config.adapter.SourceParquetFilePathsAdapter; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.models.TimeRangePool; -import io.odpf.dagger.core.source.parquet.SourceParquetReadOrderStrategy; -import io.odpf.dagger.core.source.parquet.SourceParquetSchemaMatchStrategy; +import com.gotocompany.dagger.core.enumeration.KafkaConnectorTypesMetadata; +import com.gotocompany.dagger.core.source.config.adapter.DaggerKafkaConsumerAdditionalConfigurationsAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.DaggerSASLMechanismAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.DaggerSSLKeyStoreFileTypeAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.DaggerSSLProtocolAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.DaggerSSLTrustStoreFileTypeAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.DaggerSecurityProtocolAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.FileDateRangeAdaptor; +import com.gotocompany.dagger.core.source.config.adapter.SourceParquetFilePathsAdapter; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.core.source.parquet.SourceParquetReadOrderStrategy; +import com.gotocompany.dagger.core.source.parquet.SourceParquetSchemaMatchStrategy; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.TimeRangePool; +import com.gotocompany.dagger.core.utils.KafkaConfigUtil; import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.annotations.SerializedName; import 
com.google.gson.stream.JsonReader; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; import lombok.Getter; import org.apache.kafka.clients.consumer.OffsetResetStrategy; import java.io.StringReader; import java.util.Map; +import java.util.Objects; import java.util.Properties; import java.util.regex.Pattern; import java.util.stream.Stream; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; -import static io.odpf.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_TABLE; -import static io.odpf.dagger.core.utils.Constants.*; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_FILE_DATE_RANGE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; +import static com.gotocompany.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_TABLE; +import static com.gotocompany.dagger.core.utils.Constants.*; public class StreamConfig { private static final Gson GSON = new GsonBuilder() @@ -41,6 +47,41 @@ public class StreamConfig { private static final String KAFKA_PREFIX = "source_kafka_consumer_config_"; + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD_KEY) + @Getter + private String sslKeyPassword; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION_KEY) + @Getter + private String sslKeystoreLocation; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD_KEY) + @Getter + private String sslKeystorePassword; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE_KEY) + @Getter + @JsonAdapter(value = DaggerSSLKeyStoreFileTypeAdaptor.class) + private String sslKeystoreType; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL_KEY) + @Getter + @JsonAdapter(value = DaggerSSLProtocolAdaptor.class) + private String sslProtocol; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION_KEY) + @Getter + private String sslTruststoreLocation; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD_KEY) + @Getter + private String sslTruststorePassword; + + @SerializedName(SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE_KEY) + @Getter + @JsonAdapter(value = DaggerSSLTrustStoreFileTypeAdaptor.class) + private String sslTruststoreType; + @SerializedName(STREAM_SOURCE_KAFKA_TOPIC_NAMES_KEY) @Getter private String kafkaTopicNames; @@ -116,6 +157,11 @@ public class StreamConfig { @Getter private SourceParquetSchemaMatchStrategy parquetSchemaMatchStrategy; + @SerializedName(SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS) + @JsonAdapter(value = DaggerKafkaConsumerAdditionalConfigurationsAdaptor.class) + @Getter + private Map additionalConsumerConfigurations; + @SerializedName(STREAM_SOURCE_PARQUET_FILE_DATE_RANGE_KEY) @JsonAdapter(FileDateRangeAdaptor.class) @Getter @@ -170,7 +216,7 @@ public Properties getKafkaProps(Configuration configuration) { .stream() .filter(e -> e.getKey().toLowerCase().startsWith(KAFKA_PREFIX)) .forEach(e -> kafkaProps.setProperty(parseVarName(e.getKey(), KAFKA_PREFIX), e.getValue())); - setAdditionalConfigs(kafkaProps, configuration); + setAdditionalKafkaConsumerConfigs(kafkaProps, configuration); return kafkaProps; } @@ -179,10 +225,15 @@ private String parseVarName(String varName, String kafkaPrefix) { return String.join(".", names); } - private void setAdditionalConfigs(Properties 
kafkaProps, Configuration configuration) {
+    private void setAdditionalKafkaConsumerConfigs(Properties kafkaProps, Configuration configuration) {
         if (configuration.getBoolean(SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)) {
             kafkaProps.setProperty(SOURCE_KAFKA_MAX_PARTITION_FETCH_BYTES_KEY, SOURCE_KAFKA_MAX_PARTITION_FETCH_BYTES_DEFAULT);
         }
+        if (Objects.nonNull(this.additionalConsumerConfigurations)) {
+            Properties additionalKafkaProperties = new Properties();
+            additionalKafkaProperties.putAll(this.additionalConsumerConfigurations);
+            kafkaProps.putAll(KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, additionalKafkaProperties));
+        }
     }
 
     public Pattern getTopicPattern() {
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/StreamConfigValidator.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/StreamConfigValidator.java
similarity index 87%
rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/StreamConfigValidator.java
rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/StreamConfigValidator.java
index c9c63b57e..d48d9ad97 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/StreamConfigValidator.java
+++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/StreamConfigValidator.java
@@ -1,14 +1,14 @@
-package io.odpf.dagger.core.source.config;
+package com.gotocompany.dagger.core.source.config;
 
 import com.google.common.base.Preconditions;
-import io.odpf.dagger.core.source.config.models.SourceDetails;
-import io.odpf.dagger.core.source.config.models.SourceName;
+import com.gotocompany.dagger.core.source.config.models.SourceDetails;
+import com.gotocompany.dagger.core.source.config.models.SourceName;
 
 import java.util.Arrays;
 import java.util.stream.Stream;
 
-import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_KEY;
-import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_FILE_PATHS_KEY;
+import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_KEY;
+import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_FILE_PATHS_KEY;
 
 public class StreamConfigValidator {
     public static StreamConfig validateSourceDetails(StreamConfig streamConfig) {
diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerKafkaConsumerAdditionalConfigurationsAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerKafkaConsumerAdditionalConfigurationsAdaptor.java
new file mode 100644
index 000000000..15b7f83e9
--- /dev/null
+++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerKafkaConsumerAdditionalConfigurationsAdaptor.java
@@ -0,0 +1,40 @@
+package com.gotocompany.dagger.core.source.config.adapter;
+
+import com.google.gson.Gson;
+import com.google.gson.TypeAdapter;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonWriter;
+import com.gotocompany.dagger.core.enumeration.KafkaConnectorTypesMetadata;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class DaggerKafkaConsumerAdditionalConfigurationsAdaptor extends TypeAdapter<Map<String, String>> {
+
+    @Override
+    public void write(JsonWriter jsonWriter, Map<String, String> stringStringMap) throws IOException {
+        Gson gson = new Gson();
+        jsonWriter.jsonValue(gson.toJson(stringStringMap));
+    }
+
+    @Override
+    public Map<String, String> read(JsonReader jsonReader) throws IOException {
+        Gson gson = new Gson();
+        Map<String, String> map = gson.fromJson(jsonReader, Map.class);
+        List<String> invalidProps = map.keySet().stream()
+                .filter(key -> !KafkaConnectorTypesMetadata.SOURCE.getConfigurationPattern()
+                        .matcher(key)
+                        .matches())
+                .collect(Collectors.toList());
+        if (!invalidProps.isEmpty()) {
+            throw new IllegalArgumentException("Invalid additional kafka consumer configuration properties found: " + invalidProps);
+        }
+        return map.entrySet()
+                .stream()
+                .filter(entry -> entry.getValue() != null)
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
+}
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptor.java
similarity index 84%
rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptor.java
rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptor.java
index 28410fbcb..97e6edd11 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptor.java
+++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptor.java
@@ -1,10 +1,10 @@
-package io.odpf.dagger.core.source.config.adapter;
+package com.gotocompany.dagger.core.source.config.adapter;
 
 import com.google.gson.TypeAdapter;
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
-import io.odpf.dagger.core.exception.InvalidConfigurationException;
-import io.odpf.dagger.core.utils.Constants;
+import com.gotocompany.dagger.core.exception.InvalidConfigurationException;
+import com.gotocompany.dagger.core.utils.Constants;
 
 import java.io.IOException;
 import java.util.Arrays;
diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLKeyStoreFileTypeAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLKeyStoreFileTypeAdaptor.java
new file mode 100644
index 000000000..4d227a656
--- /dev/null
+++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLKeyStoreFileTypeAdaptor.java
@@ -0,0 +1,31 @@
+package com.gotocompany.dagger.core.source.config.adapter;
+
+import com.google.gson.TypeAdapter;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonWriter;
+import com.gotocompany.dagger.core.exception.InvalidConfigurationException;
+import com.gotocompany.dagger.core.utils.Constants;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+public class DaggerSSLKeyStoreFileTypeAdaptor extends TypeAdapter<String> {
+    @Override
+    public void write(JsonWriter jsonWriter, String value) throws IOException {
+        if (value == null) {
+            jsonWriter.nullValue();
+            return;
+        }
+        jsonWriter.value(value);
+    }
+
+    @Override
+    public String read(JsonReader jsonReader) throws IOException {
+        String keyStoreFileType = jsonReader.nextString();
+        if (Arrays.stream(Constants.SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_STORE_FILE_TYPE).anyMatch(keyStoreFileType::equals)) {
+            return keyStoreFileType;
+        } else {
+            throw new InvalidConfigurationException(String.format("Configured wrong SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE_KEY supported values are %s", Arrays.toString(Constants.SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_STORE_FILE_TYPE)));
+        }
+    }
+}
diff --git
a/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLProtocolAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLProtocolAdaptor.java new file mode 100644 index 000000000..737e4e94a --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLProtocolAdaptor.java @@ -0,0 +1,31 @@ +package com.gotocompany.dagger.core.source.config.adapter; + +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.utils.Constants; + +import java.io.IOException; +import java.util.Arrays; + +public class DaggerSSLProtocolAdaptor extends TypeAdapter { + @Override + public void write(JsonWriter jsonWriter, String value) throws IOException { + if (value == null) { + jsonWriter.nullValue(); + return; + } + jsonWriter.value(value); + } + + @Override + public String read(JsonReader jsonReader) throws IOException { + String sslProtocol = jsonReader.nextString(); + if (Arrays.stream(Constants.SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL).anyMatch(sslProtocol::equals)) { + return sslProtocol; + } else { + throw new InvalidConfigurationException(String.format("Configured wrong SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL supported values are %s", Arrays.toString(Constants.SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL))); + } + } +} diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLTrustStoreFileTypeAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLTrustStoreFileTypeAdaptor.java new file mode 100644 index 000000000..ac1ef004f --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLTrustStoreFileTypeAdaptor.java @@ -0,0 +1,31 @@ +package com.gotocompany.dagger.core.source.config.adapter; + +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.utils.Constants; + +import java.io.IOException; +import java.util.Arrays; + +public class DaggerSSLTrustStoreFileTypeAdaptor extends TypeAdapter { + @Override + public void write(JsonWriter jsonWriter, String value) throws IOException { + if (value == null) { + jsonWriter.nullValue(); + return; + } + jsonWriter.value(value); + } + + @Override + public String read(JsonReader jsonReader) throws IOException { + String trustStoreFileType = jsonReader.nextString(); + if (Arrays.stream(Constants.SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_STORE_FILE_TYPE).anyMatch(trustStoreFileType::equals)) { + return trustStoreFileType; + } else { + throw new InvalidConfigurationException(String.format("Configured wrong SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE_KEY supported values are %s", Arrays.toString(Constants.SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_STORE_FILE_TYPE))); + } + } +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptor.java similarity index 85% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptor.java rename to 
dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptor.java index c4593ade2..7f0351a41 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptor.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import com.google.gson.TypeAdapter; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.utils.Constants; import java.io.IOException; import java.util.Arrays; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/FileDateRangeAdaptor.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/FileDateRangeAdaptor.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/FileDateRangeAdaptor.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/FileDateRangeAdaptor.java index c189b97f2..147dd2624 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/FileDateRangeAdaptor.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/FileDateRangeAdaptor.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import com.google.gson.TypeAdapter; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; -import io.odpf.dagger.core.exception.InvalidTimeRangeException; -import io.odpf.dagger.core.source.config.models.TimeRange; -import io.odpf.dagger.core.source.config.models.TimeRangePool; +import com.gotocompany.dagger.core.exception.InvalidTimeRangeException; +import com.gotocompany.dagger.core.source.config.models.TimeRange; +import com.gotocompany.dagger.core.source.config.models.TimeRangePool; import java.io.IOException; import java.text.ParseException; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/SourceParquetFilePathsAdapter.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/SourceParquetFilePathsAdapter.java similarity index 92% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/SourceParquetFilePathsAdapter.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/SourceParquetFilePathsAdapter.java index 3b21f055e..ecd24cc47 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/adapter/SourceParquetFilePathsAdapter.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/adapter/SourceParquetFilePathsAdapter.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import com.google.gson.Gson; import com.google.gson.TypeAdapter; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceDetails.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceDetails.java similarity index 67% rename from 
dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceDetails.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceDetails.java index 2dc37ac92..668c2aa3e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceDetails.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceDetails.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.source.config.models; +package com.gotocompany.dagger.core.source.config.models; import com.google.gson.annotations.SerializedName; import lombok.Getter; import java.io.Serializable; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_KEY; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_TYPE_KEY; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_KEY; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_TYPE_KEY; public class SourceDetails implements Serializable { @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_KEY) diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceName.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceName.java new file mode 100644 index 000000000..f70658c55 --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceName.java @@ -0,0 +1,16 @@ +package com.gotocompany.dagger.core.source.config.models; + +import com.google.gson.annotations.SerializedName; + +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA_CONSUMER; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_PARQUET; + +public enum SourceName { + @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA) + KAFKA_SOURCE, + @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_PARQUET) + PARQUET_SOURCE, + @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA_CONSUMER) + KAFKA_CONSUMER +} diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceType.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceType.java new file mode 100644 index 000000000..749b7689f --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/SourceType.java @@ -0,0 +1,13 @@ +package com.gotocompany.dagger.core.source.config.models; + +import com.google.gson.annotations.SerializedName; + +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_TYPE_BOUNDED; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_TYPE_UNBOUNDED; + +public enum SourceType { + @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_TYPE_BOUNDED) + BOUNDED, + @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_TYPE_UNBOUNDED) + UNBOUNDED +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/TimeRange.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/TimeRange.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/TimeRange.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/TimeRange.java index 3faa07db6..fdf7977ae 100644 --- 
a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/TimeRange.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/TimeRange.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.config.models; +package com.gotocompany.dagger.core.source.config.models; import lombok.Getter; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/TimeRangePool.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/TimeRangePool.java similarity index 90% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/TimeRangePool.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/TimeRangePool.java index 68a0424f8..d291d5ee1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/TimeRangePool.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/config/models/TimeRangePool.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.config.models; +package com.gotocompany.dagger.core.source.config.models; import lombok.Getter; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustom.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustom.java similarity index 89% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustom.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustom.java index 06b30eea2..14860175c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustom.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustom.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.source.flinkkafkaconsumer; +package com.gotocompany.dagger.core.source.flinkkafkaconsumer; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSource.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSource.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSource.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSource.java index 81a49e43e..5e17b0a63 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSource.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSource.java @@ -1,20 +1,20 @@ -package io.odpf.dagger.core.source.flinkkafkaconsumer; - -import io.odpf.dagger.common.configuration.Configuration; -import 
io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.DaggerSource; +package com.gotocompany.dagger.core.source.flinkkafkaconsumer; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.DaggerSource; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; import org.apache.flink.types.Row; -import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_CONSUMER; -import static io.odpf.dagger.core.source.config.models.SourceType.UNBOUNDED; +import static com.gotocompany.dagger.core.source.config.models.SourceName.KAFKA_CONSUMER; +import static com.gotocompany.dagger.core.source.config.models.SourceType.UNBOUNDED; public class FlinkKafkaConsumerDaggerSource implements DaggerSource { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/kafka/KafkaDaggerSource.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/kafka/KafkaDaggerSource.java similarity index 75% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/kafka/KafkaDaggerSource.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/kafka/KafkaDaggerSource.java index c6e44f62f..550b6fb23 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/kafka/KafkaDaggerSource.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/kafka/KafkaDaggerSource.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.source.kafka; +package com.gotocompany.dagger.core.source.kafka; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.DaggerSource; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.core.source.DaggerSource; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.connector.kafka.source.KafkaSource; import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema; @@ -15,15 +15,12 @@ import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; import org.apache.flink.types.Row; 
-import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_SOURCE; -import static io.odpf.dagger.core.source.config.models.SourceType.UNBOUNDED; - public class KafkaDaggerSource implements DaggerSource { private final DaggerDeserializer deserializer; private final StreamConfig streamConfig; private final Configuration configuration; - private static final SourceName SUPPORTED_SOURCE_NAME = KAFKA_SOURCE; - private static final SourceType SUPPORTED_SOURCE_TYPE = UNBOUNDED; + private static final SourceName SUPPORTED_SOURCE_NAME = SourceName.KAFKA_SOURCE; + private static final SourceType SUPPORTED_SOURCE_TYPE = SourceType.UNBOUNDED; public KafkaDaggerSource(StreamConfig streamConfig, Configuration configuration, DaggerDeserializer deserializer) { this.streamConfig = streamConfig; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetDaggerSource.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetDaggerSource.java similarity index 80% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetDaggerSource.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetDaggerSource.java index 078b59731..f61b3c7ae 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetDaggerSource.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetDaggerSource.java @@ -1,20 +1,20 @@ -package io.odpf.dagger.core.source.parquet; +package com.gotocompany.dagger.core.source.parquet; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import io.odpf.dagger.core.source.DaggerSource; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.parquet.path.HourDatePathParser; -import io.odpf.dagger.core.source.parquet.reader.ParquetReader; -import io.odpf.dagger.core.source.parquet.reader.ReaderProvider; -import io.odpf.dagger.core.source.parquet.splitassigner.ChronologyOrderedSplitAssigner; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.source.parquet.reader.ParquetReader; +import com.gotocompany.dagger.core.source.parquet.splitassigner.ChronologyOrderedSplitAssigner; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.core.source.DaggerSource; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; 
+import com.gotocompany.dagger.core.source.parquet.path.HourDatePathParser; +import com.gotocompany.dagger.core.source.parquet.reader.ReaderProvider; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.connector.file.src.FileSource; @@ -28,8 +28,8 @@ import java.util.Arrays; import java.util.function.Supplier; -import static io.odpf.dagger.core.source.config.models.SourceName.PARQUET_SOURCE; -import static io.odpf.dagger.core.source.config.models.SourceType.BOUNDED; +import static com.gotocompany.dagger.core.source.config.models.SourceName.PARQUET_SOURCE; +import static com.gotocompany.dagger.core.source.config.models.SourceType.BOUNDED; public class ParquetDaggerSource implements DaggerSource { private final DaggerDeserializer deserializer; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetFileRecordFormat.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetFileRecordFormat.java similarity index 93% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetFileRecordFormat.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetFileRecordFormat.java index e93b9eb81..d7966ac82 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetFileRecordFormat.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetFileRecordFormat.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.source.parquet; +package com.gotocompany.dagger.core.source.parquet; import static com.google.api.client.util.Preconditions.checkArgument; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import io.odpf.dagger.core.source.parquet.reader.ReaderProvider; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.source.parquet.reader.ReaderProvider; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.configuration.Configuration; import org.apache.flink.connector.file.src.reader.FileRecordFormat; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetFileSource.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetFileSource.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetFileSource.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetFileSource.java index f97fb14c1..5141ca227 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/ParquetFileSource.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/ParquetFileSource.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.source.parquet; +package com.gotocompany.dagger.core.source.parquet; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import io.odpf.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; 
+import com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.source.config.models.SourceType; import lombok.Getter; import org.apache.flink.connector.file.src.FileSource; import org.apache.flink.connector.file.src.assigners.FileSplitAssigner; @@ -15,7 +15,7 @@ import java.io.Serializable; import static com.google.api.client.util.Preconditions.checkArgument; -import static io.odpf.dagger.core.source.config.models.SourceType.BOUNDED; +import static com.gotocompany.dagger.core.source.config.models.SourceType.BOUNDED; public class ParquetFileSource implements Serializable { @Getter diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/SourceParquetReadOrderStrategy.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/SourceParquetReadOrderStrategy.java similarity index 52% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/SourceParquetReadOrderStrategy.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/SourceParquetReadOrderStrategy.java index 2f6849ff4..5fe43b6bb 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/SourceParquetReadOrderStrategy.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/SourceParquetReadOrderStrategy.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.source.parquet; +package com.gotocompany.dagger.core.source.parquet; import com.google.gson.annotations.SerializedName; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_READ_ORDER_STRATEGY_EARLIEST_INDEX_FIRST; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_READ_ORDER_STRATEGY_EARLIEST_TIME_URL_FIRST; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_READ_ORDER_STRATEGY_EARLIEST_INDEX_FIRST; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_READ_ORDER_STRATEGY_EARLIEST_TIME_URL_FIRST; public enum SourceParquetReadOrderStrategy { @SerializedName(STREAM_SOURCE_PARQUET_READ_ORDER_STRATEGY_EARLIEST_TIME_URL_FIRST) diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/SourceParquetSchemaMatchStrategy.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/SourceParquetSchemaMatchStrategy.java similarity index 55% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/SourceParquetSchemaMatchStrategy.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/SourceParquetSchemaMatchStrategy.java index 8300f4dc7..c4b9c78ec 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/SourceParquetSchemaMatchStrategy.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/SourceParquetSchemaMatchStrategy.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.source.parquet; +package com.gotocompany.dagger.core.source.parquet; import com.google.gson.annotations.SerializedName; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_BACKWARD_COMPATIBLE_SCHEMA_MATCH_STRATEGY; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_SAME_SCHEMA_MATCH_STRATEGY; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_BACKWARD_COMPATIBLE_SCHEMA_MATCH_STRATEGY; +import static com.gotocompany.dagger.core.utils.Constants.STREAM_SOURCE_PARQUET_SAME_SCHEMA_MATCH_STRATEGY; public enum SourceParquetSchemaMatchStrategy { 
@SerializedName(STREAM_SOURCE_PARQUET_SAME_SCHEMA_MATCH_STRATEGY) diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/path/HourDatePathParser.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/path/HourDatePathParser.java similarity index 97% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/path/HourDatePathParser.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/path/HourDatePathParser.java index bee34ac52..55dc465c1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/path/HourDatePathParser.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/path/HourDatePathParser.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.parquet.path; +package com.gotocompany.dagger.core.source.parquet.path; import org.apache.flink.core.fs.Path; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/path/PathParser.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/path/PathParser.java similarity index 77% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/path/PathParser.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/path/PathParser.java index b1868aa42..bf893bba6 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/path/PathParser.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/path/PathParser.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.parquet.path; +package com.gotocompany.dagger.core.source.parquet.path; import org.apache.flink.core.fs.Path; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/reader/ParquetReader.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/reader/ParquetReader.java similarity index 88% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/reader/ParquetReader.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/reader/ParquetReader.java index 017aa92ba..52bb817ac 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/reader/ParquetReader.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/reader/ParquetReader.java @@ -1,14 +1,15 @@ -package io.odpf.dagger.core.source.parquet.reader; - -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import io.odpf.dagger.core.metrics.reporters.statsd.manager.DaggerCounterManager; -import io.odpf.dagger.core.metrics.reporters.statsd.manager.DaggerHistogramManager; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.core.exception.ParquetFileSourceReaderInitializationException; -import io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects; +package com.gotocompany.dagger.core.source.parquet.reader; + +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.core.exception.ParquetFileSourceReaderInitializationException; +import com.gotocompany.dagger.core.metrics.aspects.ParquetReaderAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import 
com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.statsd.manager.DaggerCounterManager; +import com.gotocompany.dagger.core.metrics.reporters.statsd.manager.DaggerHistogramManager; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.ComponentTags; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; import org.apache.flink.connector.file.src.reader.FileRecordFormat; import org.apache.flink.connector.file.src.util.CheckpointedPosition; import org.apache.flink.types.Row; @@ -31,8 +32,6 @@ import java.io.IOException; import java.time.Instant; -import static io.odpf.dagger.core.metrics.reporters.statsd.tags.ComponentTags.getParquetReaderTags; - public class ParquetReader implements FileRecordFormat.Reader { private final Path hadoopFilePath; private final SimpleGroupDeserializer simpleGroupDeserializer; @@ -62,7 +61,7 @@ private ParquetReader(Path hadoopFilePath, SimpleGroupDeserializer simpleGroupDe } private void registerTagsWithMeasurementManagers(SerializedStatsDReporterSupplier statsDReporterSupplier) { - StatsDTag[] parquetReaderTags = getParquetReaderTags(); + StatsDTag[] parquetReaderTags = ComponentTags.getParquetReaderTags(); this.daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); this.daggerCounterManager.register(parquetReaderTags); this.daggerHistogramManager = new DaggerHistogramManager(statsDReporterSupplier); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/reader/ReaderProvider.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/reader/ReaderProvider.java similarity index 82% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/reader/ReaderProvider.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/reader/ReaderProvider.java index 3fb592970..7a5b30478 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/reader/ReaderProvider.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/reader/ReaderProvider.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.parquet.reader; +package com.gotocompany.dagger.core.source.parquet.reader; import org.apache.flink.connector.file.src.reader.FileRecordFormat; import org.apache.flink.types.Row; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssigner.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssigner.java similarity index 78% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssigner.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssigner.java index ac83d0730..593ea20b0 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssigner.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssigner.java @@ -1,12 +1,14 @@ -package io.odpf.dagger.core.source.parquet.splitassigner; - -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; -import 
io.odpf.dagger.core.metrics.reporters.statsd.manager.DaggerGaugeManager; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.dagger.core.exception.PathParserNotProvidedException; -import io.odpf.dagger.core.source.config.models.TimeRangePool; -import io.odpf.dagger.core.source.parquet.path.PathParser; +package com.gotocompany.dagger.core.source.parquet.splitassigner; + +import com.gotocompany.dagger.core.exception.PathParserNotProvidedException; +import com.gotocompany.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.StatsDErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.statsd.manager.DaggerGaugeManager; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.ComponentTags; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.dagger.core.source.config.models.TimeRangePool; +import com.gotocompany.dagger.core.source.parquet.path.PathParser; import org.apache.flink.connector.file.src.FileSourceSplit; import org.apache.flink.connector.file.src.assigners.FileSplitAssigner; @@ -21,10 +23,6 @@ import java.util.stream.Collectors; import static com.google.api.client.util.Preconditions.checkArgument; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.SPLITS_AWAITING_ASSIGNMENT; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_RECORDED; -import static io.odpf.dagger.core.metrics.reporters.statsd.tags.ComponentTags.getSplitAssignerTags; public class ChronologyOrderedSplitAssigner implements FileSplitAssigner { private final PriorityBlockingQueue unassignedSplits; @@ -45,14 +43,14 @@ private ChronologyOrderedSplitAssigner(Collection fileSourceSpl } private void initAndValidate(Collection fileSourceSplits) { - StatsDTag[] splitAssignerTags = getSplitAssignerTags(); + StatsDTag[] splitAssignerTags = ComponentTags.getSplitAssignerTags(); daggerGaugeManager.register(splitAssignerTags); - daggerGaugeManager.markValue(TOTAL_SPLITS_DISCOVERED, fileSourceSplits.size()); + daggerGaugeManager.markValue(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED, fileSourceSplits.size()); for (FileSourceSplit split : fileSourceSplits) { validateAndAddSplits(split); } - daggerGaugeManager.markValue(TOTAL_SPLITS_RECORDED, unassignedSplits.size()); - daggerGaugeManager.markValue(SPLITS_AWAITING_ASSIGNMENT, unassignedSplits.size()); + daggerGaugeManager.markValue(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_RECORDED, unassignedSplits.size()); + daggerGaugeManager.markValue(ChronologyOrderedSplitAssignerAspects.SPLITS_AWAITING_ASSIGNMENT, unassignedSplits.size()); } @Override @@ -61,7 +59,7 @@ public Optional getNext(@Nullable String hostname) { if (instantEnrichedSplit == null) { return Optional.empty(); } - daggerGaugeManager.markValue(SPLITS_AWAITING_ASSIGNMENT, unassignedSplits.size()); + daggerGaugeManager.markValue(ChronologyOrderedSplitAssignerAspects.SPLITS_AWAITING_ASSIGNMENT, unassignedSplits.size()); return Optional.of(instantEnrichedSplit.getFileSourceSplit()); } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/IndexOrderedSplitAssigner.java 
b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/IndexOrderedSplitAssigner.java similarity index 91% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/IndexOrderedSplitAssigner.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/IndexOrderedSplitAssigner.java index e66194ca0..e7db5af2c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/IndexOrderedSplitAssigner.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/IndexOrderedSplitAssigner.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.parquet.splitassigner; +package com.gotocompany.dagger.core.source.parquet.splitassigner; import org.apache.flink.connector.file.src.FileSourceSplit; import org.apache.flink.connector.file.src.assigners.FileSplitAssigner; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/InstantEnrichedSplit.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/InstantEnrichedSplit.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/InstantEnrichedSplit.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/InstantEnrichedSplit.java index 73b683a77..7bbfb125f 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/parquet/splitassigner/InstantEnrichedSplit.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/source/parquet/splitassigner/InstantEnrichedSplit.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.parquet.splitassigner; +package com.gotocompany.dagger.core.source.parquet.splitassigner; import lombok.Getter; import org.apache.flink.connector.file.src.FileSourceSplit; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/Constants.java similarity index 87% rename from dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java rename to dagger-core/src/main/java/com/gotocompany/dagger/core/utils/Constants.java index 30bb675e8..78fdbe707 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/Constants.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.utils; +package com.gotocompany.dagger.core.utils; public class Constants { @@ -74,11 +74,14 @@ public class Constants { public static final String SINK_KAFKA_JSON_SCHEMA_KEY = "SINK_KAFKA_JSON_SCHEMA"; public static final String SINK_KAFKA_DATA_TYPE = "SINK_KAFKA_DATA_TYPE"; public static final String SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY = "SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE"; + public static final String SINK_KAFKA_LINGER_MS_KEY = "SINK_KAFKA_LINGER_MS"; public static final boolean SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_DEFAULT = false; public static final String SINK_KAFKA_COMPRESSION_TYPE_KEY = "compression.type"; + public static final String SINK_KAFKA_LINGER_MS_CONFIG_KEY = "linger.ms"; public static final String SINK_KAFKA_COMPRESSION_TYPE_DEFAULT = "snappy"; public static final String SINK_KAFKA_MAX_REQUEST_SIZE_KEY = "max.request.size"; public static final String SINK_KAFKA_MAX_REQUEST_SIZE_DEFAULT = "20971520"; + public static final String SINK_KAFKA_LINGER_MS_DEFAULT = "0"; public static final String ES_TYPE = "ES"; public static final String HTTP_TYPE = "HTTP"; 
@@ -89,7 +92,7 @@ public class Constants { public static final String LONGBOW_WRITER_PROCESSOR_KEY = "longbow_writer_processor"; public static final String LONGBOW_READER_PROCESSOR_KEY = "longbow_reader_processor"; public static final String TRANSFORM_PROCESSOR_KEY = "transform_processor"; - public static final String SQL_TRANSFORMER_CLASS = "io.odpf.dagger.functions.transformers.SQLTransformer"; + public static final String SQL_TRANSFORMER_CLASS = "SQLTransformer"; public static final String STREAM_INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX_KEY = "INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX"; public static final String STREAM_SOURCE_KAFKA_TOPIC_NAMES_KEY = "SOURCE_KAFKA_TOPIC_NAMES"; @@ -126,6 +129,15 @@ public class Constants { public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL"; public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM"; public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG"; + public static final String SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS = "SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL_KEY = "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL"; public static final String METRIC_TELEMETRY_ENABLE_KEY = "METRIC_TELEMETRY_ENABLE"; public static final boolean METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT = true; @@ -135,7 +147,7 @@ public class Constants { public static final String NONFATAL_EXCEPTION_METRIC_GROUP_KEY = "non.fatal.exception"; public static final String FUNCTION_FACTORY_CLASSES_KEY = "FUNCTION_FACTORY_CLASSES"; - public static final String FUNCTION_FACTORY_CLASSES_DEFAULT = "io.odpf.dagger.functions.udfs.factories.FunctionFactory"; + public static final String FUNCTION_FACTORY_CLASSES_DEFAULT = "FunctionFactory"; public static final String SINK_INFLUX_LATE_RECORDS_DROPPED_KEY = "influx.late.records.dropped"; public static final String SINK_INFLUX_DB_NAME_KEY = "SINK_INFLUX_DB_NAME"; @@ -175,6 +187,9 @@ public enum ExternalPostProcessorVariableType { REQUEST_VARIABLES, HEADER_VARIAB public static final String SINK_ERROR_TYPES_FOR_FAILURE = "SINK_ERROR_TYPES_FOR_FAILURE"; public static final String SINK_ERROR_TYPES_FOR_FAILURE_DEFAULT = ""; - public static final String[] SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL = {"SASL_PLAINTEXT", "SASL_SSL"}; + public static final String[] 
SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL = {"SASL_PLAINTEXT", "SASL_SSL", "SSL"}; public static final String[] SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM = {"PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"}; + + public static final String[] SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL = {"TLS", "TLSv1.1", "TLSv1.2", "TLSv1.3", "SSL", "SSLv2", "SSLv3"}; + public static final String[] SUPPORTED_SOURCE_KAFKA_CONSUMER_CONFIG_SSL_STORE_FILE_TYPE = {"JKS", "PKCS12", "PEM"}; } diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/DescriptorsUtil.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/DescriptorsUtil.java new file mode 100644 index 000000000..8b3caa589 --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/DescriptorsUtil.java @@ -0,0 +1,60 @@ +package com.gotocompany.dagger.core.utils; + +import com.google.protobuf.Descriptors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; + +import static com.google.protobuf.Descriptors.FieldDescriptor.JavaType.MESSAGE; + +/** + * Utility class that contains helper methods to get {@link Descriptors} {@link Descriptors.FieldDescriptor}. + */ +public class DescriptorsUtil { + + private static final Logger LOGGER = LoggerFactory.getLogger(DescriptorsUtil.class.getName()); + + /** + * Gets FieldDescriptor . + * + * @param descriptor the descriptor + * @param columnName the columnName + * @return the fieldDescriptor + */ + public static Descriptors.FieldDescriptor getFieldDescriptor(Descriptors.Descriptor descriptor, String columnName) { + if (descriptor == null || columnName == null) { + return null; + } + String[] nestedFields = columnName.split("\\."); + if (nestedFields.length == 1) { + return descriptor.findFieldByName(columnName); + } else { + return getNestedFieldDescriptor(descriptor, nestedFields); + } + } + + /** + * Gets FieldDescriptor . 
+ * + * @param parentDescriptor the descriptor + * @param nestedColumnNames the array of columnNames + * @return the fieldDescriptor + */ + public static Descriptors.FieldDescriptor getNestedFieldDescriptor(Descriptors.Descriptor parentDescriptor, String[] nestedColumnNames) { + if (parentDescriptor == null || nestedColumnNames == null || nestedColumnNames.length == 0) { + return null; + } + String childColumnName = nestedColumnNames[0]; + if (nestedColumnNames.length == 1) { + return parentDescriptor.findFieldByName(childColumnName); + } + Descriptors.FieldDescriptor childFieldDescriptor = parentDescriptor.findFieldByName(childColumnName); + if (childFieldDescriptor == null || childFieldDescriptor.getJavaType() != MESSAGE) { + LOGGER.info(String.format("Either the Field Descriptor for the field '%s' is missing in the proto '%s', or the Field Descriptor is not of the MESSAGE type.", childColumnName, parentDescriptor.getFullName())); + return null; + } + Descriptors.Descriptor childDescriptor = childFieldDescriptor.getMessageType(); + return getNestedFieldDescriptor(childDescriptor, Arrays.copyOfRange(nestedColumnNames, 1, nestedColumnNames.length)); + } +} diff --git a/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/KafkaConfigUtil.java b/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/KafkaConfigUtil.java new file mode 100644 index 000000000..14539a01c --- /dev/null +++ b/dagger-core/src/main/java/com/gotocompany/dagger/core/utils/KafkaConfigUtil.java @@ -0,0 +1,28 @@ +package com.gotocompany.dagger.core.utils; + +import com.gotocompany.dagger.core.enumeration.KafkaConnectorTypesMetadata; + +import java.util.Properties; +import java.util.Set; +import java.util.regex.Matcher; + +public class KafkaConfigUtil { + + public static Properties parseKafkaConfiguration(KafkaConnectorTypesMetadata kafkaConnectorTypesMetadata, Properties properties) { + Properties kafkaProperties = new Properties(); + Set configKeys = properties.keySet(); + + for (Object key : configKeys) { + Matcher matcher = kafkaConnectorTypesMetadata.getConfigurationPattern() + .matcher(key.toString()); + if (matcher.find()) { + String kafkaConfigKey = matcher.group(1) + .toLowerCase() + .replaceAll("_+", "."); + kafkaProperties.setProperty(kafkaConfigKey, properties.get(key).toString()); + } + } + return kafkaProperties; + } + +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/KafkaProtoSQLProcessor.java b/dagger-core/src/main/java/io/odpf/dagger/core/KafkaProtoSQLProcessor.java deleted file mode 100644 index 0001b1326..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/KafkaProtoSQLProcessor.java +++ /dev/null @@ -1,40 +0,0 @@ -package io.odpf.dagger.core; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.config.ConfigurationProvider; -import io.odpf.dagger.core.config.ConfigurationProviderFactory; -import org.apache.flink.client.program.ProgramInvocationException; -import io.odpf.dagger.common.core.DaggerContext; - -import java.util.TimeZone; - -/** - * Main class to run Dagger. - */ -public class KafkaProtoSQLProcessor { - - /** - * The entry point of application. 
- * - * @param args the input arguments - * @throws ProgramInvocationException the program invocation exception - */ - public static void main(String[] args) throws ProgramInvocationException { - try { - ConfigurationProvider provider = new ConfigurationProviderFactory(args).provider(); - Configuration configuration = provider.get(); - TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - DaggerContext daggerContext = DaggerContext.init(configuration); - StreamManager streamManager = new StreamManager(daggerContext); - streamManager - .registerConfigs() - .registerSourceWithPreProcessors() - .registerFunctions() - .registerOutputStream() - .execute(); - } catch (Exception | AssertionError e) { - e.printStackTrace(); - throw new ProgramInvocationException(e); - } - } -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/MeasurementManager.java b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/MeasurementManager.java deleted file mode 100644 index 5c195c17b..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/MeasurementManager.java +++ /dev/null @@ -1,9 +0,0 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; - -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; - -import java.io.Serializable; - -public interface MeasurementManager extends Serializable { - void register(StatsDTag[] tags); -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Gauge.java b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Gauge.java deleted file mode 100644 index a41fb29bd..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Gauge.java +++ /dev/null @@ -1,9 +0,0 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.measurement; - -import io.odpf.dagger.common.metrics.aspects.Aspects; - -import java.io.Serializable; - -public interface Gauge extends Serializable { - void markValue(Aspects aspect, int gaugeValue); -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Histogram.java b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Histogram.java deleted file mode 100644 index 1c8f68cfe..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/statsd/measurement/Histogram.java +++ /dev/null @@ -1,9 +0,0 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.measurement; - -import io.odpf.dagger.common.metrics.aspects.Aspects; - -import java.io.Serializable; - -public interface Histogram extends Serializable { - void recordValue(Aspects aspect, long value); -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceName.java b/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceName.java deleted file mode 100644 index 8027df048..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceName.java +++ /dev/null @@ -1,16 +0,0 @@ -package io.odpf.dagger.core.source.config.models; - -import com.google.gson.annotations.SerializedName; - -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA_CONSUMER; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_NAME_PARQUET; - -public enum SourceName { - 
@SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA) - KAFKA_SOURCE, - @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_PARQUET) - PARQUET_SOURCE, - @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_NAME_KAFKA_CONSUMER) - KAFKA_CONSUMER -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceType.java b/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceType.java deleted file mode 100644 index 5d89064fc..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/config/models/SourceType.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.odpf.dagger.core.source.config.models; - -import com.google.gson.annotations.SerializedName; - -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_TYPE_BOUNDED; -import static io.odpf.dagger.core.utils.Constants.STREAM_SOURCE_DETAILS_SOURCE_TYPE_UNBOUNDED; - -public enum SourceType { - @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_TYPE_BOUNDED) - BOUNDED, - @SerializedName(STREAM_SOURCE_DETAILS_SOURCE_TYPE_UNBOUNDED) - UNBOUNDED -} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/DaggerSqlJobBuilderTest.java similarity index 80% rename from dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/DaggerSqlJobBuilderTest.java index 0194a7b88..5f9a748ea 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/DaggerSqlJobBuilderTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core; +package com.gotocompany.dagger.core; -import io.odpf.dagger.common.core.DaggerContextTestBase; -import io.odpf.dagger.common.core.DaggerContext; -import io.odpf.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.StreamInfo; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -28,6 +28,7 @@ import org.powermock.modules.junit4.PowerMockRunner; import java.time.Duration; +import java.util.concurrent.TimeUnit; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.verify; @@ -37,9 +38,9 @@ @PrepareForTest(TableSchema.class) @RunWith(PowerMockRunner.class) -public class StreamManagerTest extends DaggerContextTestBase { +public class DaggerSqlJobBuilderTest extends DaggerContextTestBase { - private StreamManager streamManager; + private DaggerSqlJobBuilder daggerSqlJobBuilder; private String jsonArray = "[\n" + " {\n" @@ -48,7 +49,7 @@ public class StreamManagerTest extends DaggerContextTestBase { + " \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\",\n" + " \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:6667\",\n" + " \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"flink-sql-flud-gp0330\",\n" - + " \"INPUT_SCHEMA_PROTO_CLASS\": \"io.odpf.dagger.consumer.TestBookingLogMessage\",\n" + + " \"INPUT_SCHEMA_PROTO_CLASS\": \"com.gotocompany.dagger.consumer.TestBookingLogMessage\",\n" + " \"INPUT_SCHEMA_TABLE\": \"data_stream\",\n" + " \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\"\n" + " }\n" @@ -97,6 +98,9 @@ public void setup() { when(configuration.getString("STREAMS", "")).thenReturn(jsonArray); 
when(configuration.getBoolean("SCHEMA_REGISTRY_STENCIL_ENABLE", false)).thenReturn(false); when(configuration.getString("SCHEMA_REGISTRY_STENCIL_URLS", "")).thenReturn(""); + when(configuration.getBoolean("SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH", false)).thenReturn(false); + when(configuration.getLong("SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS", TimeUnit.HOURS.toMillis(2))).thenReturn(TimeUnit.HOURS.toMillis(2)); + when(configuration.getString("SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY", "LONG_POLLING")).thenReturn("LONG_POLLING"); when(configuration.getString("FLINK_JOB_ID", "SQL Flink job")).thenReturn("SQL Flink job"); when(configuration.getString("SINK_TYPE", "influx")).thenReturn("influx"); when(configuration.getString("FLINK_SQL_QUERY", "")).thenReturn(""); @@ -113,12 +117,12 @@ public void setup() { when(schema.getFieldNames()).thenReturn(new String[0]); PowerMockito.mockStatic(TableSchema.class); when(TableSchema.fromTypeInfo(typeInformation)).thenReturn(schema); - streamManager = new StreamManager(daggerContext); + daggerSqlJobBuilder = new DaggerSqlJobBuilder(daggerContext); } @Test public void shouldRegisterRequiredConfigsOnExecutionEnvironment() { - streamManager.registerConfigs(); + daggerSqlJobBuilder.registerConfigs(); verify(streamExecutionEnvironment, Mockito.times(1)).setParallelism(1); verify(streamExecutionEnvironment, Mockito.times(1)).enableCheckpointing(30000); @@ -136,32 +140,32 @@ public void shouldRegisterSourceWithPreprocessorsWithWaterMarks() { when(source.assignTimestampsAndWatermarks(any(WatermarkStrategy.class))).thenReturn(singleOutputStream); when(singleOutputStream.getType()).thenReturn(typeInformation); - StreamManagerStub streamManagerStub = new StreamManagerStub(daggerContext, new StreamInfo(dataStream, new String[]{})); - streamManagerStub.registerConfigs(); - streamManagerStub.registerSourceWithPreProcessors(); + DaggerSqlJobBuilderStub daggerSqlJobBuilderStub = new DaggerSqlJobBuilderStub(daggerContext, new StreamInfo(dataStream, new String[]{})); + daggerSqlJobBuilderStub.registerConfigs(); + daggerSqlJobBuilderStub.registerSourceWithPreProcessors(); verify(streamTableEnvironment, Mockito.times(1)).fromDataStream(any(), new ApiExpression[]{}); } @Test public void shouldCreateOutputStream() { - StreamManagerStub streamManagerStub = new StreamManagerStub(daggerContext, new StreamInfo(dataStream, new String[]{})); - streamManagerStub.registerOutputStream(); + DaggerSqlJobBuilderStub daggerSqlJobBuilderStub = new DaggerSqlJobBuilderStub(daggerContext, new StreamInfo(dataStream, new String[]{})); + daggerSqlJobBuilderStub.registerOutputStream(); verify(streamTableEnvironment, Mockito.times(1)).sqlQuery(""); } @Test public void shouldExecuteJob() throws Exception { - streamManager.execute(); + daggerSqlJobBuilder.execute(); verify(streamExecutionEnvironment, Mockito.times(1)).execute("SQL Flink job"); } - final class StreamManagerStub extends StreamManager { + final class DaggerSqlJobBuilderStub extends DaggerSqlJobBuilder { private final StreamInfo streamInfo; - private StreamManagerStub(DaggerContext daggerContext, StreamInfo streamInfo) { + private DaggerSqlJobBuilderStub(DaggerContext daggerContext, StreamInfo streamInfo) { super(daggerContext); this.streamInfo = streamInfo; } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/config/ConfigurationProviderFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/ConfigurationProviderFactoryTest.java similarity index 97% rename from 
dagger-core/src/test/java/io/odpf/dagger/core/config/ConfigurationProviderFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/config/ConfigurationProviderFactoryTest.java index 519ea2a1b..d175a84b4 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/config/ConfigurationProviderFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/ConfigurationProviderFactoryTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/config/KafkaEnvironmentVariablesTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/KafkaEnvironmentVariablesTest.java similarity index 97% rename from dagger-core/src/test/java/io/odpf/dagger/core/config/KafkaEnvironmentVariablesTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/config/KafkaEnvironmentVariablesTest.java index d1aef9d23..ead63cb77 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/config/KafkaEnvironmentVariablesTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/KafkaEnvironmentVariablesTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.config; +package com.gotocompany.dagger.core.config; import org.apache.flink.configuration.Configuration; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/config/commandline/CommandlineConfigurationProviderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/commandline/CommandlineConfigurationProviderTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/config/commandline/CommandlineConfigurationProviderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/config/commandline/CommandlineConfigurationProviderTest.java index 9fb8360f3..b39754592 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/config/commandline/CommandlineConfigurationProviderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/commandline/CommandlineConfigurationProviderTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.config.commandline; +package com.gotocompany.dagger.core.config.commandline; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.config.CommandlineConfigurationProvider; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.config.CommandlineConfigurationProvider; import org.junit.Test; import java.util.Base64; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/config/system/EnvironmentConfigurationProviderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/system/EnvironmentConfigurationProviderTest.java similarity index 79% rename from dagger-core/src/test/java/io/odpf/dagger/core/config/system/EnvironmentConfigurationProviderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/config/system/EnvironmentConfigurationProviderTest.java index 36c45d3c6..c0170545d 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/config/system/EnvironmentConfigurationProviderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/system/EnvironmentConfigurationProviderTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.config.system; +package com.gotocompany.dagger.core.config.system; -import io.odpf.dagger.common.configuration.Configuration; -import 
io.odpf.dagger.core.config.EnvironmentConfigurationProvider; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.config.EnvironmentConfigurationProvider; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/config/system/FileConfigurationProvideTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/system/FileConfigurationProvideTest.java similarity index 75% rename from dagger-core/src/test/java/io/odpf/dagger/core/config/system/FileConfigurationProvideTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/config/system/FileConfigurationProvideTest.java index 9bd1510d3..f4c5d2ca4 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/config/system/FileConfigurationProvideTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/config/system/FileConfigurationProvideTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.config.system; +package com.gotocompany.dagger.core.config.system; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.config.FileConfigurationProvider; -import io.odpf.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.config.FileConfigurationProvider; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; import org.junit.Test; import static org.junit.Assert.assertEquals; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/DaggerDeserializerFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerFactoryTest.java similarity index 74% rename from dagger-core/src/test/java/io/odpf/dagger/core/deserializer/DaggerDeserializerFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerFactoryTest.java index 0b7144f88..97dfb85aa 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/DaggerDeserializerFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/DaggerDeserializerFactoryTest.java @@ -1,20 +1,21 @@ -package io.odpf.dagger.core.deserializer; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.json.deserialization.JsonDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.depot.metrics.StatsDReporter; -import io.odpf.stencil.client.StencilClient; +package com.gotocompany.dagger.core.deserializer; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import 
com.gotocompany.dagger.common.serde.json.deserialization.JsonDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.depot.metrics.StatsDReporter; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.stencil.config.StencilConfig; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; @@ -30,6 +31,9 @@ public class DaggerDeserializerFactoryTest { @Mock private StreamConfig streamConfig; + @Mock + private StencilConfig stencilConfig; + @Mock private Configuration configuration; @@ -65,7 +69,8 @@ public void shouldReturnProtoDeserializerWhenConfigured() { when(streamConfig.getProtoClass()).thenReturn("com.tests.TestMessage"); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(stencilClient.get("com.tests.TestMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); - + when(stencilConfig.getCacheAutoRefresh()).thenReturn(false); + when(stencilClientOrchestrator.createStencilConfig()).thenReturn(stencilConfig); DaggerDeserializer daggerDeserializer = DaggerDeserializerFactory.create(streamConfig, configuration, stencilClientOrchestrator, statsDReporterSupplierMock); assertTrue(daggerDeserializer instanceof ProtoDeserializer); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/JsonDeserializerProviderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/JsonDeserializerProviderTest.java similarity index 87% rename from dagger-core/src/test/java/io/odpf/dagger/core/deserializer/JsonDeserializerProviderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/JsonDeserializerProviderTest.java index 2bfd5a589..9698752d2 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/JsonDeserializerProviderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/JsonDeserializerProviderTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.deserializer; - -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.json.deserialization.JsonDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; +package com.gotocompany.dagger.core.deserializer; + +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.json.deserialization.JsonDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; import org.apache.flink.types.Row; import org.junit.Before; 
import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/ProtoDeserializerProviderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/ProtoDeserializerProviderTest.java similarity index 78% rename from dagger-core/src/test/java/io/odpf/dagger/core/deserializer/ProtoDeserializerProviderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/ProtoDeserializerProviderTest.java index dfe836d7d..61b9c9a9f 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/ProtoDeserializerProviderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/ProtoDeserializerProviderTest.java @@ -1,15 +1,16 @@ -package io.odpf.dagger.core.deserializer; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.stencil.client.StencilClient; +package com.gotocompany.dagger.core.deserializer; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.stencil.config.StencilConfig; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; @@ -23,6 +24,9 @@ public class ProtoDeserializerProviderTest { @Mock private StreamConfig streamConfig; + @Mock + private StencilConfig stencilConfig; + @Mock private Configuration configuration; @@ -84,6 +88,8 @@ public void shouldReturnProtoDeserializerForSupportedSourceNameAndSchemaType() { when(streamConfig.getEventTimestampFieldIndex()).thenReturn("5"); when(streamConfig.getProtoClass()).thenReturn("com.tests.TestMessage"); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); + when(stencilConfig.getCacheAutoRefresh()).thenReturn(false); + when(stencilClientOrchestrator.createStencilConfig()).thenReturn(stencilConfig); when(stencilClient.get("com.tests.TestMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); ProtoDeserializerProvider provider = new ProtoDeserializerProvider(streamConfig, configuration, stencilClientOrchestrator); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/SimpleGroupDeserializerProviderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/SimpleGroupDeserializerProviderTest.java similarity index 81% rename from dagger-core/src/test/java/io/odpf/dagger/core/deserializer/SimpleGroupDeserializerProviderTest.java rename to 
dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/SimpleGroupDeserializerProviderTest.java index 09c856b92..ad50f7c67 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/deserializer/SimpleGroupDeserializerProviderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/deserializer/SimpleGroupDeserializerProviderTest.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.deserializer; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.stencil.client.StencilClient; +package com.gotocompany.dagger.core.deserializer; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.stencil.client.StencilClient; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java similarity index 80% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java index 1b7103405..927a55214 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java @@ -1,16 +1,14 @@ -package io.odpf.dagger.core.metrics.reporters; +package com.gotocompany.dagger.core.metrics.reporters; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.metrics.MetricGroup; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_KEY; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -30,7 +28,7 @@ public class ErrorReporterFactoryTest { public void setup() { initMocks(this); when(configuration.getLong(Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, 
Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L); - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); } @Test diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/ErrorStatsReporterTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/ErrorStatsReporterTest.java index f15e51c09..a46b567b9 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/ErrorStatsReporterTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters; +package com.gotocompany.dagger.core.metrics.reporters; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.metrics.Counter; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerMetricsConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerMetricsConfigTest.java similarity index 97% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerMetricsConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerMetricsConfigTest.java index bbc5c5414..f78d3c9b4 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerMetricsConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerMetricsConfigTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; import org.apache.flink.configuration.ConfigOption; import org.apache.flink.configuration.ConfigOptions; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerStatsDReporterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerStatsDReporterTest.java similarity index 92% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerStatsDReporterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerStatsDReporterTest.java index ac8f2b3f0..4f075a536 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/DaggerStatsDReporterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/DaggerStatsDReporterTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; import org.apache.flink.configuration.Configuration; import org.junit.Before; @@ -21,7 +21,7 @@ public class DaggerStatsDReporterTest { private Configuration flinkConfiguration; @Mock - private io.odpf.dagger.common.configuration.Configuration daggerConfiguration; + private com.gotocompany.dagger.common.configuration.Configuration daggerConfiguration; @Before public void setup() throws IOException { diff --git 
a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/StatsDErrorReporterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/StatsDErrorReporterTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/StatsDErrorReporterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/StatsDErrorReporterTest.java index e8def56b7..d2ce518b0 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/StatsDErrorReporterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/StatsDErrorReporterTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.metrics.reporters.statsd; +package com.gotocompany.dagger.core.metrics.reporters.statsd; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.depot.metrics.StatsDReporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManagerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManagerTest.java similarity index 53% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManagerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManagerTest.java index 49e84bf81..533580ecc 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManagerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerCounterManagerTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.core.metrics.aspects.ParquetReaderAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_CLOSED; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.MockitoAnnotations.initMocks; @@ -27,18 +27,18 @@ public void setup() { public void shouldIncrementCounterMeasurement() { DaggerCounterManager daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); - daggerCounterManager.increment(READER_CLOSED); + daggerCounterManager.increment(ParquetReaderAspects.READER_CLOSED); - verify(statsDReporter, times(1)).captureCount(READER_CLOSED.getValue(), 1L, (String[]) null); + verify(statsDReporter, times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), 1L, (String[]) null); } @Test public void shouldIncrementCounterMeasurementWithDelta() { DaggerCounterManager daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); - daggerCounterManager.increment(READER_CLOSED, 5L); + 
daggerCounterManager.increment(ParquetReaderAspects.READER_CLOSED, 5L); - verify(statsDReporter, times(1)).captureCount(READER_CLOSED.getValue(), 5L, (String[]) null); + verify(statsDReporter, times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), 5L, (String[]) null); } @Test @@ -46,27 +46,27 @@ public void shouldIncrementCounterMeasurementWithRegisteredTags() { DaggerCounterManager daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); daggerCounterManager.register(new StatsDTag[]{new StatsDTag("tag1", "value1"), new StatsDTag("tag2", "value2")}); - daggerCounterManager.increment(READER_CLOSED); + daggerCounterManager.increment(ParquetReaderAspects.READER_CLOSED); - verify(statsDReporter, times(1)).captureCount(READER_CLOSED.getValue(), 1L, "tag1=value1", "tag2=value2"); + verify(statsDReporter, times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), 1L, "tag1=value1", "tag2=value2"); } @Test public void shouldDecrementCounterMeasurement() { DaggerCounterManager daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); - daggerCounterManager.decrement(READER_CLOSED); + daggerCounterManager.decrement(ParquetReaderAspects.READER_CLOSED); - verify(statsDReporter, times(1)).captureCount(READER_CLOSED.getValue(), -1L, (String[]) null); + verify(statsDReporter, times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), -1L, (String[]) null); } @Test public void shouldDecrementCounterMeasurementWithDelta() { DaggerCounterManager daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); - daggerCounterManager.decrement(READER_CLOSED, -5L); + daggerCounterManager.decrement(ParquetReaderAspects.READER_CLOSED, -5L); - verify(statsDReporter, times(1)).captureCount(READER_CLOSED.getValue(), -5L, (String[]) null); + verify(statsDReporter, times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), -5L, (String[]) null); } @Test @@ -74,8 +74,8 @@ public void shouldDecrementCounterMeasurementWithRegisteredTags() { DaggerCounterManager daggerCounterManager = new DaggerCounterManager(statsDReporterSupplier); daggerCounterManager.register(new StatsDTag[]{new StatsDTag("tag1", "value1"), new StatsDTag("tag2", "value2")}); - daggerCounterManager.decrement(READER_CLOSED); + daggerCounterManager.decrement(ParquetReaderAspects.READER_CLOSED); - verify(statsDReporter, times(1)).captureCount(READER_CLOSED.getValue(), -1L, "tag1=value1", "tag2=value2"); + verify(statsDReporter, times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), -1L, "tag1=value1", "tag2=value2"); } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManagerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManagerTest.java similarity index 50% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManagerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManagerTest.java index 9658cf87f..93d095ff9 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManagerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerGaugeManagerTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; -import 
io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import com.gotocompany.depot.metrics.StatsDReporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.MockitoAnnotations.initMocks; @@ -27,9 +27,9 @@ public void setup() { public void shouldMarkGaugeValue() { DaggerGaugeManager daggerGaugeManager = new DaggerGaugeManager(statsDReporterSupplier); - daggerGaugeManager.markValue(TOTAL_SPLITS_DISCOVERED, 1000); + daggerGaugeManager.markValue(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED, 1000); - verify(statsDReporter, times(1)).gauge(TOTAL_SPLITS_DISCOVERED.getValue(), 1000, (String[]) null); + verify(statsDReporter, times(1)).gauge(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED.getValue(), 1000, (String[]) null); } @Test @@ -37,8 +37,8 @@ public void shouldMarkGaugeValueWithRegisteredTags() { DaggerGaugeManager daggerGaugeManager = new DaggerGaugeManager(statsDReporterSupplier); daggerGaugeManager.register(new StatsDTag[]{new StatsDTag("tag1", "value1"), new StatsDTag("tag2", "value2")}); - daggerGaugeManager.markValue(TOTAL_SPLITS_DISCOVERED, 1000); + daggerGaugeManager.markValue(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED, 1000); - verify(statsDReporter, times(1)).gauge(TOTAL_SPLITS_DISCOVERED.getValue(), 1000, "tag1=value1", "tag2=value2"); + verify(statsDReporter, times(1)).gauge(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED.getValue(), 1000, "tag1=value1", "tag2=value2"); } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManagerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManagerTest.java similarity index 53% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManagerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManagerTest.java index 571c6be6e..59083f1df 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManagerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/manager/DaggerHistogramManagerTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.manager; +package com.gotocompany.dagger.core.metrics.reporters.statsd.manager; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.metrics.reporters.statsd.tags.StatsDTag; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.core.metrics.aspects.ParquetReaderAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.metrics.reporters.statsd.tags.StatsDTag; +import 
com.gotocompany.depot.metrics.StatsDReporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_ROW_READ_TIME; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.MockitoAnnotations.initMocks; @@ -27,9 +27,9 @@ public void setup() { public void shouldRecordHistogramValue() { DaggerHistogramManager daggerHistogramManager = new DaggerHistogramManager(statsDReporterSupplier); - daggerHistogramManager.recordValue(READER_ROW_READ_TIME, 5L); + daggerHistogramManager.recordValue(ParquetReaderAspects.READER_ROW_READ_TIME, 5L); - verify(statsDReporter, times(1)).captureHistogram(READER_ROW_READ_TIME.getValue(), 5L, (String[]) null); + verify(statsDReporter, times(1)).captureHistogram(ParquetReaderAspects.READER_ROW_READ_TIME.getValue(), 5L, (String[]) null); } @Test @@ -37,8 +37,8 @@ public void shouldRecordHistogramValueWithRegisteredTags() { DaggerHistogramManager daggerHistogramManager = new DaggerHistogramManager(statsDReporterSupplier); daggerHistogramManager.register(new StatsDTag[]{new StatsDTag("tag1", "value1"), new StatsDTag("tag2", "value2")}); - daggerHistogramManager.recordValue(READER_ROW_READ_TIME, 6L); + daggerHistogramManager.recordValue(ParquetReaderAspects.READER_ROW_READ_TIME, 6L); - verify(statsDReporter, times(1)).captureHistogram(READER_ROW_READ_TIME.getValue(), 6L, "tag1=value1", "tag2=value2"); + verify(statsDReporter, times(1)).captureHistogram(ParquetReaderAspects.READER_ROW_READ_TIME.getValue(), 6L, "tag1=value1", "tag2=value2"); } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/StatsDTagTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/StatsDTagTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/StatsDTagTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/StatsDTagTest.java index 1db97bd8d..c53c2e740 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/statsd/tags/StatsDTagTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/metrics/reporters/statsd/tags/StatsDTagTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.metrics.reporters.statsd.tags; +package com.gotocompany.dagger.core.metrics.reporters.statsd.tags; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/ParentPostProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/ParentPostProcessorTest.java similarity index 88% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/ParentPostProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/ParentPostProcessorTest.java index 0125fe382..fa9bc6616 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/ParentPostProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/ParentPostProcessorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.common.core.DaggerContextTestBase; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; import org.junit.Before; import 
org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PostProcessorConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PostProcessorConfigTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/PostProcessorConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PostProcessorConfigTest.java index d09909857..d00366485 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PostProcessorConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PostProcessorConfigTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; +import com.gotocompany.dagger.core.processors.common.OutputMapping; import com.jayway.jsonpath.InvalidJsonException; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.ExternalSourceConfig; -import io.odpf.dagger.core.processors.external.es.EsSourceConfig; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; -import io.odpf.dagger.core.processors.external.pg.PgSourceConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.transformers.TransformConfig; +import com.gotocompany.dagger.core.processors.external.ExternalSourceConfig; +import com.gotocompany.dagger.core.processors.external.es.EsSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.pg.PgSourceConfig; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.transformers.TransformConfig; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -64,7 +64,7 @@ public void shouldReturnHttpExternalSourceConfig() { outputMapping = new OutputMapping("$.data.tensor.values[0]"); outputMappings.put("surge_factor", outputMapping); - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8000", "", "post", null, null, null, null, "5000", "5000", true, null, null, headerMap, outputMappings, null, false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8000", "", "post", null, null, null, null, "5000", "5000", true, null, null, null, headerMap, outputMappings, null, false); assertEquals(httpSourceConfig, defaultPostProcessorConfig.getExternalSource().getHttpConfig().get(0)); } @@ -120,7 +120,7 @@ public void shouldBeEmptyWhenNoneOfTheConfigsExist() { @Test public void shouldNotBeEmptyWhenExternalSourceHasHttpConfigExist() { ArrayList http = new ArrayList<>(); - http.add(new HttpSourceConfig("", "", "", "", "", "", "", "", "", false, "", "", new HashMap<>(), new HashMap<>(), "metricId_01", false)); + http.add(new HttpSourceConfig("", "", "", "", "", "", "", "", "", false, null, "", "", new HashMap<>(), new HashMap<>(), "metricId_01", false)); ArrayList es = new ArrayList<>(); ArrayList pg = new ArrayList<>(); ExternalSourceConfig externalSourceConfig = new ExternalSourceConfig(http, es, pg, new ArrayList<>()); @@ -258,14 +258,14 @@ public void shouldBeFalseWhenTransformerSourceDoesNotExists() { @Test public void shouldReturnTrueForHasSQLTransformerIfTransformConfigContainsSqlTransformer() { - String configuration = "{ \"external_source\": { \"es\": [ { \"host\": \"localhost:9200\", 
\"output_mapping\": { \"customer_profile\": { \"path\": \"$._source\" } }, \"query_param_pattern\": \"/customers/customer/%s\", \"query_param_variables\": \"customer_id\", \"retry_timeout\": \"5000\", \"socket_timeout\": \"6000\", \"stream_timeout\": \"5000\", \"type\": \"TestLogMessage\" } ], \"http\": [ { \"body_column_from_sql\": \"request_body\", \"connect_timeout\": \"5000\", \"endpoint\": \"http://localhost:8000\", \"fail_on_errors\": \"true\", \"headers\": { \"content-type\": \"application/json\" }, \"output_mapping\": { \"surge_factor\": { \"path\": \"$.data.tensor.values[0]\" } }, \"stream_timeout\": \"5000\", \"verb\": \"post\" } ] }, \"internal_source\":[ { \"output_field\": \"event_timestamp\", \"value\": \"CURRENT_TIMESTAMP\", \"type\": \"function\" }, { \"output_field\": \"s2_id_level\", \"value\": \"7\", \"type\": \"constant\" } ], \"transformers\": [ { \"transformation_arguments\": { \"sqlQuery\": \"SELECT * from data_stream\" }, \"transformation_class\": \"io.odpf.dagger.functions.transformers.SQLTransformer\" } ] }"; + String configuration = "{ \"external_source\": { \"es\": [ { \"host\": \"localhost:9200\", \"output_mapping\": { \"customer_profile\": { \"path\": \"$._source\" } }, \"query_param_pattern\": \"/customers/customer/%s\", \"query_param_variables\": \"customer_id\", \"retry_timeout\": \"5000\", \"socket_timeout\": \"6000\", \"stream_timeout\": \"5000\", \"type\": \"TestLogMessage\" } ], \"http\": [ { \"body_column_from_sql\": \"request_body\", \"connect_timeout\": \"5000\", \"endpoint\": \"http://localhost:8000\", \"fail_on_errors\": \"true\", \"headers\": { \"content-type\": \"application/json\" }, \"output_mapping\": { \"surge_factor\": { \"path\": \"$.data.tensor.values[0]\" } }, \"stream_timeout\": \"5000\", \"verb\": \"post\" } ] }, \"internal_source\":[ { \"output_field\": \"event_timestamp\", \"value\": \"CURRENT_TIMESTAMP\", \"type\": \"function\" }, { \"output_field\": \"s2_id_level\", \"value\": \"7\", \"type\": \"constant\" } ], \"transformers\": [ { \"transformation_arguments\": { \"sqlQuery\": \"SELECT * from data_stream\" }, \"transformation_class\": \"SQLTransformer\" } ] }"; PostProcessorConfig postProcessorConfig = PostProcessorConfig.parse(configuration); assertTrue(postProcessorConfig.hasSQLTransformer()); } @Test public void shouldNotReturnTrueForHasSQLTransformerIfTransformConfigDoesNotContainSqlTransformer() { - String configuration = "{ \"external_source\": { \"es\": [ { \"host\": \"localhost:9200\", \"output_mapping\": { \"customer_profile\": { \"path\": \"$._source\" } }, \"query_param_pattern\": \"/customers/customer/%s\", \"query_param_variables\": \"customer_id\", \"retry_timeout\": \"5000\", \"socket_timeout\": \"6000\", \"stream_timeout\": \"5000\", \"type\": \"TestLogMessage\" } ], \"http\": [ { \"body_column_from_sql\": \"request_body\", \"connect_timeout\": \"5000\", \"endpoint\": \"http://localhost:8000\", \"fail_on_errors\": \"true\", \"headers\": { \"content-type\": \"application/json\" }, \"output_mapping\": { \"surge_factor\": { \"path\": \"$.data.tensor.values[0]\" } }, \"stream_timeout\": \"5000\", \"verb\": \"post\" } ] }, \"internal_source\":[ { \"output_field\": \"event_timestamp\", \"value\": \"CURRENT_TIMESTAMP\", \"type\": \"function\" }, { \"output_field\": \"s2_id_level\", \"value\": \"7\", \"type\": \"constant\" } ], \"transformers\": [ { \"transformation_arguments\": { \"sqlQuery\": \"SELECT * from data_stream\" }, \"transformation_class\": \"io.odpf.dagger.transformer.DeDuplicationTransformer\" } ] }"; + String 
configuration = "{ \"external_source\": { \"es\": [ { \"host\": \"localhost:9200\", \"output_mapping\": { \"customer_profile\": { \"path\": \"$._source\" } }, \"query_param_pattern\": \"/customers/customer/%s\", \"query_param_variables\": \"customer_id\", \"retry_timeout\": \"5000\", \"socket_timeout\": \"6000\", \"stream_timeout\": \"5000\", \"type\": \"TestLogMessage\" } ], \"http\": [ { \"body_column_from_sql\": \"request_body\", \"connect_timeout\": \"5000\", \"endpoint\": \"http://localhost:8000\", \"fail_on_errors\": \"true\", \"headers\": { \"content-type\": \"application/json\" }, \"output_mapping\": { \"surge_factor\": { \"path\": \"$.data.tensor.values[0]\" } }, \"stream_timeout\": \"5000\", \"verb\": \"post\" } ] }, \"internal_source\":[ { \"output_field\": \"event_timestamp\", \"value\": \"CURRENT_TIMESTAMP\", \"type\": \"function\" }, { \"output_field\": \"s2_id_level\", \"value\": \"7\", \"type\": \"constant\" } ], \"transformers\": [ { \"transformation_arguments\": { \"sqlQuery\": \"SELECT * from data_stream\" }, \"transformation_class\": \"com.gotocompany.dagger.transformer.DeDuplicationTransformer\" } ] }"; defaultPostProcessorConfig = PostProcessorConfig.parse(configuration); assertFalse(defaultPostProcessorConfig.hasSQLTransformer()); } @@ -279,7 +279,7 @@ public void shouldNotReturnTrueForHasSQLTransformerIfTransformConfigDoesNotExist @Test public void shouldReturnTrueForHasSQLTransformerIfAnyOneTransformConfigContainsSQLTransformer() { - String configuration = "{ \"external_source\": { \"es\": [ { \"host\": \"localhost:9200\", \"output_mapping\": { \"customer_profile\": { \"path\": \"$._source\" } }, \"query_param_pattern\": \"/customers/customer/%s\", \"query_param_variables\": \"customer_id\", \"retry_timeout\": \"5000\", \"socket_timeout\": \"6000\", \"stream_timeout\": \"5000\", \"type\": \"TestLogMessage\" } ], \"http\": [ { \"body_column_from_sql\": \"request_body\", \"connect_timeout\": \"5000\", \"endpoint\": \"http://localhost:8000\", \"fail_on_errors\": \"true\", \"headers\": { \"content-type\": \"application/json\" }, \"output_mapping\": { \"surge_factor\": { \"path\": \"$.data.tensor.values[0]\" } }, \"stream_timeout\": \"5000\", \"verb\": \"post\" } ] }, \"internal_source\":[ { \"output_field\": \"event_timestamp\", \"value\": \"CURRENT_TIMESTAMP\", \"type\": \"function\" }, { \"output_field\": \"s2_id_level\", \"value\": \"7\", \"type\": \"constant\" } ], \"transformers\": [ { \"transformation_arguments\": { \"sqlQuery\": \"SELECT * from data_stream\" }, \"transformation_class\": \"io.odpf.dagger.functions.transformers.SQLTransformer\" }, { \"transformation_arguments\": { \"arg1\": \"test\" }, \"transformation_class\": \"io.odpf.dagger.transformer.Test\" } ] }"; + String configuration = "{ \"external_source\": { \"es\": [ { \"host\": \"localhost:9200\", \"output_mapping\": { \"customer_profile\": { \"path\": \"$._source\" } }, \"query_param_pattern\": \"/customers/customer/%s\", \"query_param_variables\": \"customer_id\", \"retry_timeout\": \"5000\", \"socket_timeout\": \"6000\", \"stream_timeout\": \"5000\", \"type\": \"TestLogMessage\" } ], \"http\": [ { \"body_column_from_sql\": \"request_body\", \"connect_timeout\": \"5000\", \"endpoint\": \"http://localhost:8000\", \"fail_on_errors\": \"true\", \"headers\": { \"content-type\": \"application/json\" }, \"output_mapping\": { \"surge_factor\": { \"path\": \"$.data.tensor.values[0]\" } }, \"stream_timeout\": \"5000\", \"verb\": \"post\" } ] }, \"internal_source\":[ { \"output_field\": \"event_timestamp\", 
\"value\": \"CURRENT_TIMESTAMP\", \"type\": \"function\" }, { \"output_field\": \"s2_id_level\", \"value\": \"7\", \"type\": \"constant\" } ], \"transformers\": [ { \"transformation_arguments\": { \"sqlQuery\": \"SELECT * from data_stream\" }, \"transformation_class\": \"SQLTransformer\" }, { \"transformation_arguments\": { \"arg1\": \"test\" }, \"transformation_class\": \"com.gotocompany.dagger.transformer.Test\" } ] }"; defaultPostProcessorConfig = PostProcessorConfig.parse(configuration); assertTrue(defaultPostProcessorConfig.hasSQLTransformer()); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PostProcessorFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PostProcessorFactoryTest.java similarity index 61% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/PostProcessorFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PostProcessorFactoryTest.java index 54d2478a5..f7e714517 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PostProcessorFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PostProcessorFactoryTest.java @@ -1,19 +1,19 @@ -package io.odpf.dagger.core.processors; - -import io.odpf.dagger.common.core.DaggerContextTestBase; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.processors.longbow.LongbowProcessor; -import io.odpf.dagger.core.processors.telemetry.TelemetryProcessor; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; +package com.gotocompany.dagger.core.processors; + +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowProcessor; +import com.gotocompany.dagger.core.processors.telemetry.TelemetryProcessor; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import java.util.List; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.core.utils.Constants.*; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -52,8 +52,8 @@ public void setup() { @Test public void shouldReturnLongbowProcessor() { columnNames = new String[]{"longbow_key", "longbow_data", "event_timestamp", "rowtime", "longbow_duration"}; - when(configuration.getString(FLINK_SQL_QUERY_KEY, FLINK_SQL_QUERY_DEFAULT)).thenReturn("select a as `longbow_key` from l"); - when(configuration.getBoolean(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); + when(configuration.getString(Constants.FLINK_SQL_QUERY_KEY, Constants.FLINK_SQL_QUERY_DEFAULT)).thenReturn("select a as `longbow_key` from l"); + when(configuration.getBoolean(Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); when(configuration.getString(INPUT_STREAMS, "")).thenReturn(jsonArray); List postProcessors = PostProcessorFactory.getPostProcessors(daggerContext, 
stencilClientOrchestrator, columnNames, metricsTelemetryExporter); @@ -64,8 +64,8 @@ public void shouldReturnLongbowProcessor() { @Test public void shouldReturnParentPostProcessor() { - when(configuration.getString(FLINK_SQL_QUERY_KEY, FLINK_SQL_QUERY_DEFAULT)).thenReturn("test-sql"); - when(configuration.getBoolean(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); + when(configuration.getString(Constants.FLINK_SQL_QUERY_KEY, Constants.FLINK_SQL_QUERY_DEFAULT)).thenReturn("test-sql"); + when(configuration.getBoolean(Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); List postProcessors = PostProcessorFactory.getPostProcessors(daggerContext, stencilClientOrchestrator, columnNames, metricsTelemetryExporter); @@ -75,9 +75,9 @@ public void shouldReturnParentPostProcessor() { @Test public void shouldReturnTelemetryPostProcessor() { - when(configuration.getString(FLINK_SQL_QUERY_KEY, FLINK_SQL_QUERY_DEFAULT)).thenReturn("test-sql"); - when(configuration.getBoolean(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); + when(configuration.getString(Constants.FLINK_SQL_QUERY_KEY, Constants.FLINK_SQL_QUERY_DEFAULT)).thenReturn("test-sql"); + when(configuration.getBoolean(Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); List postProcessors = PostProcessorFactory.getPostProcessors(daggerContext, stencilClientOrchestrator, columnNames, metricsTelemetryExporter); @@ -87,8 +87,8 @@ public void shouldReturnTelemetryPostProcessor() { @Test public void shouldNotReturnAnyPostProcessor() { - when(configuration.getString(FLINK_SQL_QUERY_KEY, FLINK_SQL_QUERY_DEFAULT)).thenReturn("test-sql"); - when(configuration.getBoolean(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); + when(configuration.getString(Constants.FLINK_SQL_QUERY_KEY, Constants.FLINK_SQL_QUERY_DEFAULT)).thenReturn("test-sql"); + when(configuration.getBoolean(Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_POSTPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); List postProcessors = PostProcessorFactory.getPostProcessors(daggerContext, stencilClientOrchestrator, columnNames, metricsTelemetryExporter); assertEquals(0, postProcessors.size()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PreProcessorFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PreProcessorFactoryTest.java similarity index 66% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/PreProcessorFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PreProcessorFactoryTest.java index 45a92edd9..1a60b6be5 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PreProcessorFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PreProcessorFactoryTest.java @@ -1,17 +1,15 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; -import io.odpf.dagger.common.core.DaggerContextTestBase; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; 
-import io.odpf.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import java.util.List; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -49,8 +47,8 @@ public void setup() { @Test public void shouldReturnPreProcessors() { - when(configuration.getBoolean(PROCESSOR_PREPROCESSOR_ENABLE_KEY, PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); - when(configuration.getString(PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); + when(configuration.getBoolean(Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); + when(configuration.getString(Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); List preProcessors = PreProcessorFactory.getPreProcessors(daggerContext, "booking", metricsTelemetryExporter); assertEquals(1, preProcessors.size()); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PreProcessorOrchestratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PreProcessorOrchestratorTest.java similarity index 72% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/PreProcessorOrchestratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PreProcessorOrchestratorTest.java index 635bd84b5..d959ba8d7 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/PreProcessorOrchestratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/PreProcessorOrchestratorTest.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.processors; +package com.gotocompany.dagger.core.processors; +import com.gotocompany.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.core.utils.Constants; import com.jayway.jsonpath.InvalidJsonException; -import io.odpf.dagger.common.core.DaggerContextTestBase; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.transformers.TableTransformConfig; -import io.odpf.dagger.core.processors.transformers.TransformConfig; -import io.odpf.dagger.core.processors.transformers.TransformProcessor; -import io.odpf.dagger.core.processors.types.Preprocessor; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.transformers.TableTransformConfig; +import com.gotocompany.dagger.core.processors.transformers.TransformConfig; +import 
com.gotocompany.dagger.core.processors.transformers.TransformProcessor; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -22,8 +22,6 @@ import java.util.HashMap; import java.util.List; -import static io.odpf.dagger.core.utils.Constants.*; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY; import static org.junit.Assert.*; import static org.junit.Assert.assertNull; import static org.mockito.Mockito.when; @@ -81,7 +79,7 @@ public void shouldGetProcessors() { transformConfigs.add(new TransformConfig("InvalidRecordFilterTransformer", new HashMap<>())); TableTransformConfig ttc = new TableTransformConfig("test", transformConfigs); config.tableTransformers = Collections.singletonList(ttc); - when(configuration.getString(PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorFilterConfigJson); + when(configuration.getString(Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorFilterConfigJson); when(configuration.getBoolean(Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); PreProcessorOrchestrator ppo = new PreProcessorOrchestrator(daggerContext, exporter, "test"); Mockito.when(streamInfo.getColumnNames()).thenReturn(new String[0]); @@ -106,8 +104,8 @@ public void shouldNotGetProcessors() { @Test public void shouldNotNullConfig() { - when(configuration.getBoolean(PROCESSOR_PREPROCESSOR_ENABLE_KEY, PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); - when(configuration.getString(PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); + when(configuration.getBoolean(Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); + when(configuration.getString(Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); PreProcessorOrchestrator ppo = new PreProcessorOrchestrator(daggerContext, exporter, "test"); PreProcessorConfig preProcessorConfig = ppo.parseConfig(configuration); assertNotNull(preProcessorConfig); @@ -115,8 +113,8 @@ public void shouldNotNullConfig() { @Test public void shouldParseConfig() { - when(configuration.getBoolean(PROCESSOR_PREPROCESSOR_ENABLE_KEY, PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); - when(configuration.getString(PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); + when(configuration.getBoolean(Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); + when(configuration.getString(Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); PreProcessorOrchestrator ppo = new PreProcessorOrchestrator(daggerContext, exporter, "test"); PreProcessorConfig preProcessorConfig = ppo.parseConfig(configuration); assertEquals(2, preProcessorConfig.getTableTransformers().size()); @@ -126,8 +124,8 @@ public void shouldParseConfig() { @Test public void shouldThrowExceptionForInvalidJson() { - when(configuration.getBoolean(PROCESSOR_PREPROCESSOR_ENABLE_KEY, PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); - when(configuration.getString(PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn("blah"); + when(configuration.getBoolean(Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(true); + when(configuration.getString(Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn("blah"); InvalidJsonException exception = 
assertThrows(InvalidJsonException.class, () -> new PreProcessorOrchestrator(daggerContext, exporter, "test")); assertEquals("Invalid JSON Given for PROCESSOR_PREPROCESSOR_CONFIG", exception.getMessage()); @@ -135,8 +133,8 @@ public void shouldThrowExceptionForInvalidJson() { @Test public void shouldNotParseConfigWhenDisabled() { - when(configuration.getBoolean(PROCESSOR_PREPROCESSOR_ENABLE_KEY, PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); - when(configuration.getString(PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); + when(configuration.getBoolean(Constants.PROCESSOR_PREPROCESSOR_ENABLE_KEY, Constants.PROCESSOR_PREPROCESSOR_ENABLE_DEFAULT)).thenReturn(false); + when(configuration.getString(Constants.PROCESSOR_PREPROCESSOR_CONFIG_KEY, "")).thenReturn(preProcessorConfigJson); PreProcessorOrchestrator ppo = new PreProcessorOrchestrator(daggerContext, exporter, "test"); PreProcessorConfig preProcessorConfig = ppo.parseConfig(configuration); assertNull(preProcessorConfig); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/DescriptorManagerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/DescriptorManagerTest.java similarity index 75% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/DescriptorManagerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/DescriptorManagerTest.java index f0f1a3482..bba77f9d8 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/DescriptorManagerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/DescriptorManagerTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.stencil.StencilClientFactory; -import io.odpf.stencil.client.StencilClient; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.stencil.StencilClientFactory; +import com.gotocompany.stencil.client.StencilClient; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -13,7 +13,8 @@ import java.util.Collections; import java.util.List; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -35,7 +36,7 @@ public void shouldReturnValidDescriptors() { when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); DescriptorManager descriptorManager = new DescriptorManager(stencilClientOrchestrator); Descriptors.Descriptor descriptor = descriptorManager - .getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + .getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); assertEquals(TestBookingLogMessage.getDescriptor(), descriptor); } @@ -46,7 +47,7 @@ public void shouldReturnValidDescriptorsInCaseOfEnrichment() { when(stencilClientOrchestrator.enrichStencilClient(grpcSpecificStencilURLs)).thenReturn(stencilClient); DescriptorManager descriptorManager = new 
DescriptorManager(stencilClientOrchestrator, grpcSpecificStencilURLs); Descriptors.Descriptor descriptor = descriptorManager - .getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + .getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); assertEquals(TestBookingLogMessage.getDescriptor(), descriptor); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/EndpointHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/EndpointHandlerTest.java similarity index 72% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/EndpointHandlerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/EndpointHandlerTest.java index 7cb27945e..92203a733 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/EndpointHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/EndpointHandlerTest.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.processors.common; - -import io.odpf.dagger.core.utils.Constants.ExternalPostProcessorVariableType; -import io.odpf.stencil.StencilClientFactory; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestEnumType; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.types.SourceConfig; +package com.gotocompany.dagger.core.processors.common; + +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestEnumType; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.types.SourceConfig; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.stencil.StencilClientFactory; +import com.gotocompany.stencil.client.StencilClient; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; import org.junit.Before; @@ -55,7 +55,7 @@ public void setup() { initMocks(this); StencilClient stencilClient = StencilClientFactory.getClient(); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); - inputProtoClasses = new String[] {"io.odpf.dagger.consumer.TestBookingLogMessage"}; + inputProtoClasses = new String[]{"com.gotocompany.dagger.consumer.TestBookingLogMessage"}; descriptorManager = new DescriptorManager(stencilClientOrchestrator); } @@ -71,11 +71,11 @@ public void shouldReturnEndpointQueryVariableValuesForPrimitiveDataFromDescripto RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_id"}), descriptorManager); + 
inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); - assertArrayEquals(endpointOrQueryVariablesValues, new Object[] {"123456"}); + assertArrayEquals(endpointOrQueryVariablesValues, new Object[]{"123456"}); } @Test @@ -90,11 +90,11 @@ public void shouldReturnEndpointQueryVariableValuesForPrimitiveDataIfInputColumn RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "id"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); - assertArrayEquals(endpointOrQueryVariablesValues, new Object[] {"123456"}); + assertArrayEquals(endpointOrQueryVariablesValues, new Object[]{"123456"}); } @Test @@ -118,11 +118,11 @@ public void shouldReturnJsonValueOfEndpointQueryValuesInCaseOfArray() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "test_enums"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "test_enums"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); - assertArrayEquals(new Object[] {"[\"+I[UNKNOWN]\",\"+I[TYPE1]\"]"}, endpointOrQueryVariablesValues); + assertArrayEquals(new Object[]{"[\"+I[UNKNOWN]\",\"+I[TYPE1]\"]"}, endpointOrQueryVariablesValues); } @Test @@ -143,17 +143,17 @@ public void shouldReturnJsonValueOfEndpointQueryValuesIncaseOfComplexDatatype() RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "driver_pickup_location"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "driver_pickup_location"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, "driver_pickup_location", resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, "driver_pickup_location", resultFuture); - assertArrayEquals(endpointOrQueryVariablesValues, new Object[] {"{\"name\":\"test_driver\",\"address\":null,\"latitude\":172.5,\"longitude\":175.5,\"type\":null,\"note\":null,\"place_id\":null,\"accuracy_meter\":null,\"gate_id\":null}"}); + assertArrayEquals(endpointOrQueryVariablesValues, new 
Object[]{"{\"name\":\"test_driver\",\"address\":null,\"latitude\":172.5,\"longitude\":175.5,\"type\":null,\"note\":null,\"place_id\":null,\"accuracy_meter\":null,\"gate_id\":null}"}); } @Test public void shouldReturnEndpointQueryVariableValuesForPrimitiveDataFromDescriptorInCaseOfMultipleStreams() { when(sourceConfig.getVariables()).thenReturn("customer_id"); - inputProtoClasses = new String[] {"io.odpf.dagger.consumer.TestBookingLogMessage", "io.odpf.dagger.consumer.TestBookingLogMessage"}; + inputProtoClasses = new String[]{"com.gotocompany.dagger.consumer.TestBookingLogMessage", "com.gotocompany.dagger.consumer.TestBookingLogMessage"}; Row row = new Row(2); Row inputData = new Row(2); @@ -163,17 +163,17 @@ public void shouldReturnEndpointQueryVariableValuesForPrimitiveDataFromDescripto RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_id"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); - assertArrayEquals(endpointOrQueryVariablesValues, new Object[] {"123456"}); + assertArrayEquals(endpointOrQueryVariablesValues, new Object[]{"123456"}); } @Test public void shouldInferEndpointVariablesFromTheCorrectStreams() { when(sourceConfig.getVariables()).thenReturn("order_number,customer_url"); - inputProtoClasses = new String[] {"io.odpf.dagger.consumer.TestBookingLogMessage", "io.odpf.dagger.consumer.TestBookingLogMessage"}; + inputProtoClasses = new String[]{"com.gotocompany.dagger.consumer.TestBookingLogMessage", "com.gotocompany.dagger.consumer.TestBookingLogMessage"}; Row row = new Row(2); Row inputData = new Row(2); @@ -184,17 +184,17 @@ public void shouldInferEndpointVariablesFromTheCorrectStreams() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_url"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_url"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); - assertArrayEquals(endpointOrQueryVariablesValues, new Object[] {"test_order_number", "customer_url_test"}); + assertArrayEquals(endpointOrQueryVariablesValues, new Object[]{"test_order_number", "customer_url_test"}); } @Test public void shouldReturnEmptyObjectIfNoQueryVariables() { when(sourceConfig.getVariables()).thenReturn(""); - inputProtoClasses = new String[] {"io.odpf.dagger.consumer.TestBookingLogMessage", "io.odpf.dagger.consumer.TestBookingLogMessage"}; + inputProtoClasses = new String[]{"com.gotocompany.dagger.consumer.TestBookingLogMessage", "com.gotocompany.dagger.consumer.TestBookingLogMessage"}; Row row = new Row(2); Row inputData = new Row(2); @@ -205,17 +205,17 @@ public 
void shouldReturnEmptyObjectIfNoQueryVariables() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_url"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_url"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); - assertArrayEquals(endpointOrQueryVariablesValues, new Object[] {}); + assertArrayEquals(endpointOrQueryVariablesValues, new Object[]{}); } @Test public void shouldThrowErrorIfRequestVariablesAreNotProperlyConfigures() { when(sourceConfig.getVariables()).thenReturn("czx"); - inputProtoClasses = new String[] {"io.odpf.dagger.consumer.TestBookingLogMessage", "io.odpf.dagger.consumer.TestBookingLogMessage"}; + inputProtoClasses = new String[]{"com.gotocompany.dagger.consumer.TestBookingLogMessage", "com.gotocompany.dagger.consumer.TestBookingLogMessage"}; Row row = new Row(2); Row inputData = new Row(2); @@ -226,16 +226,16 @@ public void shouldThrowErrorIfRequestVariablesAreNotProperlyConfigures() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_url"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_url"}), descriptorManager); InvalidConfigurationException exception = assertThrows(InvalidConfigurationException.class, () -> endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture)); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture)); assertEquals("Column 'czx' not found as configured in the 'REQUEST_VARIABLES' variable", exception.getMessage()); } @Test public void shouldThrowErrorIfInputProtoNotFound() { when(sourceConfig.getVariables()).thenReturn("driver_pickup_location"); - inputProtoClasses = new String[] {"io.odpf.dagger.consumer.TestBookingLogMessage1"}; + inputProtoClasses = new String[]{"com.gotocompany.dagger.consumer.TestBookingLogMessage1"}; Row row = new Row(2); Row inputData = new Row(2); @@ -251,10 +251,10 @@ public void shouldThrowErrorIfInputProtoNotFound() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "driver_pickup_location"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "driver_pickup_location"}), descriptorManager); assertThrows(NullPointerException.class, - () -> endpointHandler.getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture)); + () -> endpointHandler.getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture)); verify(errorReporter, times(1)).reportFatalException(any(DescriptorNotFoundException.class)); verify(resultFuture, 
times(1)).completeExceptionally(any(DescriptorNotFoundException.class)); } @@ -272,9 +272,9 @@ public void shouldCheckIfQueryIsValid() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_id"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); boolean queryInvalid = endpointHandler.isQueryInvalid(resultFuture, rowManager, sourceConfig.getVariables(), endpointOrQueryVariablesValues); assertFalse(queryInvalid); @@ -293,9 +293,9 @@ public void shouldCheckIfQueryIsInValidInCaseOfSingeEmptyVariableValueForSingleF RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_id"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); boolean queryInvalid = endpointHandler.isQueryInvalid(resultFuture, rowManager, sourceConfig.getVariables(), endpointOrQueryVariablesValues); assertTrue(queryInvalid); @@ -317,9 +317,9 @@ public void shouldCheckIfQueryIsValidInCaseOfSomeVariableValue() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_id"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); boolean queryInvalid = endpointHandler.isQueryInvalid(resultFuture, rowManager, sourceConfig.getVariables(), endpointOrQueryVariablesValues); assertFalse(queryInvalid); @@ -339,9 +339,9 @@ public void shouldCheckIfQueryIsInvalidInCaseOfAllVariableValues() { RowManager rowManager = new RowManager(row); endpointHandler = new EndpointHandler(meterStatsManager, errorReporter, - inputProtoClasses, getColumnNameManager(new String[] {"order_number", "customer_id"}), descriptorManager); + inputProtoClasses, getColumnNameManager(new String[]{"order_number", "customer_id"}), descriptorManager); Object[] endpointOrQueryVariablesValues = endpointHandler - .getVariablesValue(rowManager, ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), resultFuture); + .getVariablesValue(rowManager, Constants.ExternalPostProcessorVariableType.REQUEST_VARIABLES, sourceConfig.getVariables(), 
resultFuture); boolean queryInvalid = endpointHandler.isQueryInvalid(resultFuture, rowManager, sourceConfig.getVariables(), endpointOrQueryVariablesValues); assertTrue(queryInvalid); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/FetchOutputDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/FetchOutputDecoratorTest.java similarity index 97% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/FetchOutputDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/FetchOutputDecoratorTest.java index 3ca1bd6d0..e25b984e5 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/FetchOutputDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/FetchOutputDecoratorTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.stencil.client.StencilClient; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/InitializationDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/InitializationDecoratorTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/InitializationDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/InitializationDecoratorTest.java index eb164c064..55c808d93 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/InitializationDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/InitializationDecoratorTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.ColumnNameManager; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/OutputMappingTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/OutputMappingTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/OutputMappingTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/OutputMappingTest.java index 4fdc8e649..1c5e9ea21 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/OutputMappingTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/OutputMappingTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/RowManagerTest.java 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/RowManagerTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/RowManagerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/RowManagerTest.java index ffaad4733..695b22794 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/RowManagerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/RowManagerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.core.exception.InputOutputMappingException; +import com.gotocompany.dagger.core.exception.InputOutputMappingException; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/SchemaConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/SchemaConfigTest.java similarity index 77% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/SchemaConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/SchemaConfigTest.java index 4c06f7d30..7157c056a 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/SchemaConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/SchemaConfigTest.java @@ -1,14 +1,15 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.utils.Constants; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.core.utils.Constants.SINK_KAFKA_PROTO_MESSAGE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; @@ -30,7 +31,7 @@ public void setup() { initMocks(this); String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"topic-name\",\"INPUT_SCHEMA_TABLE\":\"booking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"InputProtoMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"10.1.2.3:9092\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-config\",\"SOURCE_KAFKA_NAME\":\"test\"}]"; when(configuration.getString(INPUT_STREAMS, "")).thenReturn(streams); - when(configuration.getString(SINK_KAFKA_PROTO_MESSAGE_KEY, "")).thenReturn("OutputProtoMessage"); + when(configuration.getString(Constants.SINK_KAFKA_PROTO_MESSAGE_KEY, "")).thenReturn("OutputProtoMessage"); } @Test @@ -42,7 +43,7 @@ public void shouldReturnStencilOrchestrator() { @Test public void shouldReturnColumnNameManager() { SchemaConfig schemaConfig = new SchemaConfig(configuration, stencilClientOrchestrator, 
columnNameManager); - assertEquals(columnNameManager, schemaConfig.getColumnNameManager()); + Assert.assertEquals(columnNameManager, schemaConfig.getColumnNameManager()); } @Test diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/ValidRecordsDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/ValidRecordsDecoratorTest.java similarity index 73% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/common/ValidRecordsDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/ValidRecordsDecoratorTest.java index f02b4a69e..e0235a5bf 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/common/ValidRecordsDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/common/ValidRecordsDecoratorTest.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.processors.common; +package com.gotocompany.dagger.core.processors.common; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.types.FilterDecorator; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.types.FilterDecorator; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.junit.Before; import org.junit.Test; @@ -18,7 +18,7 @@ import java.util.List; import java.util.stream.Collectors; -import static io.odpf.dagger.common.core.Constants.*; +import static com.gotocompany.dagger.common.core.Constants.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @@ -40,6 +40,9 @@ public void setUp() { initMocks(this); when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_ENABLE_KEY, SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_ENABLE_DEFAULT); when(configuration.getString(SCHEMA_REGISTRY_STENCIL_URLS_KEY, SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_URLS_DEFAULT); + when(configuration.getBoolean(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH_DEFAULT); + when(configuration.getLong(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_KEY, SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS_DEFAULT); + when(configuration.getString(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_KEY, SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT)).thenReturn(SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY_DEFAULT); stencilClientOrchestrator = new StencilClientOrchestrator(configuration); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/ExternalPostProcessorTest.java 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/ExternalPostProcessorTest.java similarity index 85% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/ExternalPostProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/ExternalPostProcessorTest.java index c6d661e7d..779ea14e3 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/ExternalPostProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/ExternalPostProcessorTest.java @@ -1,20 +1,21 @@ -package io.odpf.dagger.core.processors.external; +package com.gotocompany.dagger.core.processors.external; -import io.odpf.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.streaming.api.datastream.DataStream; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.es.EsSourceConfig; -import io.odpf.dagger.core.processors.external.es.EsStreamDecorator; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; -import io.odpf.dagger.core.processors.external.http.HttpStreamDecorator; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.core.processors.external.es.EsSourceConfig; +import com.gotocompany.dagger.core.processors.external.es.EsStreamDecorator; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.http.HttpStreamDecorator; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -26,10 +27,6 @@ import java.util.List; import java.util.Map; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_KEY; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -72,7 +69,7 @@ public void setup() { HashMap httpColumnNames = new HashMap<>(); httpColumnNames.put("http_field_1", new OutputMapping("")); httpColumnNames.put("http_field_2", new OutputMapping("")); - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("endpoint", "", "POST", "/some/patttern/%s", "variable", "", "", "123", "234", false, "type", "20", new HashMap<>(), 
httpColumnNames, "metricId_01", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("endpoint", "", "POST", "/some/patttern/%s", "variable", "", "", "123", "234", false, null, "type", "20", new HashMap<>(), httpColumnNames, "metricId_01", false); HashMap esOutputMapping = new HashMap<>(); esOutputMapping.put("es_field_1", new OutputMapping("")); EsSourceConfig esSourceConfig = new EsSourceConfig("host", "port", "", "", "endpointPattern", @@ -82,8 +79,8 @@ public void setup() { when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(stencilClient.get("TestLogMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); when(httpStreamDecorator.decorate(dataStream)).thenReturn(dataStream); - when(configuration.getLong(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT); - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT); + when(configuration.getLong(Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT); String postProcessorConfigString = "{\n" + " \"external_source\": {\n" @@ -135,7 +132,7 @@ public void shouldProcessWithRightConfiguration() { outputMapping.put("order_id", new OutputMapping("path")); List httpSourceConfigs = new ArrayList<>(); - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("endpoint", "", "POST", "/some/patttern/%s", "variable", "", "", "123", "234", false, "type", "20", new HashMap<>(), outputMapping, "metricId_01", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("endpoint", "", "POST", "/some/patttern/%s", "variable", "", "", "123", "234", false, null, "type", "20", new HashMap<>(), outputMapping, "metricId_01", false); httpSourceConfigs.add(httpSourceConfig); List esSourceConfigs = new ArrayList<>(); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/ExternalSourceConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/ExternalSourceConfigTest.java similarity index 93% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/ExternalSourceConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/ExternalSourceConfigTest.java index 4a3f7245e..0ab669d30 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/ExternalSourceConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/ExternalSourceConfigTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.processors.external; - -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.es.EsSourceConfig; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfigBuilder; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; -import io.odpf.dagger.core.processors.external.pg.PgSourceConfig; +package com.gotocompany.dagger.core.processors.external; + +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import 
com.gotocompany.dagger.core.processors.external.es.EsSourceConfig; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfigBuilder; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.external.pg.PgSourceConfig; import org.junit.Before; import org.junit.Test; @@ -29,7 +29,7 @@ public void setUp() { HashMap httpOutputMapping = new HashMap<>(); httpOutputMapping.put("http_field_1", new OutputMapping("")); httpOutputMapping.put("http_field_2", new OutputMapping("")); - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("endpoint", "", "POST", "/some/patttern/%s", "variable", "", "", "123", "234", false, "type", "20", new HashMap<>(), httpOutputMapping, "metricId_01", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("endpoint", "", "POST", "/some/patttern/%s", "variable", "", "", "123", "234", false, null, "type", "20", new HashMap<>(), httpOutputMapping, "metricId_01", false); http = new ArrayList<>(); http.add(httpSourceConfig); es = new ArrayList<>(); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsAsyncConnectorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsAsyncConnectorTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsAsyncConnectorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsAsyncConnectorTest.java index e8360543d..bbc4095e0 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsAsyncConnectorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsAsyncConnectorTest.java @@ -1,22 +1,23 @@ -package io.odpf.dagger.core.processors.external.es; - +package com.gotocompany.dagger.core.processors.external.es; + +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +import 
com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.junit.Before; @@ -32,10 +33,6 @@ import java.util.List; import java.util.concurrent.TimeoutException; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.INVALID_CONFIGURATION; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.TIMEOUTS; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.TOTAL_EXTERNAL_CALLS; -import static io.odpf.dagger.core.utils.Constants.ES_TYPE; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.*; @@ -110,7 +107,7 @@ public void shouldNotEnrichOutputWhenEndpointVariableIsEmptyAndRequiredInPattern esAsyncConnector.asyncInvoke(streamRow, resultFuture); verify(resultFuture, times(1)).completeExceptionally(any(InvalidConfigurationException.class)); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(esClient, never()).performRequestAsync(any(Request.class), any(EsResponseHandler.class)); } @@ -129,7 +126,7 @@ public void shouldEnrichOutputWhenEndpointVariableIsEmptyAndNotRequiredInPattern Request expectedRequest = new Request("GET", "/drivers/"); verify(esClient, times(1)).performRequestAsync(eq(expectedRequest), any(EsResponseHandler.class)); verify(resultFuture, times(0)).completeExceptionally(any(InvalidConfigurationException.class)); - verify(meterStatsManager, times(0)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(0)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); } @Test @@ -214,7 +211,7 @@ public void shouldGiveErrorWhenEndpointPatternIsInvalid() throws Exception { verify(resultFuture, times(1)).completeExceptionally(invalidConfigurationExceptionCaptor.capture()); assertEquals(expectedExceptionMessage, invalidConfigurationExceptionCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(1)).reportFatalException(reportExceptionCaptor.capture()); assertEquals(expectedExceptionMessage, reportExceptionCaptor.getValue().getMessage()); @@ -243,7 +240,7 @@ public void shouldGiveErrorWhenEndpointPatternIsIncompatible() throws Exception verify(resultFuture, times(1)).completeExceptionally(invalidConfigurationExceptionCaptor.capture()); assertEquals(expectedExceptionMessage, invalidConfigurationExceptionCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(1)).reportFatalException(reportExceptionCaptor.capture()); assertEquals(expectedExceptionMessage, reportExceptionCaptor.getValue().getMessage()); @@ -266,7 +263,7 @@ public void shouldEnrichOutputForCorrespondingEnrichmentKey() throws Exception { esAsyncConnector.asyncInvoke(streamRow, resultFuture); 
Request request = new Request("GET", "/drivers/driver/11223344545"); - verify(meterStatsManager, times(1)).markEvent(TOTAL_EXTERNAL_CALLS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_EXTERNAL_CALLS); verify(esClient, times(1)).performRequestAsync(eq(request), any(EsResponseHandler.class)); } @@ -287,7 +284,7 @@ public void shouldEnrichOutputWhenUserAndPasswordAreNull() throws Exception { esAsyncConnector.asyncInvoke(streamRow, resultFuture); Request request = new Request("GET", "/drivers/driver/11223344545"); - verify(meterStatsManager, times(1)).markEvent(TOTAL_EXTERNAL_CALLS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_EXTERNAL_CALLS); verify(esClient, times(1)).performRequestAsync(eq(request), any(EsResponseHandler.class)); } @@ -300,7 +297,7 @@ public void shouldNotEnrichOutputOnTimeout() throws Exception { esAsyncConnector.open(configuration); esAsyncConnector.timeout(streamRow, resultFuture); - verify(meterStatsManager, times(1)).markEvent(TIMEOUTS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TIMEOUTS); verify(errorReporter, times(1)).reportNonFatalException(any(TimeoutException.class)); verify(resultFuture, times(1)).complete(Collections.singleton(streamRow)); } @@ -310,7 +307,7 @@ public void shouldAddPostProcessorTypeMetrics() { when(stencilClient.get(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); ArrayList postProcessorType = new ArrayList<>(); - postProcessorType.add(ES_TYPE); + postProcessorType.add(Constants.ES_TYPE); HashMap> metrics = new HashMap<>(); metrics.put("post_processor_type", postProcessorType); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsResponseHandlerTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsResponseHandlerTest.java index 80b52ebcd..89727c8f6 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsResponseHandlerTest.java @@ -1,19 +1,19 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; import com.google.protobuf.Descriptors; +import com.gotocompany.dagger.core.exception.HttpFailureException; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; import com.jayway.jsonpath.PathNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestEnrichedBookingLogMessage; -import io.odpf.dagger.consumer.TestProfile; -import io.odpf.dagger.core.exception.HttpFailureException; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.OutputMapping; 
-import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.common.serde.typehandler.TypeHandlerFactory; -import io.odpf.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage; +import com.gotocompany.dagger.consumer.TestProfile; +import com.gotocompany.dagger.common.serde.typehandler.TypeHandlerFactory; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; import mockit.Mock; import mockit.MockUp; import org.apache.flink.streaming.api.functions.async.ResultFuture; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsSourceConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfigTest.java similarity index 98% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsSourceConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfigTest.java index 337bb5ac0..81d1e9c2d 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsSourceConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsSourceConfigTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; -import io.odpf.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.OutputMapping; import org.junit.Test; import java.util.Arrays; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsStreamDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsStreamDecoratorTest.java similarity index 79% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsStreamDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsStreamDecoratorTest.java index b98963205..54312a566 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsStreamDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/es/EsStreamDecoratorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.es; +package com.gotocompany.dagger.core.processors.external.es; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -24,7 +24,7 @@ public class EsStreamDecoratorTest { @Before public void setUp() { esSourceConfig = new EsSourceConfig("localhost", "9200", "", "", "", - "driver_id", "io.odpf.TestProtoMessage", "30", + "driver_id", "com.gotocompany.TestProtoMessage", "30", "5000", "5000", "5000", "5000", false, new HashMap<>(), "metricId_01", false); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcAsyncConnectorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcAsyncConnectorTest.java similarity index 91% rename from 
dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcAsyncConnectorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcAsyncConnectorTest.java index 032c2c34f..6c9e6d424 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcAsyncConnectorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcAsyncConnectorTest.java @@ -1,22 +1,22 @@ -package io.odpf.dagger.core.processors.external.grpc; - -import io.odpf.stencil.client.StencilClient; +package com.gotocompany.dagger.core.processors.external.grpc; + +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.stencil.client.StencilClient; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestGrpcRequest; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.external.grpc.client.GrpcClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestGrpcRequest; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; +import com.gotocompany.dagger.core.processors.external.grpc.client.GrpcClient; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; @@ -32,7 +32,6 @@ import java.util.List; import java.util.concurrent.TimeoutException; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.*; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; @@ -125,7 +124,7 @@ public void shouldCloseGrpcClient() throws Exception { grpcAsyncConnector.close(); verify(grpcClient, times(1)).close(); - verify(meterStatsManager, times(1)).markEvent(CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); } 
@Test @@ -136,7 +135,7 @@ public void shouldMakeGrpcClientNullAfterClose() throws Exception { grpcAsyncConnector.close(); verify(grpcClient, times(1)).close(); - verify(meterStatsManager, times(1)).markEvent(CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); assertNull(grpcAsyncConnector.getGrpcClient()); } @@ -203,7 +202,7 @@ public void shouldCompleteExceptionallyWhenEndpointVariableIsInvalid() throws Ex grpcAsyncConnector.open(flinkConfiguration); grpcAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor reportInvalidConfigCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(reportInvalidConfigCaptor.capture()); assertEquals("Column 'invalid_variable' not found as configured in the 'REQUEST_VARIABLES' variable", @@ -228,7 +227,7 @@ public void shouldCompleteExceptionallyWhenEndpointVariableIsEmptyAndRequiredInP grpcAsyncConnector.open(flinkConfiguration); grpcAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor reportInvalidConfigCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(reportInvalidConfigCaptor.capture()); assertEquals("pattern config '{'field1': '%s' , 'field2' : 'val2'}' is incompatible with the variable config ''", @@ -265,8 +264,8 @@ public void shouldEnrichWhenEndpointVariableIsEmptyAndNotRequiredInPattern() thr eq(TestGrpcRequest.getDescriptor()), any()); assertEquals(expectedRequest, dynamicMessageCaptor.getValue()); - verify(meterStatsManager, times(1)).markEvent(TOTAL_EXTERNAL_CALLS); - verify(meterStatsManager, times(0)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_EXTERNAL_CALLS); + verify(meterStatsManager, times(0)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, never()).reportFatalException(any(InvalidConfigurationException.class)); } @@ -282,7 +281,7 @@ public void shouldCompleteExceptionallyWhenEndpointPatternIsInvalid() throws Exc grpcAsyncConnector.open(flinkConfiguration); grpcAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor reportInvalidConfigCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(reportInvalidConfigCaptor.capture()); assertEquals("pattern config '{'field1': 'val1' , 'field2' : '%'}' is invalid", reportInvalidConfigCaptor.getValue().getMessage()); @@ -337,7 +336,7 @@ public void shouldCompleteExceptionallyWhenEndpointPatternIsIncompatible() throw grpcAsyncConnector.open(flinkConfiguration); grpcAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor reportInvalidConfigCaptor = 
ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(reportInvalidConfigCaptor.capture()); assertEquals("pattern config '{'field1': '%d' , 'field2' : 'val2'}' is incompatible with the variable config 'customer_id'", diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcResponseHandlerTest.java similarity index 51% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandlerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcResponseHandlerTest.java index 4218c95f6..60c883f04 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcResponseHandlerTest.java @@ -1,20 +1,21 @@ -package io.odpf.dagger.core.processors.external.grpc; +package com.gotocompany.dagger.core.processors.external.grpc; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; +import com.gotocompany.dagger.core.exception.GrpcFailureException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; import com.jayway.jsonpath.PathNotFoundException; -import io.odpf.dagger.common.metrics.aspects.Aspects; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestGrpcResponse; -import io.odpf.dagger.consumer.TestLocation; -import io.odpf.dagger.core.exception.GrpcFailureException; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestGrpcResponse; +import com.gotocompany.dagger.consumer.TestLocation; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; import org.junit.Before; @@ -27,7 +28,6 @@ import java.util.HashMap; import java.util.List; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.*; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; @@ -74,15 +74,15 @@ public void setup() { streamData.setField(1, new Row(2)); rowManager = new RowManager(streamData); columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - grpcSourceConfig = new 
GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(5000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.GrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(5000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.GrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); } @Test public void shouldDetectProperBodyAndHandleResponseIfRetainResponseTypeIsFalse() throws InvalidProtocolBufferException { outputMapping.put("success", new OutputMapping("$.success")); - grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); - grpcSourceConfig.setType("io.odpf.dagger.consumer.TestGrpcResponse"); + grpcSourceConfig.setType("com.gotocompany.dagger.consumer.TestGrpcResponse"); DynamicMessage message = DynamicMessage.parseFrom(TestGrpcResponse.getDescriptor(), TestGrpcResponse.newBuilder().setSuccess(true).build().toByteArray()); GrpcResponseHandler grpcResponseHandler = new GrpcResponseHandler(grpcSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); @@ -96,7 +96,7 @@ public void shouldDetectProperBodyAndHandleResponseIfRetainResponseTypeIsFalse() grpcResponseHandler.onNext(message); - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); } @@ -107,9 +107,9 @@ public void shouldDetectProperBodyAndHandleResponseIfRetainResponseTypeIsFalseAN descriptor = TestBookingLogMessage.getDescriptor(); outputMapping.put("driver_id", new OutputMapping("$.driver_id")); - grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + 
grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); grpcSourceConfig.setRetainResponseType(false); - grpcSourceConfig.setType("io.odpf.dagger.consumer.TestAggregatedSupplyMessage"); + grpcSourceConfig.setType("com.gotocompany.dagger.consumer.TestAggregatedSupplyMessage"); streamData.setField(0, inputData); @@ -135,7 +135,7 @@ public void shouldDetectProperBodyAndHandleResponseIfRetainResponseTypeIsFalseAN grpcResponseHandler.onNext(message); - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); } @@ -144,7 +144,7 @@ public void shouldDetectProperBodyAndHandleResponseIfRetainResponseTypeIsFalseAN @Test public void shouldRecordErrorInCaseOfUnknownException() throws InvalidProtocolBufferException { outputMapping.put("success", new OutputMapping("$.success")); - grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); GrpcResponseHandler grpcResponseHandler = new GrpcResponseHandler(grpcSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); @@ -158,7 +158,7 @@ public void shouldRecordErrorInCaseOfUnknownException() throws InvalidProtocolBu grpcResponseHandler.onError(new Throwable("io.grpc.StatusRuntimeException: UNKNOWN")); - verify(meterStatsManager, times(1)).markEvent(OTHER_ERRORS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.OTHER_ERRORS); ArgumentCaptor exceptionCaptor = ArgumentCaptor.forClass(GrpcFailureException.class); verify(errorReporter, times(1)).reportNonFatalException(exceptionCaptor.capture()); assertEquals("io.grpc.StatusRuntimeException: UNKNOWN", exceptionCaptor.getValue().getMessage()); @@ -168,7 +168,7 @@ public void shouldRecordErrorInCaseOfUnknownException() throws InvalidProtocolBu @Test public void shouldRecordFatalErrorInCaseOfUnknownExceptionWithFailOnErrorTrue() throws InvalidProtocolBufferException { outputMapping.put("success", new OutputMapping("$.success")); - grpcSourceConfig = new 
GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); grpcSourceConfig.setFailOnErrors(true); GrpcResponseHandler grpcResponseHandler = new GrpcResponseHandler(grpcSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); @@ -182,7 +182,7 @@ public void shouldRecordFatalErrorInCaseOfUnknownExceptionWithFailOnErrorTrue() grpcResponseHandler.startTimer(); grpcResponseHandler.onError(new Throwable("io.grpc.StatusRuntimeException: UNKNOWN")); - verify(meterStatsManager, times(1)).markEvent(OTHER_ERRORS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.OTHER_ERRORS); verify(errorReporter, times(1)).reportFatalException(any()); verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); } @@ -191,9 +191,9 @@ public void shouldRecordFatalErrorInCaseOfUnknownExceptionWithFailOnErrorTrue() public void shouldDetectExceptionIfMessageIsWrong() throws InvalidProtocolBufferException { outputMapping.put("success", new OutputMapping("$.order_number")); grpcSourceConfig = new GrpcSourceConfigBuilder() - .setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest") - .setGrpcResponseProtoSchema("io.odpf.dagger.consumer.de.meta.WrongGrpcResponse") - .setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod") + .setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest") + .setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.de.meta.WrongGrpcResponse") + .setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod") .setRequestPattern("{\"key\": \"%s\"}") .setOutputMapping(outputMapping) .createGrpcSourceConfig(); @@ -216,7 +216,7 @@ public void shouldDetectExceptionIfMessageIsWrong() throws InvalidProtocolBuffer grpcResponseHandler.onNext(message); - verify(meterStatsManager, times(1)).markEvent(FAILURES_ON_READING_PATH); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURES_ON_READING_PATH); verify(errorReporter, times(1)).reportFatalException(any(PathNotFoundException.class)); verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); } @@ -224,9 +224,9 @@ public void shouldDetectExceptionIfMessageIsWrong() throws InvalidProtocolBuffer @Test public void shouldThrowErrorWhenFieldIsNotPresentInOutputDescriptor() throws InvalidProtocolBufferException { outputMapping.put("value", new OutputMapping("$.field3")); - grpcSourceConfig = new 
GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); - grpcSourceConfig.setType("io.odpf.dagger.consumer.TestGrpcResponse"); + grpcSourceConfig.setType("com.gotocompany.dagger.consumer.TestGrpcResponse"); DynamicMessage message = DynamicMessage.parseFrom(TestGrpcResponse.getDescriptor(), TestGrpcResponse.newBuilder().setSuccess(true).build().toByteArray()); @@ -261,7 +261,7 @@ public void shouldDetectProperComplexBodyAndHandleResponseIfRetainResponseTypeIs TestLocation location = TestLocation.newBuilder().setAddress("Indonesia").setName("GojekTech").build(); TestBookingLogMessage bookingLogMessage = TestBookingLogMessage.newBuilder().setDriverPickupLocation(location).setCustomerId("123456").build(); - grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("io.odpf.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("io.odpf.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("io.odpf.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); grpcSourceConfig.setRetainResponseType(true); outputColumnNames = Arrays.asList("address", "name"); @@ -282,9 +282,127 @@ public void shouldDetectProperComplexBodyAndHandleResponseIfRetainResponseTypeIs grpcResponseHandler.onNext(message); - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); } + @Test + public void shouldDetectProperComplexBodyAndNestedProtoAndHandleResponseIfRetainResponseTypeIsFalse() throws InvalidProtocolBufferException { + descriptor = TestBookingLogMessage.getDescriptor(); + outputMapping.put("driver_pickup_location.address", new OutputMapping("$.driver_pickup_location.address")); + outputMapping.put("driver_pickup_location.name", new OutputMapping("$.driver_pickup_location.name")); + + TestLocation location = TestLocation.newBuilder().setAddress("Indonesia").setName("GojekTech").build(); + TestBookingLogMessage bookingLogMessage = 
TestBookingLogMessage.newBuilder().setDriverPickupLocation(location).setCustomerId("123456").build(); + + outputColumnNames = Arrays.asList("driver_pickup_location.address", "driver_pickup_location.name"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + + grpcSourceConfig = new GrpcSourceConfigBuilder() + .setEndpoint("localhost") + .setServicePort(8000) + .setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest") + .setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse") + .setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod") + .setRequestPattern("{\"key\": \"%s\"}") + .setRequestVariables("customer_id") + .setOutputMapping(outputMapping) + .createGrpcSourceConfig(); + grpcSourceConfig.setRetainResponseType(false); + + DynamicMessage message = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + GrpcResponseHandler grpcResponseHandler = new GrpcResponseHandler(grpcSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, "Indonesia"); + outputData.setField(1, "GojekTech"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + + grpcResponseHandler.startTimer(); + grpcResponseHandler.onNext(message); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldDetectProperComplexBodyAndNestedProtoAndHandleResponseIfRetainResponseTypeIsTrue() throws InvalidProtocolBufferException { + descriptor = TestBookingLogMessage.getDescriptor(); + outputMapping.put("driver_pickup_location.address", new OutputMapping("$.driver_pickup_location.address")); + outputMapping.put("driver_pickup_location.name", new OutputMapping("$.driver_pickup_location.name")); + + TestLocation location = TestLocation.newBuilder().setAddress("Indonesia").setName("GojekTech").build(); + TestBookingLogMessage bookingLogMessage = TestBookingLogMessage.newBuilder().setDriverPickupLocation(location).setCustomerId("123456").build(); + + outputColumnNames = Arrays.asList("driver_pickup_location.address", "driver_pickup_location.name"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + + grpcSourceConfig = new GrpcSourceConfigBuilder() + .setEndpoint("localhost") + .setServicePort(8000) + .setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest") + .setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse") + .setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod") + .setRequestPattern("{\"key\": \"%s\"}") + .setRequestVariables("customer_id") + .setOutputMapping(outputMapping) + .createGrpcSourceConfig(); + grpcSourceConfig.setRetainResponseType(true); + + DynamicMessage message = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + GrpcResponseHandler grpcResponseHandler = new GrpcResponseHandler(grpcSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + 
outputData.setField(0, "Indonesia"); + outputData.setField(1, "GojekTech"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + + grpcResponseHandler.startTimer(); + grpcResponseHandler.onNext(message); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldThrowErrorWhenNestedFieldIsNotPresentInOutputDescriptor() throws InvalidProtocolBufferException { + descriptor = TestBookingLogMessage.getDescriptor(); + outputMapping.put("driver_pickup_location.invalid_address", new OutputMapping("$.driver_pickup_location.address")); + TestLocation location = TestLocation.newBuilder().setAddress("Indonesia").setName("GojekTech").build(); + TestBookingLogMessage bookingLogMessage = TestBookingLogMessage.newBuilder().setDriverPickupLocation(location).setCustomerId("123456").build(); + + grpcSourceConfig = new GrpcSourceConfigBuilder().setEndpoint("localhost").setServicePort(8000).setGrpcRequestProtoSchema("com.gotocompany.dagger.consumer.TestGrpcRequest").setGrpcResponseProtoSchema("com.gotocompany.dagger.consumer.TestGrpcResponse").setGrpcMethodUrl("com.gotocompany.dagger.consumer.test/TestMethod").setRequestPattern("{\"key\": \"%s\"}").setRequestVariables("customer_id").setOutputMapping(outputMapping).createGrpcSourceConfig(); + grpcSourceConfig.setRetainResponseType(false); + + outputColumnNames = Arrays.asList("driver_pickup_location.address", "driver_pickup_location.name"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + + DynamicMessage message = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), bookingLogMessage.toByteArray()); + GrpcResponseHandler grpcResponseHandler = new GrpcResponseHandler(grpcSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, "Indonesia"); + outputData.setField(1, "GojekTech"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + + grpcResponseHandler.startTimer(); + assertThrows(Exception.class, () -> grpcResponseHandler.onNext(message)); + + ArgumentCaptor exceptionCaptor = ArgumentCaptor.forClass(IllegalArgumentException.class); + verify(errorReporter, times(1)).reportFatalException(exceptionCaptor.capture()); + assertEquals("Field Descriptor not found for field: driver_pickup_location.invalid_address", exceptionCaptor.getValue().getMessage()); + + ArgumentCaptor exceptionCaptor2 = ArgumentCaptor.forClass(IllegalArgumentException.class); + verify(resultFuture, times(1)).completeExceptionally(exceptionCaptor2.capture()); + assertEquals("Field Descriptor not found for field: driver_pickup_location.invalid_address", exceptionCaptor2.getValue().getMessage()); + } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfigTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfigTest.java index 
d1973c08a..b54c48c15 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcSourceConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcSourceConfigTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.external.grpc; +package com.gotocompany.dagger.core.processors.external.grpc; import com.google.gson.annotations.SerializedName; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.junit.Before; import org.junit.Test; @@ -119,13 +119,13 @@ public void hasTypeShouldBeTrueWhenTypeIsPresent() { @Test public void hasTypeShouldBeFalseWhenTypeIsNull() { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", null, "", false, null, "", new HashMap<>(), new HashMap<>(), metricId, false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", null, "", false, "", null, "", new HashMap<>(), new HashMap<>(), metricId, false); assertFalse(httpSourceConfig.hasType()); } @Test public void hasTypeShouldBeFalseWhenTypeIsEmpty() { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", "", "", false, "", "", new HashMap<>(), new HashMap<>(), metricId, false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", "", "", false, "", "", "", new HashMap<>(), new HashMap<>(), metricId, false); assertFalse(httpSourceConfig.hasType()); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcStreamDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcStreamDecoratorTest.java similarity index 82% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcStreamDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcStreamDecoratorTest.java index 26775ad46..a35ff5456 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/GrpcStreamDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/GrpcStreamDecoratorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.grpc; +package com.gotocompany.dagger.core.processors.external.grpc; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/DynamicMessageMarshallerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/DynamicMessageMarshallerTest.java similarity index 92% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/DynamicMessageMarshallerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/DynamicMessageMarshallerTest.java index dd4a65a07..50b72c0a8 100644 --- 
a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/DynamicMessageMarshallerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/DynamicMessageMarshallerTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.external.grpc.client; +package com.gotocompany.dagger.core.processors.external.grpc.client; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.consumer.TestGrpcRequest; +import com.gotocompany.dagger.consumer.TestGrpcRequest; import org.apache.commons.io.IOUtils; import org.junit.Test; diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcClientTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcClientTest.java new file mode 100644 index 000000000..50295a4bb --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcClientTest.java @@ -0,0 +1,73 @@ +package com.gotocompany.dagger.core.processors.external.grpc.client; + +import io.grpc.Channel; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; +import io.grpc.ManagedChannelBuilder; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.times; +import static org.mockito.MockitoAnnotations.initMocks; + +public class GrpcClientTest { + + @Mock + private GrpcSourceConfig grpcSourceConfig; + + @Mock + private ManagedChannelBuilder channelBuilder; + + @Before + public void setUp() { + initMocks(this); + when(grpcSourceConfig.getEndpoint()).thenReturn("localhost"); + when(grpcSourceConfig.getServicePort()).thenReturn(8080); + } + + @Test + public void channelShouldBeAddedForAHostAndPort() { + + GrpcClient grpcClient = new GrpcClient(grpcSourceConfig); + + grpcClient.addChannel(); + + Channel decoratedChannel = grpcClient.getDecoratedChannel(); + assertNotNull(decoratedChannel); + + } + + @Test + public void channelBuilderShouldBeDecoratedWithKeepaliveAndTimeOutMS() { + when(grpcSourceConfig.getGrpcArgKeepaliveTimeMs()).thenReturn("1000"); + when(grpcSourceConfig.getGrpcArgKeepaliveTimeoutMs()).thenReturn("100"); + when(channelBuilder.keepAliveTime(anyLong(), any())).thenReturn(channelBuilder); + + GrpcClient grpcClient = new GrpcClient(grpcSourceConfig); + grpcClient.decorateManagedChannelBuilder(channelBuilder); + verify(channelBuilder, times(1)).keepAliveTimeout(Long.parseLong("100"), TimeUnit.MILLISECONDS); + verify(channelBuilder, times(1)).keepAliveTime(Long.parseLong("1000"), TimeUnit.MILLISECONDS); + } + + @Test + public void grpcClientCloseShouldWork() { + + GrpcClient grpcClient = new GrpcClient(grpcSourceConfig); + + grpcClient.addChannel(); + + Channel decoratedChannel = grpcClient.getDecoratedChannel(); + assertNotNull(decoratedChannel); + + grpcClient.close(); + decoratedChannel = grpcClient.getDecoratedChannel(); + assertNull(decoratedChannel); + + } + +} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcRequestHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcRequestHandlerTest.java similarity index 88% rename from 
dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcRequestHandlerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcRequestHandlerTest.java index d301085dd..e491b474a 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcRequestHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/grpc/client/GrpcRequestHandlerTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.processors.external.grpc.client; +package com.gotocompany.dagger.core.processors.external.grpc.client; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.consumer.TestGrpcRequest; -import io.odpf.dagger.core.exception.InvalidGrpcBodyException; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfigBuilder; +import com.gotocompany.dagger.core.exception.InvalidGrpcBodyException; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.consumer.TestGrpcRequest; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfig; +import com.gotocompany.dagger.core.processors.external.grpc.GrpcSourceConfigBuilder; import org.junit.Test; import static org.junit.Assert.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpAsyncConnectorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpAsyncConnectorTest.java similarity index 77% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpAsyncConnectorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpAsyncConnectorTest.java index 12b6138a4..88e3cc139 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpAsyncConnectorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpAsyncConnectorTest.java @@ -1,21 +1,21 @@ -package io.odpf.dagger.core.processors.external.http; - -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.exception.InvalidHttpVerbException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.external.AsyncConnector; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +package com.gotocompany.dagger.core.processors.external.http; + +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.exception.InvalidHttpVerbException; 
+import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.DescriptorManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.core.processors.external.AsyncConnector; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; @@ -26,13 +26,9 @@ import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; +import java.util.*; import java.util.concurrent.TimeoutException; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.*; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; @@ -97,7 +93,7 @@ public void setUp() { externalMetricConfig = new ExternalMetricConfig("metricId-http-01", shutDownPeriod, telemetryEnabled); defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", - "customer_id", "", "", "123", "234", false, httpConfigType, "345", + "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); } @@ -109,7 +105,7 @@ public void shouldCloseHttpClient() throws Exception { httpAsyncConnector.close(); verify(httpClient, times(1)).close(); - verify(meterStatsManager, times(1)).markEvent(CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); } @Test @@ -120,7 +116,7 @@ public void shouldMakeHttpClientNullAfterClose() throws Exception { httpAsyncConnector.close(); verify(httpClient, times(1)).close(); - verify(meterStatsManager, times(1)).markEvent(CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); assertNull(httpAsyncConnector.getHttpClient()); } @@ -140,6 +136,45 @@ public void shouldRegisterStatsManagerInOpen() throws Exception { verify(meterStatsManager, times(1)).register("source_metricId", "HTTP.metricId-http-01", ExternalSourceAspects.values()); } + @Test + public void shouldReturnEmptySetIfFailOnErrorsExclusionCodeRangeNULL() throws Exception { + HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(defaultHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); + + httpAsyncConnector.open(flinkConfiguration); + + Set failOnErrorsExclusionSet = httpAsyncConnector.getFailOnErrorsExclusionSet(); + assertTrue(failOnErrorsExclusionSet.isEmpty()); + } + + @Test + public void 
shouldReturnEmptySetIfFailOnErrorsExclusionCodeRangeEmpty() throws Exception { + HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(defaultHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); + + httpAsyncConnector.open(flinkConfiguration); + + Set failOnErrorsExclusionSet = httpAsyncConnector.getFailOnErrorsExclusionSet(); + assertTrue(failOnErrorsExclusionSet.isEmpty()); + } + + @Test + public void shouldReturnSetIfFailOnErrorsExclusionCodeRangeProvided() throws Exception { + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", + "customer_id", "", "", "123", "234", true, "400,410-499", httpConfigType, "345", + headers, outputMapping, "metricId_02", false); + HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(defaultHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); + + httpAsyncConnector.open(flinkConfiguration); + + Set failOnErrorsExclusionSet = httpAsyncConnector.getFailOnErrorsExclusionSet(); + assertTrue(failOnErrorsExclusionSet.contains(400)); + assertFalse(failOnErrorsExclusionSet.contains(401)); + assertFalse(failOnErrorsExclusionSet.contains(409)); + assertTrue(failOnErrorsExclusionSet.contains(410)); + assertTrue(failOnErrorsExclusionSet.contains(429)); + assertTrue(failOnErrorsExclusionSet.contains(499)); + assertTrue(failOnErrorsExclusionSet.size() == 91); + } + @Test public void shouldInitializeDescriptorManagerInOpen() throws Exception { when(schemaConfig.getStencilClientOrchestrator()).thenReturn(stencilClientOrchestrator); @@ -187,7 +222,7 @@ public void shouldCompleteExceptionallyIfOutputDescriptorNotFound() throws Excep public void shouldCompleteExceptionallyWhenRequestVariableIsInvalid() throws Exception { when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); String invalidRequestVariable = "invalid_variable"; - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", invalidRequestVariable, "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", invalidRequestVariable, "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(defaultHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); @@ -195,7 +230,7 @@ public void shouldCompleteExceptionallyWhenRequestVariableIsInvalid() throws Exc httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor exceptionArgumentCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(exceptionArgumentCaptor.capture()); assertEquals("Column 'invalid_variable' not found as configured in the 
'REQUEST_VARIABLES' variable", exceptionArgumentCaptor.getValue().getMessage()); @@ -210,7 +245,7 @@ public void shouldCompleteExceptionallyWhenRequestVariableIsInvalid() throws Exc @Test public void shouldCompleteExceptionallyWhenEndpointVariableIsEmptyAndRequiredInPattern() throws Exception { String emptyRequestVariable = ""; - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", emptyRequestVariable, "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", emptyRequestVariable, "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); @@ -219,7 +254,7 @@ public void shouldCompleteExceptionallyWhenEndpointVariableIsEmptyAndRequiredInP httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(1)).reportFatalException(any(InvalidConfigurationException.class)); verify(resultFuture, times(1)).completeExceptionally(any(InvalidConfigurationException.class)); } @@ -227,7 +262,7 @@ public void shouldCompleteExceptionallyWhenEndpointVariableIsEmptyAndRequiredInP @Test public void shouldEnrichWhenEndpointVariableIsEmptyAndNotRequiredInPattern() throws Exception { String emptyRequestVariable = ""; - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"static\"}", emptyRequestVariable, "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"static\"}", emptyRequestVariable, "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(defaultHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); @@ -239,15 +274,15 @@ public void shouldEnrichWhenEndpointVariableIsEmptyAndNotRequiredInPattern() thr httpAsyncConnector.asyncInvoke(streamData, resultFuture); verify(boundRequestBuilder, times(1)).execute(any(HttpResponseHandler.class)); - verify(meterStatsManager, times(1)).markEvent(TOTAL_EXTERNAL_CALLS); - verify(meterStatsManager, times(0)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_EXTERNAL_CALLS); + verify(meterStatsManager, times(0)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(0)).reportFatalException(any(InvalidConfigurationException.class)); } @Test public void shouldCompleteExceptionallyWhenEndpointPatternIsInvalid() throws Exception { String invalidRequestPattern = "{\"key\": \"%\"}"; - defaultHttpSourceConfig = new 
HttpSourceConfig("http://localhost:8080/test", "", "POST", invalidRequestPattern, "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", invalidRequestPattern, "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); @@ -255,14 +290,14 @@ public void shouldCompleteExceptionallyWhenEndpointPatternIsInvalid() throws Exc httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(1)).reportFatalException(any(InvalidConfigurationException.class)); verify(resultFuture, times(1)).completeExceptionally(any(InvalidConfigurationException.class)); } @Test public void shouldGetDescriptorFromOutputProtoIfTypeNotGiven() throws Exception { - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, null, "345", headers, outputMapping, "metricId_02", false); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); @@ -277,7 +312,7 @@ public void shouldGetDescriptorFromOutputProtoIfTypeNotGiven() throws Exception @Test public void shouldGetDescriptorFromTypeIfGiven() throws Exception { - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, "TestBookingLogMessage", "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, "TestBookingLogMessage", "345", headers, outputMapping, "metricId_02", false); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); @@ -293,7 +328,7 @@ public void shouldGetDescriptorFromTypeIfGiven() throws Exception { @Test public void shouldCompleteExceptionallyWhenEndpointPatternIsIncompatible() throws Exception { String invalidRequestPattern = "{\"key\": \"%d\"}"; - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", invalidRequestPattern, "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", 
"POST", invalidRequestPattern, "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); @@ -301,7 +336,7 @@ public void shouldCompleteExceptionallyWhenEndpointPatternIsIncompatible() throw httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(1)).reportFatalException(any(InvalidConfigurationException.class)); verify(resultFuture, times(1)).completeExceptionally(any(InvalidConfigurationException.class)); } @@ -319,7 +354,7 @@ public void shouldPerformPostRequestWithCorrectParameters() throws Exception { httpAsyncConnector.asyncInvoke(streamData, resultFuture); verify(boundRequestBuilder, times(1)).execute(any(HttpResponseHandler.class)); - verify(meterStatsManager, times(1)).markEvent(TOTAL_EXTERNAL_CALLS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_EXTERNAL_CALLS); } @Test @@ -344,7 +379,7 @@ public void shouldAddDynamicHeaders() throws Exception { when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); HttpSourceConfig dynamicHeaderHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", - "customer_id", "{\"X_KEY\": \"%s\"}", "customer_id", "123", "234", false, httpConfigType, "345", + "customer_id", "{\"X_KEY\": \"%s\"}", "customer_id", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(dynamicHeaderHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); @@ -362,13 +397,13 @@ public void shouldNotAddDynamicHeaders() throws Exception { when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); HttpSourceConfig dynamicHeaderHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", - "customer_id", "{\"X_KEY\": \"%s\"}", "customer_ids", "123", "234", false, httpConfigType, "345", + "customer_id", "{\"X_KEY\": \"%s\"}", "customer_ids", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(dynamicHeaderHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, times(1)).reportFatalException(any(InvalidConfigurationException.class)); verify(resultFuture, 
times(1)).completeExceptionally(any(InvalidConfigurationException.class)); } @@ -377,7 +412,7 @@ public void shouldNotAddDynamicHeaders() throws Exception { public void shouldCompleteExceptionallyWhenHeaderVariableIsInvalid() throws Exception { when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); String invalidHeaderVariable = "invalid_variable"; - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "{\"key\": \"%s\"}", invalidHeaderVariable, "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "{\"key\": \"%s\"}", invalidHeaderVariable, "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(defaultHttpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); @@ -385,7 +420,7 @@ public void shouldCompleteExceptionallyWhenHeaderVariableIsInvalid() throws Exce httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor exceptionArgumentCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(exceptionArgumentCaptor.capture()); assertEquals("Column 'invalid_variable' not found as configured in the 'HEADER_VARIABLES' variable", exceptionArgumentCaptor.getValue().getMessage()); @@ -400,7 +435,7 @@ public void shouldCompleteExceptionallyWhenHeaderVariableIsInvalid() throws Exce @Test public void shouldCompleteExceptionallyWhenHeaderPatternIsIncompatible() throws Exception { String invalidHeaderPattern = "{\"key\": \"%d\"}"; - defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", invalidHeaderPattern, "customer_id", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + defaultHttpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", invalidHeaderPattern, "customer_id", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(boundRequestBuilder); when(boundRequestBuilder.setBody("{\"key\": \"123456\"}")).thenReturn(boundRequestBuilder); @@ -408,14 +443,24 @@ public void shouldCompleteExceptionallyWhenHeaderPatternIsIncompatible() throws httpAsyncConnector.open(flinkConfiguration); httpAsyncConnector.asyncInvoke(streamData, resultFuture); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(errorReporter, 
times(1)).reportFatalException(any(InvalidConfigurationException.class)); verify(resultFuture, times(1)).completeExceptionally(any(InvalidConfigurationException.class)); } @Test public void shouldThrowExceptionInTimeoutIfFailOnErrorIsTrue() throws Exception { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(httpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); + + httpAsyncConnector.timeout(streamData, resultFuture); + + verify(resultFuture, times(1)).completeExceptionally(any(TimeoutException.class)); + } + + @Test + public void shouldThrowExceptionInTimeoutIfFailOnErrorIsTrueWithExcludeFailOnErrorCodeRange() throws Exception { + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "400-600", httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(httpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); httpAsyncConnector.timeout(streamData, resultFuture); @@ -425,7 +470,17 @@ public void shouldThrowExceptionInTimeoutIfFailOnErrorIsTrue() throws Exception @Test public void shouldReportFatalInTimeoutIfFailOnErrorIsTrue() throws Exception { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(httpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); + + httpAsyncConnector.timeout(streamData, resultFuture); + + verify(errorReporter, times(1)).reportFatalException(any(TimeoutException.class)); + } + + @Test + public void shouldReportFatalInTimeoutIfFailOnErrorIsTrueWithExcludeFailOnErrorCodeRange() throws Exception { + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "401-600", httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(httpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); httpAsyncConnector.timeout(streamData, resultFuture); @@ -435,7 +490,7 @@ public void shouldReportFatalInTimeoutIfFailOnErrorIsTrue() throws Exception { @Test public void shouldReportNonFatalInTimeoutIfFailOnErrorIsFalse() { - HttpSourceConfig httpSourceConfig = new 
HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(httpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); httpAsyncConnector.timeout(streamData, resultFuture); @@ -455,7 +510,7 @@ public void shouldPassTheInputWithRowSizeCorrespondingToColumnNamesInTimeoutIfFa public void shouldThrowExceptionIfUnsupportedHttpVerbProvided() throws Exception { when(defaultDescriptorManager.getDescriptor(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "PATCH", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "PATCH", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); HttpAsyncConnector httpAsyncConnector = new HttpAsyncConnector(httpSourceConfig, externalMetricConfig, schemaConfig, httpClient, errorReporter, meterStatsManager, defaultDescriptorManager); httpAsyncConnector.open(flinkConfiguration); diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpResponseHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpResponseHandlerTest.java new file mode 100644 index 000000000..b1731ec74 --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpResponseHandlerTest.java @@ -0,0 +1,636 @@ +package com.gotocompany.dagger.core.processors.external.http; + +import com.google.protobuf.Descriptors; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.exception.HttpFailureException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage; +import org.apache.flink.streaming.api.functions.async.ResultFuture; +import org.apache.flink.types.Row; +import org.asynchttpclient.Response; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; + +import java.util.*; +import java.util.stream.IntStream; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; +import static org.mockito.MockitoAnnotations.initMocks; + +public class HttpResponseHandlerTest { + + @Mock + private ResultFuture resultFuture; + + @Mock + private Response 
response; + + @Mock + private MeterStatsManager meterStatsManager; + + @Mock + private HttpSourceConfig httpSourceConfig; + + @Mock + private ErrorReporter errorReporter; + + private Descriptors.Descriptor descriptor; + private List outputColumnNames; + private String[] inputColumnNames; + private HashMap outputMapping; + private HashMap headers; + private String httpConfigType; + private Row streamData; + private RowManager rowManager; + private ColumnNameManager columnNameManager; + private Row inputData; + + @Before + public void setup() { + initMocks(this); + descriptor = TestSurgeFactorLogMessage.getDescriptor(); + outputColumnNames = Collections.singletonList("value"); + inputColumnNames = new String[]{"order_id", "customer_id", "driver_id"}; + outputMapping = new HashMap<>(); + headers = new HashMap<>(); + headers.put("content-type", "application/json"); + httpConfigType = "test"; + streamData = new Row(2); + inputData = new Row(3); + inputData.setField(1, "123456"); + streamData.setField(0, inputData); + streamData.setField(1, new Row(2)); + rowManager = new RowManager(streamData); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + } + + @Test + public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIs404() { + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(404); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_404); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); + assertEquals("Received status code : 404", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIs4XXOtherThan404() { + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(402); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_4XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); + assertEquals("Received status code : 402", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void 
shouldPassInputIfFailOnErrorFalseAndStatusCodeIs5XX() { + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(502); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_5XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); + assertEquals("Received status code : 502", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIsOtherThan5XXAnd4XX() { + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(302); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.OTHER_ERRORS); + verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); + assertEquals("Received status code : 302", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIs404() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(404); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(resultFuture).completeExceptionally(any(HttpFailureException.class)); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)) + .reportFatalException(argumentCaptor.capture()); + assertEquals("Received status code : 404", argumentCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_404); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIs4XXOtherThan404() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": 
\"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(400); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); + assertEquals("Received status code : 400", failureCaptor.getValue().getMessage()); + verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_4XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIs5XX() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(502); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); + assertEquals("Received status code : 502", failureCaptor.getValue().getMessage()); + verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_5XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIsOtherThan5XXAnd4XX() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(302); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); + assertEquals("Received status code : 302", failureCaptor.getValue().getMessage()); + verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.OTHER_ERRORS); + verify(meterStatsManager, 
times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldPassInputIfFailOnErrorFalseAndOnThrowable() { + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Throwable throwable = new Throwable("throwable message"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onThrowable(throwable); + + verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); + assertEquals("throwable message", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowErrorIfFailOnErrorTrueAndOnThrowable() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Throwable throwable = new Throwable("throwable message"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onThrowable(throwable); + + verify(resultFuture).completeExceptionally(any(RuntimeException.class)); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportFatalException(failureCaptor.capture()); + assertEquals("throwable message", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.OTHER_ERRORS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowErrorIfFailOnErrorTrueAndOnThrowableWithExcludeFailOnErrorsCodeRange() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "404-499", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HashSet failOnErrorsExclusionSet = new HashSet(); + IntStream.rangeClosed(404, 499).forEach(statusCode -> failOnErrorsExclusionSet.add(statusCode)); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, failOnErrorsExclusionSet, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Throwable throwable = new Throwable("throwable message"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onThrowable(throwable); + + verify(resultFuture).completeExceptionally(any(RuntimeException.class)); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportFatalException(failureCaptor.capture()); + assertEquals("throwable message", 
failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.OTHER_ERRORS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldFailForAnyNone2xxIfFailOnErrorsTrueWithNullExcludeFailOnErrorsCodeRange() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(400); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); + assertEquals("Received status code : 400", failureCaptor.getValue().getMessage()); + verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_4XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldFailForAnyNone2xxIfFailOnErrorsTrueWithEmptyExcludeFailOnErrorsCodeRange() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(400); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); + assertEquals("Received status code : 400", failureCaptor.getValue().getMessage()); + verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_4XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldPassForAnyNone2xxInsideOfFailOnErrorsCodeRangeIfFailOnErrorsTrue() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "400,404-499", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HashSet failOnErrorsExclusionSet = new HashSet(); + IntStream.rangeClosed(404, 499).forEach(statusCode -> failOnErrorsExclusionSet.add(statusCode)); + failOnErrorsExclusionSet.add(400); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, 
failOnErrorsExclusionSet, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(400); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_4XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); + assertEquals("Received status code : 400", failureCaptor.getValue().getMessage()); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldFailForAnyNone2xxOutsideOfFailOnErrorsCodeRangeIfFailOnErrorsTrue() { + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "400,404-499", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HashSet failOnErrorsExclusionSet = new HashSet(); + IntStream.rangeClosed(404, 499).forEach(statusCode -> failOnErrorsExclusionSet.add(statusCode)); + failOnErrorsExclusionSet.add(400); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, failOnErrorsExclusionSet, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + when(response.getStatusCode()).thenReturn(502); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); + verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); + assertEquals("Received status code : 502", failureCaptor.getValue().getMessage()); + verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURE_CODE_5XX); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldPopulateSingleResultFromHttpCallInInputRow() { + outputMapping.put("surge_factor", new OutputMapping("$.surge")); + outputColumnNames = Collections.singletonList("surge_factor"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, 0.732f); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + 
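
For context on the FAIL_ON_ERRORS exclusion tests above: the new failOnErrorsCodeRange config value (for example "400,404-499") lists individual status codes and closed ranges that should not be treated as fatal even when fail-on-errors is enabled, and the tests expand it inline with IntStream.rangeClosed before handing the resulting set to HttpResponseHandler. A minimal, hypothetical sketch of such an expansion (class and method names are illustrative only, not Dagger's actual parser) could look like this:

// Illustrative sketch only, not part of this patch: expanding a
// FAIL_ON_ERRORS exclusion range string such as "400,404-499" into the
// status-code set that the tests above build by hand with IntStream.rangeClosed.
import java.util.HashSet;
import java.util.Set;
import java.util.stream.IntStream;

class FailOnErrorsCodeRangeSketch {
    static Set<Integer> expand(String codeRange) {
        Set<Integer> codes = new HashSet<>();
        if (codeRange == null || codeRange.trim().isEmpty()) {
            return codes; // null/empty range: nothing excluded, any non-2xx fails
        }
        for (String token : codeRange.split(",")) {
            String[] bounds = token.trim().split("-");
            if (bounds.length == 2) {
                // closed range such as "404-499"
                IntStream.rangeClosed(Integer.parseInt(bounds[0].trim()), Integer.parseInt(bounds[1].trim()))
                        .forEach(codes::add);
            } else {
                // single code such as "400"
                codes.add(Integer.parseInt(bounds[0].trim()));
            }
        }
        return codes;
    }
}

Under this sketch, expand("400,404-499") yields the same set the tests construct manually: 400 plus every code from 404 through 499, which is why a 400 response passes through as a non-fatal error while a 502 still fails the stream.
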
verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldPopulateMultipleResultsFromHttpCallInInputRow() { + outputMapping.put("surge_factor", new OutputMapping("$.surge")); + outputMapping.put("s2_id_level", new OutputMapping("$.prediction")); + outputColumnNames = Arrays.asList("surge_factor", "s2_id_level"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, 0.732f); + outputData.setField(1, 345); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732,\n" + + " \"prediction\": 345\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + } + + @Test + public void shouldThrowExceptionIfFieldNotFoundInFieldDescriptorWhenTypeIsPassed() { + httpConfigType = "com.gotocompany.dagger.consumer.TestBookingLogMessage"; + descriptor = TestBookingLogMessage.getDescriptor(); + outputMapping.put("surge_factor", new OutputMapping("$.surge")); + outputColumnNames = Collections.singletonList("surge_factor"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, "", httpConfigType, "345", headers, outputMapping, "metricId_02", false); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732\n" + + "}"); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + + httpResponseHandler.startTimer(); + assertThrows(NullPointerException.class, + () -> httpResponseHandler.onCompleted(response)); + verify(resultFuture, times(1)).completeExceptionally(any(IllegalArgumentException.class)); + } + + @Test + public void shouldThrowExceptionIfPathIsWrongIfFailOnErrorsTrue() { + outputMapping.put("surge_factor", new OutputMapping("invalidPath")); + outputColumnNames = Collections.singletonList("surge_factor"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, "", httpConfigType, "345", 
headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, 0.732f); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(resultFuture, times(1)).completeExceptionally(any(RuntimeException.class)); + verify(errorReporter, times(1)).reportFatalException(any(RuntimeException.class)); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.FAILURES_ON_READING_PATH); + } + + @Test + public void shouldPopulateResultAsObjectIfTypeIsNotPassedAndRetainResponseTypeIsTrue() { + outputMapping.put("surge_factor", new OutputMapping("$.surge")); + outputColumnNames = Collections.singletonList("surge_factor"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, "", null, "345", headers, outputMapping, "metricId_02", true); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, 0.732); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldNotPopulateResultAsObjectIfTypeIsNotPassedAndRetainResponseTypeIsFalse() { + outputMapping.put("surge_factor", new OutputMapping("$.surge")); + outputColumnNames = Collections.singletonList("surge_factor"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, null, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, 0.732f); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + 
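
The population tests above read values out of the HTTP response body using the configured output-mapping JSON paths (for example "$.surge"), keeping the raw JSON type when retain_response_type is true and coercing it otherwise (0.732 stays a Double versus becoming 0.732f). A rough sketch of that lookup, assuming the Jayway JsonPath library and hypothetical names (the real handler code may differ), is:

// Illustrative sketch only, not part of this patch: reading an output-mapping
// JSON path from the enrichment response, as the tests above do for "$.surge".
// Assumes the Jayway JsonPath library; Dagger's actual handler may differ.
import com.jayway.jsonpath.JsonPath;

class ResponsePathSketch {
    static Object read(String responseBody, String path, boolean retainResponseType) {
        Object value = JsonPath.parse(responseBody).read(path); // e.g. 0.732 for "$.surge"
        if (retainResponseType) {
            return value; // keep the type as parsed from JSON (a Double here)
        }
        // otherwise the value is coerced, here to float as in the tests above
        return ((Number) value).floatValue();
    }
}
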
verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldHandleAnySuccessResponseCodeOtherThan200() { + outputMapping.put("surge_factor", new OutputMapping("$.surge")); + outputColumnNames = Collections.singletonList("surge_factor"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, 0.732f); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(201); + when(response.getResponseBody()).thenReturn("{\n" + + " \"surge\": 0.732\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldPopulateSingleResultFromHttpCallInInputRowForNestedColumnSetRetainTypeFalse() { + descriptor = com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage.getDescriptor(); + outputMapping.put("customer_profile.email", new OutputMapping("$.email")); + outputMapping.put("customer_profile.name", new OutputMapping("$.name")); + outputColumnNames = Arrays.asList("customer_profile.email", "customer_profile.name"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, "test_email@go-jek.com"); + outputData.setField(1, "test_name"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"email\": \"test_email@go-jek.com\",\n" + + " \"name\": \"test_name\"\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldPopulateSingleResultFromHttpCallInInputRowForNestedColumnSetRetainTypeTrue() 
{ + descriptor = com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage.getDescriptor(); + outputMapping.put("customer_profile.email", new OutputMapping("$.email")); + outputMapping.put("customer_profile.name", new OutputMapping("$.name")); + outputColumnNames = Arrays.asList("customer_profile.email", "customer_profile.name"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", true); + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, "test_email@go-jek.com"); + outputData.setField(1, "test_name"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"email\": \"test_email@go-jek.com\",\n" + + " \"name\": \"test_name\"\n" + + "}"); + + httpResponseHandler.startTimer(); + httpResponseHandler.onCompleted(response); + + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); + verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); + } + + @Test + public void shouldThrowNullPointerExceptionWhenNestedFieldIsNotPresentInOutputDescriptor() { + descriptor = com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage.getDescriptor(); + outputMapping.put("customer_profile.invalid_email", new OutputMapping("$.email")); + outputColumnNames = Arrays.asList("customer_profile.invalid_email"); + columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, "test_email@go-jek.com"); + outputData.setField(1, "test_name"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"email\": \"test_email@go-jek.com\",\n" + + " \"name\": \"test_name\"\n" + + "}"); + + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + httpResponseHandler.startTimer(); + assertThrows(NullPointerException.class, + () -> httpResponseHandler.onCompleted(response)); + verify(resultFuture, times(1)).completeExceptionally(any(RuntimeException.class)); + } + + @Test + public void shouldThrowExceptionWhenNestedParentFieldIsNotPresentInOutputDescriptor() { + descriptor = com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage.getDescriptor(); + outputMapping.put("customer-profile.email", new OutputMapping("$.email")); + outputColumnNames = Arrays.asList("customer-profile.email"); + columnNameManager = 
new ColumnNameManager(inputColumnNames, outputColumnNames); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, httpConfigType, "345", headers, outputMapping, "metricId_02", false); + Row resultStreamData = new Row(2); + Row outputData = new Row(2); + outputData.setField(0, "test_email@go-jek.com"); + outputData.setField(1, "test_name"); + resultStreamData.setField(0, inputData); + resultStreamData.setField(1, outputData); + when(response.getStatusCode()).thenReturn(200); + when(response.getResponseBody()).thenReturn("{\n" + + " \"email\": \"test_email@go-jek.com\",\n" + + " \"name\": \"test_name\"\n" + + "}"); + + HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, new HashSet(), meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); + + httpResponseHandler.startTimer(); + + assertThrows(NullPointerException.class, + () -> httpResponseHandler.onCompleted(response)); + verify(resultFuture, times(1)).completeExceptionally(any(RuntimeException.class)); + } +} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpSourceConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpSourceConfigTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpSourceConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpSourceConfigTest.java index 736fe022d..520a31962 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpSourceConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpSourceConfigTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.external.http; +package com.gotocompany.dagger.core.processors.external.http; -import io.odpf.dagger.core.processors.common.OutputMapping; +import com.gotocompany.dagger.core.processors.common.OutputMapping; import org.junit.Before; import org.junit.Test; @@ -27,6 +27,7 @@ public class HttpSourceConfigTest { private String headerVariables; private String connectTimeout; private boolean failOnErrors; + private String failOnErrorsCodeRange; private String type; private String capacity; private String metricId; @@ -49,11 +50,12 @@ public void setup() { headerVariables = "customer_id"; connectTimeout = "234"; failOnErrors = false; + failOnErrorsCodeRange = ""; type = "InputProtoMessage"; capacity = "345"; metricId = "metricId-http-01"; retainResponseType = false; - defaultHttpSourceConfig = new HttpSourceConfig(endpoint, endpointVariable, verb, requestPattern, requestVariables, headerPattern, headerVariables, streamTimeout, connectTimeout, failOnErrors, type, capacity, headerMap, outputMappings, metricId, retainResponseType); + defaultHttpSourceConfig = new HttpSourceConfig(endpoint, endpointVariable, verb, requestPattern, requestVariables, headerPattern, headerVariables, streamTimeout, connectTimeout, failOnErrors, failOnErrorsCodeRange, type, capacity, headerMap, outputMappings, metricId, retainResponseType); } @Test @@ -113,13 +115,13 @@ public void hasTypeShouldBeTrueWhenTypeIsPresent() { @Test public void hasTypeShouldBeFalseWhenTypeIsNull() { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", null, "", false, null, "", new HashMap<>(), new 
HashMap<>(), metricId, false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", null, "", false, null, null, "", new HashMap<>(), new HashMap<>(), metricId, false); assertFalse(httpSourceConfig.hasType()); } @Test public void hasTypeShouldBeFalseWhenTypeIsEmpty() { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", "", "", false, "", "", new HashMap<>(), new HashMap<>(), metricId, false); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("", "", "", "", "", "", "", "", "", false, null, "", "", new HashMap<>(), new HashMap<>(), metricId, false); assertFalse(httpSourceConfig.hasType()); } @@ -153,7 +155,7 @@ public void shouldValidate() { @Test public void shouldThrowExceptionIfAllFieldsMissing() { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig(null, null, null, null, requestVariables, null, null, null, null, false, null, capacity, null, null, metricId, retainResponseType); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig(null, null, null, null, requestVariables, null, null, null, null, false, null, null, capacity, null, null, metricId, retainResponseType); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> httpSourceConfig.validateFields()); assertEquals("Missing required fields: [endpoint, streamTimeout, requestPattern, verb, connectTimeout, outputMapping]", exception.getMessage()); @@ -162,7 +164,7 @@ public void shouldThrowExceptionIfAllFieldsMissing() { @Test public void shouldThrowExceptionIfSomeFieldsMissing() { - HttpSourceConfig httpSourceConfig = new HttpSourceConfig("localhost", "", "post", "body", requestVariables, null, null, null, null, false, null, capacity, null, null, "metricId_01", retainResponseType); + HttpSourceConfig httpSourceConfig = new HttpSourceConfig("localhost", "", "post", "body", requestVariables, null, null, null, null, false, null, null, capacity, null, null, "metricId_01", retainResponseType); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> httpSourceConfig.validateFields()); assertEquals("Missing required fields: [streamTimeout, connectTimeout, outputMapping]", exception.getMessage()); @@ -176,7 +178,7 @@ public void shouldThrowExceptionIfFieldsOfNestedObjectsAreMissing() { outputMappings.put("field", outputMappingWithNullField); defaultHttpSourceConfig = new HttpSourceConfig("http://localhost", "", - "post", "request_body", requestVariables, "", "", "4000", "1000", false, "", capacity, headerMap, outputMappings, "metricId_01", retainResponseType); + "post", "request_body", requestVariables, "", "", "4000", "1000", false, null, "", capacity, headerMap, outputMappings, "metricId_01", retainResponseType); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> defaultHttpSourceConfig.validateFields()); assertEquals("Missing required fields: [path]", exception.getMessage()); @@ -191,7 +193,7 @@ public void shouldThrowExceptionIfRequestPatternIsEmpty() { outputMappings.put("field", outputMappingWithNullField); defaultHttpSourceConfig = new HttpSourceConfig("http://localhost", "", - "post", "", requestVariables, "", "", "4000", "1000", false, "", capacity, headerMap, outputMappings, "metricId_01", retainResponseType); + "post", "", requestVariables, "", "", "4000", "1000", false, null, "", capacity, headerMap, outputMappings, "metricId_01", retainResponseType); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> 
defaultHttpSourceConfig.validateFields()); assertEquals("Missing required fields: [requestPattern]", exception.getMessage()); @@ -226,7 +228,7 @@ public void shouldReturnMandatoryFields() { @Test public void shouldValidateWhenOutputMappingIsEmpty() { - defaultHttpSourceConfig = new HttpSourceConfig(endpoint, endpointVariable, verb, requestPattern, requestVariables, headerPattern, headerVariables, streamTimeout, connectTimeout, failOnErrors, type, capacity, headerMap, new HashMap<>(), "metricId_01", retainResponseType); + defaultHttpSourceConfig = new HttpSourceConfig(endpoint, endpointVariable, verb, requestPattern, requestVariables, headerPattern, headerVariables, streamTimeout, connectTimeout, failOnErrors, failOnErrorsCodeRange, type, capacity, headerMap, new HashMap<>(), "metricId_01", retainResponseType); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> defaultHttpSourceConfig.validateFields()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpStreamDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpStreamDecoratorTest.java similarity index 82% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpStreamDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpStreamDecoratorTest.java index 6540ab0f8..afa56c06a 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpStreamDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/HttpStreamDecoratorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.http; +package com.gotocompany.dagger.core.processors.external.http; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpGetRequestHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpGetRequestHandlerTest.java similarity index 87% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpGetRequestHandlerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpGetRequestHandlerTest.java index dc6da7f72..d4db23a22 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpGetRequestHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpGetRequestHandlerTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; import org.junit.Before; @@ -40,14 +40,14 @@ public void setup() 
{ @Test public void shouldReturnTrueForGetVerbOnCanCreate() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "", "type", "345", new HashMap<>(), null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); assertTrue(httpGetRequestBuilder.canCreate()); } @Test public void shouldReturnFalseForVerbOtherThanGetOnCanBuild() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "", "type", "345", new HashMap<>(), null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); assertFalse(httpGetRequestBuilder.canCreate()); } @@ -55,7 +55,7 @@ public void shouldReturnFalseForVerbOtherThanGetOnCanBuild() { @Test public void shouldBuildGetRequest() { when(httpClient.prepareGet("http://localhost:8080/test/key/1")).thenReturn(request); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "", "", "123", "234", false, "", "type", "345", new HashMap<>(), null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); assertEquals(request, httpGetRequestBuilder.create()); } @@ -64,7 +64,7 @@ public void shouldBuildGetRequest() { public void shouldBuildGetRequestWithOnlyDynamicHeader() { when(httpClient.prepareGet("http://localhost:8080/test/key/1")).thenReturn(request); when(request.addHeader("header_key", "1")).thenReturn(request); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, "", "type", "345", new HashMap<>(), null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); httpGetRequestBuilder.create(); verify(request, times(1)).addHeader(anyString(), anyString()); @@ -77,7 +77,7 @@ public void shouldBuildGetRequestWithDynamicAndStaticHeader() { when(request.addHeader("header_key", "1")).thenReturn(request); HashMap staticHeader = new HashMap(); staticHeader.put("static", "2"); - httpSourceConfig = new 
HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, "", "type", "345", staticHeader, null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); httpGetRequestBuilder.create(); verify(request, times(2)).addHeader(anyString(), anyString()); @@ -90,7 +90,7 @@ public void shouldBuildGetRequestWithMultipleDynamicAndStaticHeaders() { when(httpClient.prepareGet("http://localhost:8080/test/key/1")).thenReturn(request); HashMap staticHeader = new HashMap(); staticHeader.put("static", "3"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%s\"}", "1,2", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%s\"}", "1,2", "123", "234", false, "", "type", "345", staticHeader, null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); httpGetRequestBuilder.create(); verify(request, times(3)).addHeader(anyString(), anyString()); @@ -107,7 +107,7 @@ public void shouldThrowErrorIfHeaderVariablesAreIncompatible() { ArrayList incompatibleHeaderVariablesValues = new ArrayList<>(); incompatibleHeaderVariablesValues.add("test1"); incompatibleHeaderVariablesValues.add("test12"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%d\"}", "1,2", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%d\"}", "1,2", "123", "234", false, "", "type", "345", staticHeader, null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), incompatibleHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); InvalidConfigurationException exception = assertThrows(InvalidConfigurationException.class, () -> httpGetRequestBuilder.create()); assertEquals("pattern config '{\"header_key_1\": \"%s\",\"header_key_2\": \"%d\"}' is incompatible with the variable config '1,2'", exception.getMessage()); @@ -118,7 +118,7 @@ public void shouldThrowErrorIfHeaderHeaderPatternIsInvalid() { when(httpClient.prepareGet("http://localhost:8080/test/key/1")).thenReturn(request); HashMap staticHeader = new HashMap(); staticHeader.put("static", "3"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%p\"}", "1,2", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "/key/%s", "1", 
"{\"header_key_1\": \"%s\",\"header_key_2\": \"%p\"}", "1,2", "123", "234", false, "", "type", "345", staticHeader, null, "metricId_01", false); HttpGetRequestHandler httpGetRequestBuilder = new HttpGetRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); InvalidConfigurationException exception = assertThrows(InvalidConfigurationException.class, () -> httpGetRequestBuilder.create()); assertEquals("pattern config '{\"header_key_1\": \"%s\",\"header_key_2\": \"%p\"}' is invalid", exception.getMessage()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpPostRequestHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPostRequestHandlerTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpPostRequestHandlerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPostRequestHandlerTest.java index 95faf4d6c..9302a2c83 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpPostRequestHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpPostRequestHandlerTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; import org.junit.Before; @@ -41,14 +41,14 @@ public void setup() { @Test public void shouldReturnTrueForPostVerbOnCanCreate() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", null, "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", null, "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); assertTrue(httpPostRequestBuilder.canCreate()); } @Test public void shouldReturnFalseForVerbOtherThanPostOnCanBuild() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "GET", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); assertFalse(httpPostRequestBuilder.canCreate()); } @@ -57,7 +57,7 @@ public void 
shouldReturnFalseForVerbOtherThanPostOnCanBuild() { public void shouldBuildPostRequestWithoutHeader() { when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(request); when(request.setBody("{\"key\": \"1\"}")).thenReturn(request); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); assertEquals(request, httpPostRequestBuilder.create()); } @@ -66,7 +66,7 @@ public void shouldBuildPostRequestWithoutHeader() { public void shouldBuildPostRequestWithOnlyDynamicHeader() { when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(request); when(request.setBody("{\"key\": \"1\"}")).thenReturn(request); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); httpPostRequestBuilder.create(); verify(request, times(1)).addHeader(anyString(), anyString()); @@ -79,7 +79,7 @@ public void shouldBuildPostRequestWithDynamicAndStaticHeader() { when(request.setBody("{\"key\": \"1\"}")).thenReturn(request); HashMap staticHeader = new HashMap(); staticHeader.put("static", "2"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key\": \"%s\"}", "1", "123", "234", false, null, "type", "345", staticHeader, null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); httpPostRequestBuilder.create(); verify(request, times(2)).addHeader(anyString(), anyString()); @@ -93,7 +93,7 @@ public void shouldBuildPostRequestWithMultipleDynamicAndStaticHeaders() { when(request.setBody("{\"key\": \"1\"}")).thenReturn(request); HashMap staticHeader = new HashMap(); staticHeader.put("static", "3"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%s\"}", "1,2", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key_1\": 
\"%s\",\"header_key_2\": \"%s\"}", "1,2", "123", "234", false, null, "type", "345", staticHeader, null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); httpPostRequestBuilder.create(); verify(request, times(3)).addHeader(anyString(), anyString()); @@ -111,7 +111,7 @@ public void shouldThrowErrorIfHeaderVariablesAreIncompatible() { ArrayList incompatibleHeaderVariablesValues = new ArrayList<>(); incompatibleHeaderVariablesValues.add("test1"); incompatibleHeaderVariablesValues.add("test12"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%d\"}", "1,2", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%d\"}", "1,2", "123", "234", false, null, "type", "345", staticHeader, null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), incompatibleHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); InvalidConfigurationException exception = assertThrows(InvalidConfigurationException.class, () -> httpPostRequestBuilder.create()); assertEquals("pattern config '{\"header_key_1\": \"%s\",\"header_key_2\": \"%d\"}' is incompatible with the variable config '1,2'", exception.getMessage()); @@ -123,7 +123,7 @@ public void shouldThrowErrorIfHeaderHeaderPatternIsInvalid() { when(request.setBody("{\"key\": \"1\"}")).thenReturn(request); HashMap staticHeader = new HashMap(); staticHeader.put("static", "3"); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%p\"}", "1,2", "123", "234", false, "type", "345", staticHeader, null, "metricId_01", false); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "1", "{\"header_key_1\": \"%s\",\"header_key_2\": \"%p\"}", "1,2", "123", "234", false, null, "type", "345", staticHeader, null, "metricId_01", false); HttpPostRequestHandler httpPostRequestBuilder = new HttpPostRequestHandler(httpSourceConfig, httpClient, requestVariablesValues.toArray(), dynamicHeaderVariablesValues.toArray(), endpointVariablesValues.toArray()); InvalidConfigurationException exception = assertThrows(InvalidConfigurationException.class, () -> httpPostRequestBuilder.create()); assertEquals("pattern config '{\"header_key_1\": \"%s\",\"header_key_2\": \"%p\"}' is invalid", exception.getMessage()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestFactoryTest.java similarity index 87% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestFactoryTest.java index ecd779072..f055f0cc2 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/request/HttpRequestFactoryTest.java +++ 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/http/request/HttpRequestFactoryTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.http.request; +package com.gotocompany.dagger.core.processors.external.http.request; -import io.odpf.dagger.core.exception.InvalidHttpVerbException; -import io.odpf.dagger.core.processors.external.http.HttpSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidHttpVerbException; +import com.gotocompany.dagger.core.processors.external.http.HttpSourceConfig; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.BoundRequestBuilder; import org.junit.Before; @@ -41,7 +41,7 @@ public void setup() { @Test public void shouldReturnPostRequestOnTheBasisOfConfiguration() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", null, "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", null, "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); when(httpClient.preparePost("http://localhost:8080/test")).thenReturn(request); when(request.setBody("{\"key\": \"123456\"}")).thenReturn(request); HttpRequestFactory.createRequest(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); @@ -53,7 +53,7 @@ public void shouldReturnPostRequestOnTheBasisOfConfiguration() { @Test public void shouldReturnPostRequestWithMultiEndpointVariablesOnTheBasisOfConfiguration() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s/%s", "exp, 222", "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s/%s", "exp, 222", "POST", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); when(httpClient.preparePost("http://localhost:8080/test/exp/222")).thenReturn(request); when(request.setBody("{\"key\": \"123456\"}")).thenReturn(request); endpointVariablesValues.add("exp"); @@ -67,7 +67,7 @@ public void shouldReturnPostRequestWithMultiEndpointVariablesOnTheBasisOfConfigu @Test public void shouldReturnGetRequestOnTheBasisOfConfiguration() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", null, "GET", "/key/%s", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", null, "GET", "/key/%s", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); when(httpClient.prepareGet("http://localhost:8080/test/key/1")).thenReturn(request); HttpRequestFactory.createRequest(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray()); @@ -78,7 +78,7 @@ public void shouldReturnGetRequestOnTheBasisOfConfiguration() { @Test public void shouldReturnGetRequestWithMultiEndpointVariablesOnTheBasisOfConfiguration() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s/%s", "123, 332", "GET", "/key/%s", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, 
"metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s/%s", "123, 332", "GET", "/key/%s", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); when(httpClient.prepareGet("http://localhost:8080/test/key/1")).thenReturn(request); endpointVariablesValues.add("123"); endpointVariablesValues.add("332"); @@ -90,7 +90,7 @@ public void shouldReturnGetRequestWithMultiEndpointVariablesOnTheBasisOfConfigur @Test public void shouldReturnPutRequestOnTheBasisOfConfiguration() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s", "123", "PUT", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s", "123", "PUT", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); when(httpClient.preparePut("http://localhost:8080/test/123")).thenReturn(request); when(request.setBody("{\"key\": \"123456\"}")).thenReturn(request); endpointVariablesValues.add("123"); @@ -103,7 +103,7 @@ public void shouldReturnPutRequestOnTheBasisOfConfiguration() { @Test public void shouldReturnPutRequestWithMultiEndpointVariablesOnTheBasisOfConfiguration() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s/abc/%s", "123, 321, asd", "PUT", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test/%s/abc/%s", "123, 321, asd", "PUT", "{\"key\": \"%s\"}", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); when(httpClient.preparePut("http://localhost:8080/test/123/abc/asd")).thenReturn(request); when(request.setBody("{\"key\": \"123456\"}")).thenReturn(request); endpointVariablesValues.add("123"); @@ -118,7 +118,7 @@ public void shouldReturnPutRequestWithMultiEndpointVariablesOnTheBasisOfConfigur @Test public void shouldThrowExceptionForUnsupportedHttpVerb() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "PATCH", "/key/%s", "1", "", "", "123", "234", false, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); + httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "PATCH", "/key/%s", "1", "", "", "123", "234", false, null, "type", "345", new HashMap<>(), null, "metricId_01", retainResponseType); assertThrows(InvalidHttpVerbException.class, () -> HttpRequestFactory.createRequest(httpSourceConfig, httpClient, requestVariablesValues.toArray(), headerVariablesValues.toArray(), endpointVariablesValues.toArray())); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgAsyncConnectorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgAsyncConnectorTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgAsyncConnectorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgAsyncConnectorTest.java index 21909f98e..afa1f1832 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgAsyncConnectorTest.java +++ 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgAsyncConnectorTest.java @@ -1,17 +1,17 @@ -package io.odpf.dagger.core.processors.external.pg; - -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +package com.gotocompany.dagger.core.processors.external.pg; + +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; +import com.gotocompany.stencil.client.StencilClient; import io.vertx.pgclient.PgPool; import io.vertx.sqlclient.RowSet; import org.apache.flink.configuration.Configuration; @@ -28,7 +28,6 @@ import java.util.List; import java.util.concurrent.TimeoutException; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.*; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; @@ -79,14 +78,14 @@ public void setUp() { boolean telemetryEnabled = true; long shutDownPeriod = 0L; - inputProtoClasses = new String[]{"io.odpf.consumer.TestLogMessage"}; + inputProtoClasses = new String[]{"com.gotocompany.consumer.TestLogMessage"}; metricId = "metricId-pg-01"; externalMetricConfig = new ExternalMetricConfig(metricId, shutDownPeriod, telemetryEnabled); stencilClient = mock(StencilClient.class); when(schemaConfig.getInputProtoClasses()).thenReturn(inputProtoClasses); when(schemaConfig.getColumnNameManager()).thenReturn(new ColumnNameManager(inputColumnNames, new ArrayList<>())); when(schemaConfig.getStencilClientOrchestrator()).thenReturn(stencilClientOrchestrator); - when(schemaConfig.getOutputProtoClassName()).thenReturn("io.odpf.consumer.TestBookingLogMessage"); + when(schemaConfig.getOutputProtoClassName()).thenReturn("com.gotocompany.consumer.TestBookingLogMessage"); when(stencilClient.get(inputProtoClasses[0])).thenReturn(TestBookingLogMessage.getDescriptor()); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); } @@ -98,7 +97,7 @@ private PgSourceConfigBuilder getPgSourceConfigBuilder() { .setUser("user") .setPassword("password") .setDatabase("db") - .setType("io.odpf.consumer.TestFlattenLogMessage") + 
.setType("com.gotocompany.consumer.TestFlattenLogMessage") .setCapacity("30") .setStreamTimeout("5000") .setOutputMapping(outputMapping) @@ -121,7 +120,7 @@ public void shouldFetchDescriptorInInvoke() throws Exception { pgAsyncConnector.open(configuration); pgAsyncConnector.asyncInvoke(streamRow, resultFuture); - verify(stencilClient, times(1)).get("io.odpf.consumer.TestFlattenLogMessage"); + verify(stencilClient, times(1)).get("com.gotocompany.consumer.TestFlattenLogMessage"); } @Test @@ -134,15 +133,15 @@ public void shouldCompleteExceptionallyIfDescriptorNotFound() throws Exception { pgAsyncConnector.open(configuration); - assertThrows(NullPointerException.class, + assertThrows(NullPointerException.class, () -> pgAsyncConnector.asyncInvoke(streamRow, resultFuture)); ArgumentCaptor reportFatalExceptionCaptor = ArgumentCaptor.forClass(DescriptorNotFoundException.class); verify(errorReporter, times(1)).reportFatalException(reportFatalExceptionCaptor.capture()); - assertEquals("No Descriptor found for class io.odpf.consumer.TestLogMessage", reportFatalExceptionCaptor.getValue().getMessage()); + assertEquals("No Descriptor found for class com.gotocompany.consumer.TestLogMessage", reportFatalExceptionCaptor.getValue().getMessage()); ArgumentCaptor decriptorNotFoundCaptor = ArgumentCaptor.forClass(DescriptorNotFoundException.class); verify(resultFuture, times(1)).completeExceptionally(decriptorNotFoundCaptor.capture()); - assertEquals("No Descriptor found for class io.odpf.consumer.TestLogMessage", decriptorNotFoundCaptor.getValue().getMessage()); + assertEquals("No Descriptor found for class com.gotocompany.consumer.TestLogMessage", decriptorNotFoundCaptor.getValue().getMessage()); } @@ -210,12 +209,12 @@ public void shouldEnrichWhenQueryVariableFieldIsNullOrRemovedButNotRequiredInPat verify(meterStatsManager, times(0)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor invalidConfigCaptor = ArgumentCaptor.forClass(DescriptorNotFoundException.class); verify(resultFuture, times(1)).completeExceptionally(invalidConfigCaptor.capture()); - assertEquals("No Descriptor found for class io.odpf.consumer.TestFlattenLogMessage", + assertEquals("No Descriptor found for class com.gotocompany.consumer.TestFlattenLogMessage", invalidConfigCaptor.getValue().getMessage()); ArgumentCaptor reportExceptionCaptor = ArgumentCaptor.forClass(DescriptorNotFoundException.class); verify(errorReporter, times(1)).reportFatalException(reportExceptionCaptor.capture()); - assertEquals("No Descriptor found for class io.odpf.consumer.TestFlattenLogMessage", + assertEquals("No Descriptor found for class com.gotocompany.consumer.TestFlattenLogMessage", reportExceptionCaptor.getValue().getMessage()); verify(pgClient, times(1)).query(any(String.class)); @@ -237,7 +236,7 @@ public void shouldGiveErrorWhenQueryPatternIsInvalid() throws Exception { verify(resultFuture, times(1)).completeExceptionally(invalidConfigCaptor.capture()); assertEquals("pattern config 'select * from public.customers where customer_id = %' is invalid", invalidConfigCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor reportExceptionCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(reportExceptionCaptor.capture()); assertEquals("pattern config 'select * from public.customers where customer_id = %' is 
invalid", @@ -262,7 +261,7 @@ public void shouldGiveErrorWhenQueryPatternIsIncompatible() throws Exception { verify(resultFuture, times(1)).completeExceptionally(invalidConfigCaptor.capture()); assertEquals("pattern config 'select * from public.customers where customer_id = '%d'' is incompatible with the variable config 'customer_id'", invalidConfigCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); ArgumentCaptor reportExceptionCaptor = ArgumentCaptor.forClass(InvalidConfigurationException.class); verify(errorReporter, times(1)).reportFatalException(reportExceptionCaptor.capture()); assertEquals("pattern config 'select * from public.customers where customer_id = '%d'' is incompatible with the variable config 'customer_id'", @@ -327,7 +326,7 @@ public void shouldNotEnrichOutputOnTimeout() throws Exception { pgAsyncConnector.open(configuration); pgAsyncConnector.timeout(streamRow, resultFuture); - verify(meterStatsManager, times(1)).markEvent(TIMEOUTS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TIMEOUTS); ArgumentCaptor timeoutCaptor = ArgumentCaptor.forClass(TimeoutException.class); verify(errorReporter, times(1)).reportNonFatalException(timeoutCaptor.capture()); assertEquals("Timeout in external source call!", timeoutCaptor.getValue().getMessage()); @@ -362,7 +361,7 @@ public void shouldClosePgClientAndSetItToNullMarkingCloseConnectionEvent() { verify(pgClient, times(1)).close(); assertNull(pgAsyncConnector.getPgClient()); - verify(meterStatsManager, times(1)).markEvent(CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.CLOSE_CONNECTION_ON_EXTERNAL_CLIENT); } @Test @@ -375,7 +374,7 @@ public void shouldReportFatalExceptionAndCompleteExceptionallyWhenFailOnErrorsIs pgAsyncConnector.open(configuration); pgAsyncConnector.timeout(streamRow, resultFuture); - verify(meterStatsManager, times(1)).markEvent(TIMEOUTS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TIMEOUTS); ArgumentCaptor timeoutCaptor = ArgumentCaptor.forClass(TimeoutException.class); verify(errorReporter, times(1)).reportFatalException(timeoutCaptor.capture()); assertEquals("Timeout in external source call!", timeoutCaptor.getValue().getMessage()); @@ -397,7 +396,7 @@ public void shouldGetDescriptorFromOutputProtoIfTypeNotGiven() throws Exception pgAsyncConnector.open(configuration); pgAsyncConnector.asyncInvoke(streamRow, resultFuture); - verify(stencilClient, times(1)).get("io.odpf.consumer.TestBookingLogMessage"); + verify(stencilClient, times(1)).get("com.gotocompany.consumer.TestBookingLogMessage"); } @Test @@ -409,7 +408,7 @@ public void shouldGetDescriptorFromTypeIfGiven() throws Exception { pgAsyncConnector.open(configuration); pgAsyncConnector.asyncInvoke(streamRow, resultFuture); - verify(stencilClient, times(1)).get("io.odpf.consumer.TestFlattenLogMessage"); + verify(stencilClient, times(1)).get("com.gotocompany.consumer.TestFlattenLogMessage"); } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandlerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgResponseHandlerTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandlerTest.java rename to 
dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgResponseHandlerTest.java index 693060719..466f68ad5 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandlerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgResponseHandlerTest.java @@ -1,15 +1,16 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.metrics.aspects.Aspects; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestSurgeFactorLogMessage; -import io.odpf.dagger.core.exception.HttpFailureException; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.exception.HttpFailureException; +import com.gotocompany.dagger.core.metrics.aspects.ExternalSourceAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.PostResponseTelemetry; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage; import io.vertx.core.AsyncResult; import io.vertx.sqlclient.RowIterator; import io.vertx.sqlclient.RowSet; @@ -25,7 +26,6 @@ import java.util.HashMap; import java.util.List; -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.*; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; @@ -119,7 +119,7 @@ public void shouldGoToSuccessHandlerAndMarkSuccessResponseIfEventSucceedsAndResu pgResponseHandler.handle(event); verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); } @@ -134,7 +134,7 @@ public void shouldGoToSuccessHandlerButReturnWithMarkingInvalidConfigIfEventSucc pgResponseHandler.handle(event); verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(meterStatsManager, never()).updateHistogram(any(Aspects.class), any(Long.class)); } @@ -149,7 +149,7 @@ public void shouldGoToSuccessHandlerButCompleteWithNonFatalErrorWhenFailOnErrorI pgResponseHandler.handle(event); verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(meterStatsManager, never()).updateHistogram(any(Aspects.class), any(Long.class)); verify(errorReporter, 
times(1)).reportNonFatalException(any(Exception.class)); } @@ -168,7 +168,7 @@ public void shouldGoToSuccessHandlerButCompleteExceptionallyWithFatalErrorWhenFa pgResponseHandler.handle(event); verify(resultFuture, times(1)).completeExceptionally(any(Exception.class)); - verify(meterStatsManager, times(1)).markEvent(INVALID_CONFIGURATION); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.INVALID_CONFIGURATION); verify(meterStatsManager, never()).updateHistogram(any(Aspects.class), any(Long.class)); verify(errorReporter, times(1)).reportFatalException(any(Exception.class)); } @@ -183,7 +183,7 @@ public void shouldGoToFailureHandlerIfEventFails() { pgResponseHandler.handle(event); verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); } @@ -209,7 +209,7 @@ public void shouldReportFatalExceptionAndCompleteExceptionallyWhenEventComesToFa assertEquals("PgResponseHandler : Failed with error. failure message!", resultFutureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); } @@ -227,7 +227,7 @@ public void shouldReportNonFatalExceptionAndCompleteWhenEventComesToFailureHandl assertEquals("PgResponseHandler : Failed with error. failure message!", nonFatalExcepCaptor.getValue().getMessage()); verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.TOTAL_FAILED_REQUESTS); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); } @@ -269,7 +269,7 @@ public void shouldPopulateResultAsObjectIfTypeIsNotPassedAndRetainResponseTypeIs pgResponseHandler.startTimer(); pgResponseHandler.handle(event); verify(resultFuture, times(1)).complete(Collections.singleton(outputStreamRow)); - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); } @@ -312,7 +312,7 @@ public void shouldNotPopulateResultAsObjectIfTypeIsNotPassedAndRetainResponseTyp pgResponseHandler.startTimer(); pgResponseHandler.handle(event); verify(resultFuture, times(1)).complete(Collections.singleton(outputStreamRow)); - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); + verify(meterStatsManager, times(1)).markEvent(ExternalSourceAspects.SUCCESS_RESPONSE); verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfigTest.java similarity index 99% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfigTest.java index a39381c4b..9c1850783 100644 --- 
a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgSourceConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgSourceConfigTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgStreamDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgStreamDecoratorTest.java similarity index 88% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgStreamDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgStreamDecoratorTest.java index 242eccc6c..386142e58 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/pg/PgStreamDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/external/pg/PgStreamDecoratorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.external.pg; +package com.gotocompany.dagger.core.processors.external.pg; -import io.odpf.dagger.core.processors.external.ExternalMetricConfig; -import io.odpf.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.external.ExternalMetricConfig; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalDecoratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalDecoratorTest.java similarity index 88% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalDecoratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalDecoratorTest.java index 685cf5b23..e961e2cdd 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalDecoratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalDecoratorTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.internal; +package com.gotocompany.dagger.core.processors.internal; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.processor.InternalConfigProcessor; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.processor.InternalConfigProcessor; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalPostProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalPostProcessorTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalPostProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalPostProcessorTest.java index 582c2e901..27894f734 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalPostProcessorTest.java +++ 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalPostProcessorTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal; +package com.gotocompany.dagger.core.processors.internal; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.external.ExternalSourceConfig; -import io.odpf.dagger.core.processors.transformers.TransformConfig; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.external.ExternalSourceConfig; +import com.gotocompany.dagger.core.processors.transformers.TransformConfig; import org.apache.flink.streaming.api.datastream.DataStream; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalSourceConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalSourceConfigTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalSourceConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalSourceConfigTest.java index 0b7b44921..bd75538ed 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/InternalSourceConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/InternalSourceConfigTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.internal; +package com.gotocompany.dagger.core.processors.internal; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigHandlerFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigHandlerFactoryTest.java similarity index 71% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigHandlerFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigHandlerFactoryTest.java index 25ee2f4f1..c9eb6cabe 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/InternalConfigHandlerFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/InternalConfigHandlerFactoryTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.internal.processor; +package com.gotocompany.dagger.core.processors.internal.processor; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.constant.ConstantInternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.function.FunctionInternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.invalid.InvalidInternalConfigProcessor; -import io.odpf.dagger.core.processors.internal.processor.sql.fields.SqlInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.constant.ConstantInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.function.FunctionInternalConfigProcessor; +import com.gotocompany.dagger.core.processors.internal.processor.invalid.InvalidInternalConfigProcessor; +import 
com.gotocompany.dagger.core.processors.internal.processor.sql.fields.SqlInternalConfigProcessor; import org.junit.Test; import static org.junit.Assert.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessorTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessorTest.java index 613dd14e8..cf3ade2a6 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/constant/ConstantInternalConfigProcessorTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.internal.processor.constant; +package com.gotocompany.dagger.core.processors.internal.processor.constant; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessorTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessorTest.java index 9f5891b74..af2c06b72 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionInternalConfigProcessorTest.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.processors.internal.processor.function; - -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.function.functions.CurrentTimestampFunction; -import io.odpf.dagger.core.processors.internal.processor.function.functions.InvalidFunction; -import io.odpf.dagger.core.processors.internal.processor.function.functions.JsonPayloadFunction; +package com.gotocompany.dagger.core.processors.internal.processor.function; + +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import 
com.gotocompany.dagger.core.processors.internal.processor.function.functions.CurrentTimestampFunction; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.InvalidFunction; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.JsonPayloadFunction; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessorFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessorFactoryTest.java similarity index 76% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessorFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessorFactoryTest.java index 67a6f1e01..9226ac453 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/FunctionProcessorFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/FunctionProcessorFactoryTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.function; +package com.gotocompany.dagger.core.processors.internal.processor.function; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.function.functions.CurrentTimestampFunction; -import io.odpf.dagger.core.processors.internal.processor.function.functions.InvalidFunction; -import io.odpf.dagger.core.processors.internal.processor.function.functions.JsonPayloadFunction; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.CurrentTimestampFunction; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.InvalidFunction; +import com.gotocompany.dagger.core.processors.internal.processor.function.functions.JsonPayloadFunction; import org.junit.Test; import static org.junit.Assert.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunctionTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunctionTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunctionTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunctionTest.java index a9e1a533e..76702ed2a 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunctionTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/CurrentTimestampFunctionTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.internal.processor.function.functions; +package com.gotocompany.dagger.core.processors.internal.processor.function.functions; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/InvalidFunctionTest.java 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/InvalidFunctionTest.java similarity index 87% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/InvalidFunctionTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/InvalidFunctionTest.java index 9fc25b157..78abca7a1 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/InvalidFunctionTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/InvalidFunctionTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.internal.processor.function.functions; +package com.gotocompany.dagger.core.processors.internal.processor.function.functions; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunctionTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunctionTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunctionTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunctionTest.java index a13da4371..da606653a 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunctionTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/function/functions/JsonPayloadFunctionTest.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.core.processors.internal.processor.function.functions; +package com.gotocompany.dagger.core.processors.internal.processor.function.functions; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.typehandler.RowFactory; -import io.odpf.dagger.consumer.*; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.common.SchemaConfig; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.typehandler.RowFactory; +import com.gotocompany.dagger.consumer.*; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.common.SchemaConfig; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.stencil.client.StencilClient; 
import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; @@ -38,9 +38,9 @@ public class JsonPayloadFunctionTest { @Before public void setup() throws InvalidProtocolBufferException { initMocks(this); - commonInternalSourceConfig = getInternalSourceConfigForProtoClass("io.odpf.dagger.consumer.TestBookingLogMessage"); + commonInternalSourceConfig = getInternalSourceConfigForProtoClass("com.gotocompany.dagger.consumer.TestBookingLogMessage"); - commonSchemaConfig = getSchemaConfigForProtoAndDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage", TestBookingLogMessage.getDescriptor()); + commonSchemaConfig = getSchemaConfigForProtoAndDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage", TestBookingLogMessage.getDescriptor()); TestBookingLogMessage customerLogMessage = TestBookingLogMessage.newBuilder().build(); DynamicMessage dynamicMessage = DynamicMessage.parseFrom(TestBookingLogMessage.getDescriptor(), customerLogMessage.toByteArray()); @@ -99,7 +99,7 @@ public void shouldThrowExceptionWhenInternalProcessorConfigIsNull() { InvalidConfigurationException invalidConfigException = assertThrows(InvalidConfigurationException.class, () -> { - jsonPayloadFunction.getResult(commonRowManager); + jsonPayloadFunction.getResult(commonRowManager); }); assertEquals("Invalid internal source configuration: missing internal processor config", invalidConfigException.getMessage()); @@ -132,7 +132,7 @@ public void shouldThrowExceptionWhenStencilClientIsNull() { InvalidConfigurationException invalidConfigException = assertThrows(InvalidConfigurationException.class, () -> { - jsonPayloadFunction.getResult(commonRowManager); + jsonPayloadFunction.getResult(commonRowManager); }); assertEquals("Invalid configuration: stencil client is null", invalidConfigException.getMessage()); @@ -149,7 +149,7 @@ public void shouldGetJsonPayloadAsResult() { @Test public void shouldGetCorrectJsonPayloadForStringFields() throws InvalidProtocolBufferException { - String protoClass = "io.odpf.dagger.consumer.TestMessage"; + String protoClass = "com.gotocompany.dagger.consumer.TestMessage"; InternalSourceConfig internalSourceConfig = getInternalSourceConfigForProtoClass(protoClass); SchemaConfig schemaConfig = getSchemaConfigForProtoAndDescriptor(protoClass, TestMessage.getDescriptor()); @@ -166,7 +166,7 @@ public void shouldGetCorrectJsonPayloadForStringFields() throws InvalidProtocolB @Test public void shouldGetCorrectJsonPayloadForNestedFields() throws InvalidProtocolBufferException { - String protoClass = "io.odpf.dagger.consumer.TestNestedMessage"; + String protoClass = "com.gotocompany.dagger.consumer.TestNestedMessage"; InternalSourceConfig internalSourceConfig = getInternalSourceConfigForProtoClass(protoClass); SchemaConfig schemaConfig = getSchemaConfigForProtoAndDescriptor(protoClass, TestNestedMessage.getDescriptor()); @@ -188,7 +188,7 @@ public void shouldGetCorrectJsonPayloadForNestedFields() throws InvalidProtocolB @Test public void shouldGetCorrectJsonPayloadForRepeatedFields() throws InvalidProtocolBufferException { - String protoClass = "io.odpf.dagger.consumer.TestNestedRepeatedMessage"; + String protoClass = "com.gotocompany.dagger.consumer.TestNestedRepeatedMessage"; InternalSourceConfig internalSourceConfig = getInternalSourceConfigForProtoClass(protoClass); SchemaConfig schemaConfig = getSchemaConfigForProtoAndDescriptor(protoClass, TestNestedRepeatedMessage.getDescriptor()); @@ -213,7 +213,7 @@ public void shouldGetCorrectJsonPayloadForRepeatedFields() throws 
InvalidProtoco @Test public void shouldGetCorrectJsonPayloadForMapFields() throws InvalidProtocolBufferException { - String protoClass = "io.odpf.dagger.consumer.TestMapMessage"; + String protoClass = "com.gotocompany.dagger.consumer.TestMapMessage"; InternalSourceConfig internalSourceConfig = getInternalSourceConfigForProtoClass(protoClass); SchemaConfig schemaConfig = getSchemaConfigForProtoAndDescriptor(protoClass, TestMapMessage.getDescriptor()); @@ -234,7 +234,7 @@ public void shouldGetCorrectJsonPayloadForMapFields() throws InvalidProtocolBuff @Test public void shouldGetCorrectJsonPayloadForRepeatedEnumFields() throws InvalidProtocolBufferException { - String protoClass = "io.odpf.dagger.consumer.TestRepeatedEnumMessage"; + String protoClass = "com.gotocompany.dagger.consumer.TestRepeatedEnumMessage"; InternalSourceConfig internalSourceConfig = getInternalSourceConfigForProtoClass(protoClass); SchemaConfig schemaConfig = getSchemaConfigForProtoAndDescriptor(protoClass, TestRepeatedEnumMessage.getDescriptor()); @@ -256,7 +256,7 @@ public void shouldGetCorrectJsonPayloadForRepeatedEnumFields() throws InvalidPro @Test public void shouldGetCorrectJsonPayloadForComplexFields() throws InvalidProtocolBufferException { - String protoClass = "io.odpf.dagger.consumer.TestComplexMap"; + String protoClass = "com.gotocompany.dagger.consumer.TestComplexMap"; InternalSourceConfig internalSourceConfig = getInternalSourceConfigForProtoClass(protoClass); SchemaConfig schemaConfig = getSchemaConfigForProtoAndDescriptor(protoClass, TestComplexMap.getDescriptor()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessorTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessorTest.java index 50fdbe693..228eaaa7d 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/invalid/InvalidInternalConfigProcessorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.processors.internal.processor.invalid; +package com.gotocompany.dagger.core.processors.internal.processor.invalid; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; import org.junit.Test; import static org.junit.Assert.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParserTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParserTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParserTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParserTest.java index 69d671da7..c203b81a6 100644 --- 
a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParserTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/SqlConfigTypePathParserTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.sql; +package com.gotocompany.dagger.core.processors.internal.processor.sql; -import io.odpf.dagger.core.exception.InvalidConfigurationException; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImportTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImportTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImportTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImportTest.java index 2ffccea56..3c7a4f404 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImportTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalAutoFieldImportTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessorTest.java similarity index 88% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessorTest.java index 6b2d02027..3ef86aeeb 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalConfigProcessorTest.java @@ -1,9 +1,9 @@ -package 
io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactoryTest.java similarity index 80% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactoryTest.java index 398689fe4..075502a30 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldFactoryTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlInternalFieldConfig; import org.junit.Test; import java.util.Arrays; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImportTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImportTest.java similarity index 86% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImportTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImportTest.java index 4528b4697..e7f0c3e1c 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImportTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/internal/processor/sql/fields/SqlInternalFieldImportTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.internal.processor.sql.fields; +package com.gotocompany.dagger.core.processors.internal.processor.sql.fields; -import io.odpf.dagger.core.processors.ColumnNameManager; -import 
io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.processors.internal.InternalSourceConfig; -import io.odpf.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; +import com.gotocompany.dagger.core.processors.ColumnNameManager; +import com.gotocompany.dagger.core.processors.common.RowManager; +import com.gotocompany.dagger.core.processors.internal.InternalSourceConfig; +import com.gotocompany.dagger.core.processors.internal.processor.sql.SqlConfigTypePathParser; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowFactoryTest.java similarity index 86% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowFactoryTest.java index bfa86bd40..34dafa6a2 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowFactoryTest.java @@ -1,21 +1,21 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import org.apache.flink.streaming.api.datastream.DataStream; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.longbow.processor.LongbowReader; -import io.odpf.dagger.core.processors.longbow.processor.LongbowWriter; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.longbow.processor.LongbowReader; +import com.gotocompany.dagger.core.processors.longbow.processor.LongbowWriter; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowProcessorTest.java similarity index 88% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowProcessorTest.java index d162de83a..94f7fd662 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowProcessorTest.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.processors.longbow; +package 
com.gotocompany.dagger.core.processors.longbow; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.longbow.columnmodifier.ColumnModifier; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.ColumnModifier; import org.junit.Before; import org.junit.Rule; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowReadColumnModifierTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowReadColumnModifierTest.java similarity index 86% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowReadColumnModifierTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowReadColumnModifierTest.java index c52d7cc2c..0e58c631f 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowReadColumnModifierTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowReadColumnModifierTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; -import io.odpf.dagger.core.processors.longbow.columnmodifier.LongbowReadColumnModifier; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.LongbowReadColumnModifier; import org.junit.Test; import static org.junit.Assert.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowSchemaTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowSchemaTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowSchemaTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowSchemaTest.java index 477ac5fe1..9591abc49 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowSchemaTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowSchemaTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.exception.InvalidLongbowDurationException; -import io.odpf.dagger.core.processors.longbow.validator.LongbowType; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.exception.InvalidLongbowDurationException; +import com.gotocompany.dagger.core.processors.longbow.validator.LongbowType; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowWriteColumnModifierTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowWriteColumnModifierTest.java similarity index 87% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowWriteColumnModifierTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowWriteColumnModifierTest.java 
index b678d661f..d4b320cdb 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/LongbowWriteColumnModifierTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/LongbowWriteColumnModifierTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow; +package com.gotocompany.dagger.core.processors.longbow; -import io.odpf.dagger.core.processors.longbow.columnmodifier.LongbowWriteColumnModifier; +import com.gotocompany.dagger.core.processors.longbow.columnmodifier.LongbowWriteColumnModifier; import org.junit.Test; import static org.junit.Assert.assertArrayEquals; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowDataFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowDataFactoryTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowDataFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowDataFactoryTest.java index c71faa9c1..244ca97fb 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowDataFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowDataFactoryTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowProtoDataTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowProtoDataTest.java similarity index 79% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowProtoDataTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowProtoDataTest.java index 057c8ba50..82ad688fb 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowProtoDataTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowProtoDataTest.java @@ -1,5 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; @@ -8,15 +9,13 @@ import java.util.*; -import static io.odpf.dagger.core.utils.Constants.LONGBOW_COLUMN_FAMILY_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.LONGBOW_QUALIFIER_DEFAULT; import static org.junit.Assert.*; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; public class LongbowProtoDataTest { - private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes(LONGBOW_COLUMN_FAMILY_DEFAULT); + private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes(Constants.LONGBOW_COLUMN_FAMILY_DEFAULT); @Mock private Result scanResult; @@ -31,7 +30,7 @@ public void shouldParseProtoByteDataFromBigTable() { ArrayList results = new ArrayList<>(); results.add(scanResult); byte[] mockResult = Bytes.toBytes("test"); - when(scanResult.getValue(COLUMN_FAMILY_NAME, 
Bytes.toBytes(LONGBOW_QUALIFIER_DEFAULT))).thenReturn(mockResult); + when(scanResult.getValue(COLUMN_FAMILY_NAME, Bytes.toBytes(Constants.LONGBOW_QUALIFIER_DEFAULT))).thenReturn(mockResult); LongbowProtoData longbowProtoData = new LongbowProtoData(); Map> actualMap = longbowProtoData.parse(results); Map> expectedMap = new HashMap>() {{ diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowTableDataTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowTableDataTest.java similarity index 92% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowTableDataTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowTableDataTest.java index 20eadf8dd..fe7a58779 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/data/LongbowTableDataTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/data/LongbowTableDataTest.java @@ -1,6 +1,7 @@ -package io.odpf.dagger.core.processors.longbow.data; +package com.gotocompany.dagger.core.processors.longbow.data; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.utils.Constants; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; @@ -9,14 +10,13 @@ import java.util.*; -import static io.odpf.dagger.core.utils.Constants.LONGBOW_COLUMN_FAMILY_DEFAULT; import static org.junit.Assert.*; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; public class LongbowTableDataTest { - private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes(LONGBOW_COLUMN_FAMILY_DEFAULT); + private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes(Constants.LONGBOW_COLUMN_FAMILY_DEFAULT); @Mock private Result result1; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputLongbowDataTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputLongbowDataTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputLongbowDataTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputLongbowDataTest.java index 6e2722dba..e4a4565d4 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputLongbowDataTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputLongbowDataTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.junit.Before; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputProtoDataTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputProtoDataTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputProtoDataTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputProtoDataTest.java index 
8ca003374..e3fd0c18a 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputProtoDataTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputProtoDataTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputSynchronizerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputSynchronizerTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputSynchronizerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputSynchronizerTest.java index 992e301e2..6e36962b9 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/outputRow/OutputSynchronizerTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/outputRow/OutputSynchronizerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.outputRow; +package com.gotocompany.dagger.core.processors.longbow.outputRow; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.junit.Before; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReaderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowReaderTest.java similarity index 77% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReaderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowReaderTest.java index ea59255cc..59ce70bb2 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReaderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowReaderTest.java @@ -1,20 +1,22 @@ -package io.odpf.dagger.core.processors.longbow.processor; +package com.gotocompany.dagger.core.processors.longbow.processor; +import com.gotocompany.dagger.core.metrics.aspects.LongbowReaderAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.data.LongbowData; -import io.odpf.dagger.core.processors.longbow.exceptions.LongbowReaderException; -import io.odpf.dagger.core.processors.longbow.outputRow.ReaderOutputRow; -import io.odpf.dagger.core.processors.longbow.range.LongbowAbsoluteRange; 
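A second recurring pattern in these test changes is dropping static imports of Constants values and the metric aspect enums in favour of qualified references. A minimal sketch of the two equivalent styles, using the same constant names that appear in the surrounding hunks (LongbowColumnFamily is a hypothetical helper, not part of the diff):

    // Style removed in these hunks: static imports of individual constants, e.g.
    //   import static io.odpf.dagger.core.utils.Constants.*;
    //   byte[] cf = Bytes.toBytes(LONGBOW_COLUMN_FAMILY_DEFAULT);
    // Style used after the change: qualified references through the Constants class.
    import com.gotocompany.dagger.core.utils.Constants;
    import org.apache.hadoop.hbase.util.Bytes;

    class LongbowColumnFamily { // hypothetical helper, not part of the diff
        static byte[] defaultColumnFamily() {
            return Bytes.toBytes(Constants.LONGBOW_COLUMN_FAMILY_DEFAULT);
        }
    }
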
-import io.odpf.dagger.core.processors.longbow.request.ScanRequestFactory; -import io.odpf.dagger.core.processors.longbow.storage.LongbowStore; -import io.odpf.dagger.core.processors.longbow.storage.ScanRequest; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.data.LongbowData; +import com.gotocompany.dagger.core.processors.longbow.exceptions.LongbowReaderException; +import com.gotocompany.dagger.core.processors.longbow.outputRow.ReaderOutputRow; +import com.gotocompany.dagger.core.processors.longbow.range.LongbowAbsoluteRange; +import com.gotocompany.dagger.core.processors.longbow.request.ScanRequestFactory; +import com.gotocompany.dagger.core.processors.longbow.storage.LongbowStore; +import com.gotocompany.dagger.core.processors.longbow.storage.ScanRequest; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -28,10 +30,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeoutException; -import static io.odpf.dagger.core.metrics.aspects.LongbowReaderAspects.CLOSE_CONNECTION_ON_READER; -import static io.odpf.dagger.core.metrics.aspects.LongbowReaderAspects.FAILED_ON_READ_DOCUMENT; -import static io.odpf.dagger.core.metrics.aspects.LongbowReaderAspects.TIMEOUTS_ON_READER; -import static io.odpf.dagger.core.utils.Constants.*; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; @@ -69,9 +67,9 @@ public class LongbowReaderTest { @Before public void setup() { initMocks(this); - when(configuration.getString(PROCESSOR_LONGBOW_GCP_PROJECT_ID_KEY, PROCESSOR_LONGBOW_GCP_PROJECT_ID_DEFAULT)).thenReturn("test-project"); - when(configuration.getString(PROCESSOR_LONGBOW_GCP_INSTANCE_ID_KEY, PROCESSOR_LONGBOW_GCP_INSTANCE_ID_DEFAULT)).thenReturn("test-instance"); - when(configuration.getString(DAGGER_NAME_KEY, DAGGER_NAME_DEFAULT)).thenReturn("test-job"); + when(configuration.getString(Constants.PROCESSOR_LONGBOW_GCP_PROJECT_ID_KEY, Constants.PROCESSOR_LONGBOW_GCP_PROJECT_ID_DEFAULT)).thenReturn("test-project"); + when(configuration.getString(Constants.PROCESSOR_LONGBOW_GCP_INSTANCE_ID_KEY, Constants.PROCESSOR_LONGBOW_GCP_INSTANCE_ID_DEFAULT)).thenReturn("test-instance"); + when(configuration.getString(Constants.DAGGER_NAME_KEY, Constants.DAGGER_NAME_DEFAULT)).thenReturn("test-job"); currentTimestamp = new Timestamp(System.currentTimeMillis()); String[] columnNames = {"longbow_key", "longbow_data1", "rowtime", "longbow_duration"}; defaultLongBowSchema = new LongbowSchema(columnNames); @@ -101,7 +99,7 @@ public void completeExceptionally(Throwable error) { longBowReader.open(flinkInternalConfig); longBowReader.asyncInvoke(input, callback); countDownLatch.await(); - verify(meterStatsManager, times(1)).markEvent(FAILED_ON_READ_DOCUMENT); + verify(meterStatsManager, times(1)).markEvent(LongbowReaderAspects.FAILED_ON_READ_DOCUMENT); verify(errorReporter, times(1)).reportNonFatalException(any(LongbowReaderException.class)); } @@ -114,7 +112,7 @@ public void shouldHandleClose() throws Exception { longBowReader.close(); verify(longBowStore, times(1)).close(); - verify(meterStatsManager, times(1)).markEvent(CLOSE_CONNECTION_ON_READER); + verify(meterStatsManager, times(1)).markEvent(LongbowReaderAspects.CLOSE_CONNECTION_ON_READER); } @Test @@ -159,7 +157,7 @@ public 
void shouldFailOnTimeout() throws Exception { longBowReader.timeout(input, resultFuture); - verify(meterStatsManager, times(1)).markEvent(TIMEOUTS_ON_READER); + verify(meterStatsManager, times(1)).markEvent(LongbowReaderAspects.TIMEOUTS_ON_READER); verify(errorReporter, times(1)).reportFatalException(any(TimeoutException.class)); verify(resultFuture, times(1)).completeExceptionally(any(TimeoutException.class)); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowWriterTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowWriterTest.java index 2d6313ea6..85170c4c6 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/processor/LongbowWriterTest.java @@ -1,22 +1,22 @@ -package io.odpf.dagger.core.processors.longbow.processor; +package com.gotocompany.dagger.core.processors.longbow.processor; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.metrics.aspects.LongbowWriterAspects; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetrySubscriber; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.core.metrics.aspects.LongbowWriterAspects; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.telemetry.TelemetrySubscriber; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.exceptions.LongbowWriterException; -import io.odpf.dagger.core.processors.longbow.outputRow.OutputIdentity; -import io.odpf.dagger.core.processors.longbow.outputRow.WriterOutputRow; -import io.odpf.dagger.core.processors.longbow.request.PutRequestFactory; -import io.odpf.dagger.core.processors.longbow.storage.LongbowStore; -import io.odpf.dagger.core.processors.longbow.storage.PutRequest; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.exceptions.LongbowWriterException; +import com.gotocompany.dagger.core.processors.longbow.outputRow.OutputIdentity; +import com.gotocompany.dagger.core.processors.longbow.outputRow.WriterOutputRow; +import com.gotocompany.dagger.core.processors.longbow.request.PutRequestFactory; +import com.gotocompany.dagger.core.processors.longbow.storage.LongbowStore; +import com.gotocompany.dagger.core.processors.longbow.storage.PutRequest; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/range/LongbowAbsoluteRangeTest.java 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowAbsoluteRangeTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/range/LongbowAbsoluteRangeTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowAbsoluteRangeTest.java index 091a5520e..0f9274973 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/range/LongbowAbsoluteRangeTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowAbsoluteRangeTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.range; +package com.gotocompany.dagger.core.processors.longbow.range; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/range/LongbowDurationRangeTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowDurationRangeTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/range/LongbowDurationRangeTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowDurationRangeTest.java index 5544909bf..5d571ef78 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/range/LongbowDurationRangeTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/range/LongbowDurationRangeTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.range; +package com.gotocompany.dagger.core.processors.longbow.range; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java index 457acc48a..2616a7231 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; import org.apache.flink.types.Row; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoByteScanRequestTest.java 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoByteScanRequestTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoByteScanRequestTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoByteScanRequestTest.java index 0f820fbec..4c5e31dd7 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoByteScanRequestTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ProtoByteScanRequestTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/PutRequestFactoryTest.java similarity index 82% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/PutRequestFactoryTest.java index c4935b044..652b8d6b9 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/PutRequestFactoryTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; import org.apache.flink.types.Row; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.storage.PutRequest; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.storage.PutRequest; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ScanRequestFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ScanRequestFactoryTest.java similarity index 83% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ScanRequestFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ScanRequestFactoryTest.java index 19be3e1f0..7362f82ef 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ScanRequestFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/ScanRequestFactoryTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; import org.apache.flink.types.Row; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; -import io.odpf.dagger.core.processors.longbow.range.LongbowRange; -import io.odpf.dagger.core.processors.longbow.storage.ScanRequest; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.range.LongbowRange; +import 
com.gotocompany.dagger.core.processors.longbow.storage.ScanRequest; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/TablePutRequestTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/TablePutRequestTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/TablePutRequestTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/TablePutRequestTest.java index 5493b5b4a..06771e3a6 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/TablePutRequestTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/TablePutRequestTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/TableScanRequestTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/TableScanRequestTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/TableScanRequestTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/TableScanRequestTest.java index dbc1afb6c..dc68e4476 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/TableScanRequestTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/request/TableScanRequestTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.request; +package com.gotocompany.dagger.core.processors.longbow.request; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; +import com.gotocompany.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/validator/LongbowValidatorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowValidatorTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/validator/LongbowValidatorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowValidatorTest.java index 258913fbc..c2644a5b7 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/validator/LongbowValidatorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/longbow/validator/LongbowValidatorTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.core.processors.longbow.validator; +package com.gotocompany.dagger.core.processors.longbow.validator; -import io.odpf.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; import org.junit.Test; import static org.junit.Assert.*; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/telemetry/TelemetryProcessorTest.java 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/telemetry/TelemetryProcessorTest.java similarity index 82% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/telemetry/TelemetryProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/telemetry/TelemetryProcessorTest.java index 68d161921..ddf013ffc 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/telemetry/TelemetryProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/telemetry/TelemetryProcessorTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.core.processors.telemetry; +package com.gotocompany.dagger.core.processors.telemetry; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorConfig; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorConfig; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.types.Row; import org.junit.Before; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/telemetry/processor/MetricsTelemetryExporterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/telemetry/processor/MetricsTelemetryExporterTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/telemetry/processor/MetricsTelemetryExporterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/telemetry/processor/MetricsTelemetryExporterTest.java index 72cf3242e..3a01cd25c 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/telemetry/processor/MetricsTelemetryExporterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/telemetry/processor/MetricsTelemetryExporterTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.processors.telemetry.processor; +package com.gotocompany.dagger.core.processors.telemetry.processor; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryPublisher; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.configuration.Configuration; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.groups.OperatorMetricGroup; import org.apache.flink.types.Row; -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/MockTransformer.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/MockTransformer.java similarity index 79% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/MockTransformer.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/MockTransformer.java index 0b5332f69..991ac1257 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/MockTransformer.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/MockTransformer.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.processors.transformers; +package 
com.gotocompany.dagger.core.processors.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; import java.util.Map; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/TransformConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/TransformConfigTest.java similarity index 78% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/TransformConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/TransformConfigTest.java index bc767ed57..f09be6f24 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/TransformConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/TransformConfigTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.processors.transformers; +package com.gotocompany.dagger.core.processors.transformers; import org.junit.Before; import org.junit.Test; @@ -6,7 +6,8 @@ import java.util.HashMap; import java.util.Map; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; public class TransformConfigTest { @@ -17,7 +18,7 @@ public class TransformConfigTest { @Before public void setUp() { - transformationClass = "io.odpf.daggers.postprocessor.XTransformer"; + transformationClass = "com.gotocompany.daggers.postprocessor.XTransformer"; transformationArguments = new HashMap<>(); transformationArguments.put("keyColumnName", "key"); transformationArguments.put("valueColumnName", "value"); @@ -27,7 +28,7 @@ public void setUp() { @Test public void shouldReturnTransformationClass() { - assertEquals("io.odpf.daggers.postprocessor.XTransformer", defaultTransformConfig.getTransformationClass()); + assertEquals("com.gotocompany.daggers.postprocessor.XTransformer", defaultTransformConfig.getTransformationClass()); } @Test @@ -50,14 +51,14 @@ public void shouldThrowExceptionIfMandatoryFieldsAreMissing() { @Test public void shouldReturnMandatoryFields() { HashMap expectedMandatoryFields = new HashMap<>(); - expectedMandatoryFields.put("transformationClass", "io.odpf.daggers.postprocessor.XTransformer"); + expectedMandatoryFields.put("transformationClass", "com.gotocompany.daggers.postprocessor.XTransformer"); HashMap actualMandatoryFields = defaultTransformConfig.getMandatoryFields(); assertEquals(expectedMandatoryFields.get("transformationClass"), actualMandatoryFields.get("transformationClass")); } @Test public void shouldThrowExceptionIfDefaultFieldsAreOverridden() { - TransformConfig config = new TransformConfig("io.odpf.TestClass", new HashMap() {{ + TransformConfig config = new TransformConfig("com.gotocompany.TestClass", new HashMap() {{ put(TransformerUtils.DefaultArgument.INPUT_SCHEMA_TABLE.toString(), "test-value"); }}); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> config.validateFields()); diff --git 
a/dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/TransformProcessorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/TransformProcessorTest.java similarity index 82% rename from dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/TransformProcessorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/TransformProcessorTest.java index c18c1fb94..8a7bdad8e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/transformers/TransformProcessorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/processors/transformers/TransformProcessorTest.java @@ -1,13 +1,12 @@ -package io.odpf.dagger.core.processors.transformers; +package com.gotocompany.dagger.core.processors.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; +import com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; - -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -17,13 +16,10 @@ import java.util.HashMap; import java.util.List; -import static io.odpf.dagger.core.metrics.telemetry.TelemetryTypes.PRE_PROCESSOR_TYPE; +import static com.gotocompany.dagger.core.metrics.telemetry.TelemetryTypes.PRE_PROCESSOR_TYPE; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; public class TransformProcessorTest extends DaggerContextTestBase { @@ -70,11 +66,11 @@ public void shouldThrowExceptionInCaseOfWrongConstructorTypeSupported() { HashMap transformationArguments = new HashMap<>(); transformationArguments.put("keyField", "keystore"); transfromConfigs = new ArrayList<>(); - transfromConfigs.add(new TransformConfig("io.odpf.dagger.core.processors.transformers.TransformProcessor", transformationArguments)); + transfromConfigs.add(new TransformConfig("com.gotocompany.dagger.core.processors.transformers.TransformProcessor", transformationArguments)); TransformProcessor transformProcessor = new TransformProcessor(transfromConfigs, daggerContext); RuntimeException exception = assertThrows(RuntimeException.class, () -> transformProcessor.process(streamInfo)); - assertEquals("io.odpf.dagger.core.processors.transformers.TransformProcessor.(java.util.Map, [Ljava.lang.String;, io.odpf.dagger.common.core.DaggerContext)", exception.getMessage()); + assertEquals("com.gotocompany.dagger.core.processors.transformers.TransformProcessor.(java.util.Map, [Ljava.lang.String;, com.gotocompany.dagger.common.core.DaggerContext)", exception.getMessage()); } @Test @@ -154,10 +150,10 @@ public void shouldProcessTwoPostTransformers() { 
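The constructor-mismatch message asserted in TransformProcessorTest above spells out the shape TransformProcessor expects when it instantiates a transformer reflectively: a (java.util.Map, String[], DaggerContext) constructor. A hedged sketch of a user transformer honouring that shape follows; MyTransformer and the Map<String, String> argument type are assumptions, and the Transformer interface methods from dagger-common are intentionally omitted:

    // Hedged sketch of a user transformer with the constructor shape implied by the
    // exception message asserted above. A real transformer would additionally implement
    // com.gotocompany.dagger.common.core.Transformer.
    import com.gotocompany.dagger.common.core.DaggerContext;
    import java.util.Map;

    public class MyTransformer {
        private final Map<String, String> transformationArguments;
        private final String[] columnNames;
        private final DaggerContext daggerContext;

        public MyTransformer(Map<String, String> transformationArguments,
                             String[] columnNames,
                             DaggerContext daggerContext) {
            this.transformationArguments = transformationArguments;
            this.columnNames = columnNames;
            this.daggerContext = daggerContext;
        }
    }
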
when(streamInfo.getColumnNames()).thenReturn(null); when(inputStream.map(any(MapFunction.class))).thenReturn(mappedDataStream); transfromConfigs = new ArrayList<>(); - transfromConfigs.add(new TransformConfig("io.odpf.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ + transfromConfigs.add(new TransformConfig("com.gotocompany.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ put("keyField", "keystore"); }})); - transfromConfigs.add(new TransformConfig("io.odpf.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ + transfromConfigs.add(new TransformConfig("com.gotocompany.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ put("keyField", "keystore"); }})); @@ -173,13 +169,13 @@ public void shouldProcessMultiplePostTransformers() { when(inputStream.map(any(MapFunction.class))).thenReturn(mappedDataStream); when(mappedDataStream.map(any(MapFunction.class))).thenReturn(mappedDataStream); transfromConfigs = new ArrayList<>(); - transfromConfigs.add(new TransformConfig("io.odpf.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ + transfromConfigs.add(new TransformConfig("com.gotocompany.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ put("keyField", "keystore"); }})); - transfromConfigs.add(new TransformConfig("io.odpf.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ + transfromConfigs.add(new TransformConfig("com.gotocompany.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ put("keyField", "keystore"); }})); - transfromConfigs.add(new TransformConfig("io.odpf.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ + transfromConfigs.add(new TransformConfig("com.gotocompany.dagger.core.processors.transformers.MockTransformer", new HashMap() {{ put("keyField", "keystore"); }})); @@ -191,14 +187,14 @@ public void shouldProcessMultiplePostTransformers() { @Test public void shouldPopulateDefaultArguments() { - TransformConfig config = new TransformConfig("io.odpf.TestProcessor", new HashMap() {{ + TransformConfig config = new TransformConfig("com.gotocompany.TestProcessor", new HashMap() {{ put("test-key", "test-value"); }}); TransformProcessor processor = new TransformProcessor("test_table", PRE_PROCESSOR_TYPE, Collections.singletonList(config), daggerContext); assertEquals("test_table", processor.tableName); assertEquals(PRE_PROCESSOR_TYPE, processor.type); assertEquals(1, processor.transformConfigs.size()); - assertEquals("io.odpf.TestProcessor", processor.transformConfigs.get(0).getTransformationClass()); + assertEquals("com.gotocompany.TestProcessor", processor.transformConfigs.get(0).getTransformationClass()); assertEquals("test_table", processor.transformConfigs.get(0).getTransformationArguments().get("table_name")); assertEquals("test-value", processor.transformConfigs.get(0).getTransformationArguments().get("test-key")); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/SinkOrchestratorTest.java similarity index 60% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/SinkOrchestratorTest.java index 6979a1c48..1a660c543 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/SinkOrchestratorTest.java @@ -1,25 
+1,23 @@ -package io.odpf.dagger.core.sink; - -import io.odpf.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; -import io.odpf.dagger.core.sink.bigquery.BigQuerySink; +package com.gotocompany.dagger.core.sink; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.sink.bigquery.BigQuerySink; +import com.gotocompany.dagger.core.sink.influx.InfluxDBSink; +import com.gotocompany.dagger.core.sink.log.LogSink; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Sink; import org.apache.flink.api.java.utils.ParameterTool; -import org.apache.flink.connector.kafka.sink.KafkaSink; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.sink.influx.InfluxDBSink; -import io.odpf.dagger.core.sink.log.LogSink; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import java.util.*; -import static io.odpf.dagger.common.core.Constants.*; -import static io.odpf.dagger.core.utils.Constants.SINK_KAFKA_BROKERS_KEY; -import static io.odpf.dagger.core.utils.Constants.SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.*; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.junit.Assert.assertEquals; @@ -28,6 +26,9 @@ public class SinkOrchestratorTest { + private static final String SINK_KAFKA_PRODUCER_CONFIG_SASL_LOGIN_CALLBACK_HANDLER_CLASS = "SINK_KAFKA_PRODUCER_CONFIG_SASL_LOGIN_CALLBACK_HANDLER_CLASS"; + private static final String SASL_LOGIN_CALLBACK_HANDLER_CLASS_VALUE = "com.gotocompany.dagger.core.utils.SinkKafkaConfigUtil"; + private Configuration configuration; private StencilClientOrchestrator stencilClientOrchestrator; private SinkOrchestrator sinkOrchestrator; @@ -73,25 +74,28 @@ public void shouldGiveInfluxWhenConfiguredToUseNothing() throws Exception { @Test public void shouldSetKafkaProducerConfigurations() throws Exception { - when(configuration.getString(eq(SINK_KAFKA_BROKERS_KEY), anyString())).thenReturn("10.200.216.87:6668"); - when(configuration.getBoolean(eq(SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY), anyBoolean())).thenReturn(true); + Map additionalParameters = new HashMap<>(); + additionalParameters.put(SINK_KAFKA_PRODUCER_CONFIG_SASL_LOGIN_CALLBACK_HANDLER_CLASS, SASL_LOGIN_CALLBACK_HANDLER_CLASS_VALUE); + when(configuration.getString(eq(Constants.SINK_KAFKA_BROKERS_KEY), anyString())).thenReturn("10.200.216.87:6668"); + when(configuration.getBoolean(eq(Constants.SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY), anyBoolean())).thenReturn(true); + when(configuration.getString(eq(Constants.SINK_KAFKA_LINGER_MS_KEY), anyString())).thenReturn("1000"); + when(configuration.getParam()).thenReturn(ParameterTool.fromMap(additionalParameters)); + when(configuration.getString(eq(SINK_KAFKA_PRODUCER_CONFIG_SASL_LOGIN_CALLBACK_HANDLER_CLASS), anyString())).thenReturn(SASL_LOGIN_CALLBACK_HANDLER_CLASS_VALUE); Properties producerProperties = sinkOrchestrator.getProducerProperties(configuration); 
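The two SinkOrchestratorTest cases around this point pin down the producer-property behaviour being added: a numeric SINK_KAFKA_LINGER_MS value is forwarded as linger.ms, and a non-numeric value makes getProducerProperties fail with IllegalArgumentException. A standalone sketch of that validation, assuming the check is a simple integer parse (the real SinkOrchestrator implementation is not part of this diff):

    // Standalone sketch (not the actual SinkOrchestrator code) of the behaviour the two
    // tests here exercise: a numeric linger value is copied into the producer properties,
    // a non-numeric one fails fast with IllegalArgumentException.
    import java.util.Properties;

    class LingerMsValidator {
        static void applyLingerMs(Properties producerProperties, String lingerMs) {
            if (lingerMs == null || lingerMs.isEmpty()) {
                return; // nothing configured, keep Kafka's default
            }
            try {
                Integer.parseInt(lingerMs);
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("linger.ms must be an integer, got: " + lingerMs, e);
            }
            producerProperties.setProperty("linger.ms", lingerMs);
        }
    }
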
assertEquals(producerProperties.getProperty("compression.type"), "snappy"); assertEquals(producerProperties.getProperty("max.request.size"), "20971520"); + assertEquals(producerProperties.getProperty("linger.ms"), "1000"); + assertEquals(producerProperties.getProperty("sasl.login.callback.handler.class"), SASL_LOGIN_CALLBACK_HANDLER_CLASS_VALUE); } @Test - public void shouldGiveKafkaProducerWhenConfiguredToUseKafkaSink() throws Exception { - when(configuration.getString(eq("SINK_TYPE"), anyString())).thenReturn("kafka"); - when(configuration.getString(eq("SINK_KAFKA_PROTO_MESSAGE"), anyString())).thenReturn("output_proto"); - when(configuration.getString(eq("SINK_KAFKA_BROKERS"), anyString())).thenReturn("output_broker:2667"); - when(configuration.getString(eq("SINK_KAFKA_TOPIC"), anyString())).thenReturn("output_topic"); - when(configuration.getString(eq("SINK_KAFKA_DATA_TYPE"), anyString())).thenReturn("PROTO"); - - Sink sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator, daggerStatsDReporter); - - assertThat(sinkFunction, instanceOf(KafkaSink.class)); + public void shouldThrowIllegalArgumentExceptionForInvalidLingerMs() throws Exception { + when(configuration.getString(eq(Constants.SINK_KAFKA_BROKERS_KEY), anyString())).thenReturn("10.200.216.87:6668"); + when(configuration.getBoolean(eq(Constants.SINK_KAFKA_PRODUCE_LARGE_MESSAGE_ENABLE_KEY), anyBoolean())).thenReturn(true); + when(configuration.getString(eq(Constants.SINK_KAFKA_LINGER_MS_KEY), anyString())).thenReturn("abc"); + Assert.assertThrows("Expected Illegal ArgumentException", IllegalArgumentException.class, + () -> sinkOrchestrator.getProducerProperties(configuration)); } @Test diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkBuilderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkBuilderTest.java similarity index 81% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkBuilderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkBuilderTest.java index e43f4ec61..ebc5bb661 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkBuilderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkBuilderTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.sink.bigquery; +package com.gotocompany.dagger.core.sink.bigquery; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; import org.apache.flink.api.java.utils.ParameterTool; import org.junit.Assert; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkTest.java similarity index 83% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkTest.java index abc3f66a8..9d96bd607 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkTest.java @@ -1,10 +1,9 @@ -package io.odpf.dagger.core.sink.bigquery; +package 
com.gotocompany.dagger.core.sink.bigquery; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; -import io.odpf.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; -import io.odpf.depot.OdpfSink; -import io.odpf.depot.bigquery.BigQuerySinkFactory; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.metrics.reporters.statsd.DaggerStatsDReporter; +import com.gotocompany.depot.bigquery.BigQuerySinkFactory; import org.apache.flink.api.connector.sink.Sink; import org.apache.flink.api.connector.sink.SinkWriter; import org.apache.flink.api.java.utils.ParameterTool; @@ -44,10 +43,10 @@ public void shouldCreateSinkWriter() { Sink.InitContext context = Mockito.mock(Sink.InitContext.class); SinkWriterMetricGroup metricGroup = Mockito.mock(SinkWriterMetricGroup.class); Mockito.when(context.metricGroup()).thenReturn(metricGroup); - OdpfSink odpfSink = Mockito.mock(OdpfSink.class); + com.gotocompany.depot.Sink mockSink = Mockito.mock(com.gotocompany.depot.Sink.class); Map configMap = new HashMap<>(); Configuration configuration = new Configuration(ParameterTool.fromMap(configMap)); - Mockito.when(sinkFactory.create()).thenReturn(odpfSink); + Mockito.when(sinkFactory.create()).thenReturn(mockSink); BigQuerySink sink = new BigQuerySink(configuration, protoSerializer, sinkFactory, daggerStatsDReporter); SinkWriter writer = sink.createWriter(context, null); Assert.assertTrue(writer instanceof BigQuerySinkWriter); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkWriterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkWriterTest.java similarity index 88% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkWriterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkWriterTest.java index 571dab20c..94f1a5eb6 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/bigquery/BigQuerySinkWriterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/bigquery/BigQuerySinkWriterTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.sink.bigquery; +package com.gotocompany.dagger.core.sink.bigquery; -import io.odpf.dagger.common.serde.proto.serialization.ProtoSerializer; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.depot.OdpfSink; -import io.odpf.depot.OdpfSinkResponse; -import io.odpf.depot.error.ErrorInfo; -import io.odpf.depot.error.ErrorType; +import com.gotocompany.dagger.common.serde.proto.serialization.ProtoSerializer; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.depot.Sink; +import com.gotocompany.depot.SinkResponse; +import com.gotocompany.depot.error.ErrorInfo; +import com.gotocompany.depot.error.ErrorType; import org.apache.flink.types.Row; import org.junit.Assert; import org.junit.Test; @@ -17,15 +17,15 @@ public class BigQuerySinkWriterTest { @Test - public void shouldWriteToOdpfSinkInBatches() throws IOException { + public void shouldWriteToSinkInBatches() throws IOException { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = Mockito.mock(Sink.class); BigQuerySinkWriter bigquerySinkWriter = new BigQuerySinkWriter(protoSerializer, sink, 3, null, null); 
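The BigQuerySinkWriter tests here construct the writer with a batch size of 3 and assert that rows are buffered and only pushed to the sink once the batch fills. A generic sketch of that buffering pattern (BatchingWriter is illustrative only; the actual writer serializes rows with ProtoSerializer and pushes depot messages, which is not reproduced here):

    // Generic, illustrative buffering pattern behind the batch assertions in these tests.
    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    class BatchingWriter<T> {
        private final int batchSize;
        private final List<T> currentBatch = new ArrayList<>();
        private final Consumer<List<T>> pushToSink;

        BatchingWriter(int batchSize, Consumer<List<T>> pushToSink) {
            this.batchSize = batchSize;
            this.pushToSink = pushToSink;
        }

        void write(T element) {
            currentBatch.add(element);
            if (currentBatch.size() >= batchSize) {
                flush();
            }
        }

        void flush() {
            if (currentBatch.isEmpty()) {
                return;
            }
            pushToSink.accept(new ArrayList<>(currentBatch)); // one push per full batch
            currentBatch.clear();
        }
    }
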
Row row = new Row(1); row.setField(0, "some field"); Mockito.when(protoSerializer.serializeKey(row)).thenReturn("test".getBytes()); Mockito.when(protoSerializer.serializeValue(row)).thenReturn("testMessage".getBytes()); - OdpfSinkResponse response = Mockito.mock(OdpfSinkResponse.class); + SinkResponse response = Mockito.mock(SinkResponse.class); Mockito.when(response.hasErrors()).thenReturn(false); Mockito.when(sink.pushToSink(Mockito.anyList())).thenReturn(response); bigquerySinkWriter.write(row, null); @@ -42,7 +42,7 @@ public void shouldWriteToOdpfSinkInBatches() throws IOException { @Test public void shouldNotWriteIfCurrentSizeIsLessThanTheBatchSize() throws IOException { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = Mockito.mock(Sink.class); BigQuerySinkWriter bigquerySinkWriter = new BigQuerySinkWriter(protoSerializer, sink, 10, null, null); Row row = new Row(1); row.setField(0, "some field"); @@ -60,7 +60,7 @@ public void shouldNotWriteIfCurrentSizeIsLessThanTheBatchSize() throws IOExcepti @Test public void shouldThrowExceptionWhenSinkResponseHasErrors() throws IOException { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = Mockito.mock(Sink.class); ErrorReporter reporter = Mockito.mock(ErrorReporter.class); Set errorTypesForFailing = new HashSet() {{ add(ErrorType.DESERIALIZATION_ERROR); @@ -71,7 +71,7 @@ public void shouldThrowExceptionWhenSinkResponseHasErrors() throws IOException { row.setField(0, "some field"); Mockito.when(protoSerializer.serializeKey(row)).thenReturn("test".getBytes()); Mockito.when(protoSerializer.serializeValue(row)).thenReturn("testMessage".getBytes()); - OdpfSinkResponse response = Mockito.mock(OdpfSinkResponse.class); + SinkResponse response = Mockito.mock(SinkResponse.class); Mockito.when(response.hasErrors()).thenReturn(true); Map errorInfoMap = new HashMap() {{ put(1L, new ErrorInfo(new Exception("test1"), ErrorType.DESERIALIZATION_ERROR)); @@ -93,7 +93,7 @@ public void shouldThrowExceptionWhenSinkResponseHasErrors() throws IOException { @Test public void shouldNotThrowExceptionIfErrorTypeNotConfigured() throws IOException { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = Mockito.mock(Sink.class); ErrorReporter reporter = Mockito.mock(ErrorReporter.class); Set errorTypesForFailing = Collections.emptySet(); BigQuerySinkWriter bigquerySinkWriter = new BigQuerySinkWriter(protoSerializer, sink, 3, reporter, errorTypesForFailing); @@ -101,7 +101,7 @@ public void shouldNotThrowExceptionIfErrorTypeNotConfigured() throws IOException row.setField(0, "some field"); Mockito.when(protoSerializer.serializeKey(row)).thenReturn("test".getBytes()); Mockito.when(protoSerializer.serializeValue(row)).thenReturn("testMessage".getBytes()); - OdpfSinkResponse response = Mockito.mock(OdpfSinkResponse.class); + SinkResponse response = Mockito.mock(SinkResponse.class); Mockito.when(response.hasErrors()).thenReturn(true); Map errorInfoMap = new HashMap() {{ put(1L, new ErrorInfo(new Exception("test1"), ErrorType.DESERIALIZATION_ERROR)); @@ -122,7 +122,7 @@ public void shouldNotThrowExceptionIfErrorTypeNotConfigured() throws IOException @Test public void shouldCallClose() throws Exception { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = 
Mockito.mock(Sink.class); BigQuerySinkWriter bigquerySinkWriter = new BigQuerySinkWriter(protoSerializer, sink, 3, null, null); bigquerySinkWriter.close(); Mockito.verify(sink, Mockito.times(1)).close(); @@ -131,7 +131,7 @@ public void shouldCallClose() throws Exception { @Test public void shouldReportExceptionThrownFromSinkConnector() throws IOException { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = Mockito.mock(Sink.class); ErrorReporter reporter = Mockito.mock(ErrorReporter.class); Set errorTypesForFailing = Collections.emptySet(); BigQuerySinkWriter bigquerySinkWriter = new BigQuerySinkWriter(protoSerializer, sink, 3, reporter, errorTypesForFailing); @@ -139,7 +139,7 @@ public void shouldReportExceptionThrownFromSinkConnector() throws IOException { row.setField(0, "some field"); Mockito.when(protoSerializer.serializeKey(row)).thenReturn("test".getBytes()); Mockito.when(protoSerializer.serializeValue(row)).thenReturn("testMessage".getBytes()); - OdpfSinkResponse response = Mockito.mock(OdpfSinkResponse.class); + SinkResponse response = Mockito.mock(SinkResponse.class); Mockito.when(sink.pushToSink(Mockito.anyList())).thenThrow(new RuntimeException("test")); bigquerySinkWriter.write(row, null); bigquerySinkWriter.write(row, null); @@ -153,13 +153,13 @@ public void shouldReportExceptionThrownFromSinkConnector() throws IOException { @Test public void shouldFlushWhilePrepareForCommit() throws IOException { ProtoSerializer protoSerializer = Mockito.mock(ProtoSerializer.class); - OdpfSink sink = Mockito.mock(OdpfSink.class); + Sink sink = Mockito.mock(Sink.class); BigQuerySinkWriter bigquerySinkWriter = new BigQuerySinkWriter(protoSerializer, sink, 3, null, null); Row row = new Row(1); row.setField(0, "some field"); Mockito.when(protoSerializer.serializeKey(row)).thenReturn("test".getBytes()); Mockito.when(protoSerializer.serializeValue(row)).thenReturn("testMessage".getBytes()); - OdpfSinkResponse response = Mockito.mock(OdpfSinkResponse.class); + SinkResponse response = Mockito.mock(SinkResponse.class); Mockito.when(response.hasErrors()).thenReturn(false); Mockito.when(sink.pushToSink(Mockito.anyList())).thenReturn(response); bigquerySinkWriter.write(row, null); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBSinkTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/InfluxDBSinkTest.java similarity index 78% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBSinkTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/InfluxDBSinkTest.java index d2cafe64d..257422878 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBSinkTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/InfluxDBSinkTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.sink.influx; +package com.gotocompany.dagger.core.sink.influx; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Sink.InitContext; import org.apache.flink.api.connector.sink.SinkWriter; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.groups.SinkWriterMetricGroup; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; 
import org.junit.Before; @@ -21,7 +21,6 @@ import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; -import static io.odpf.dagger.core.utils.Constants.*; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -57,12 +56,12 @@ public class InfluxDBSinkTest { @Before public void setUp() throws Exception { initMocks(this); - when(configuration.getString(SINK_INFLUX_URL_KEY, SINK_INFLUX_URL_DEFAULT)).thenReturn("http://localhost:1111"); - when(configuration.getString(SINK_INFLUX_USERNAME_KEY, SINK_INFLUX_USERNAME_DEFAULT)).thenReturn("usr"); - when(configuration.getString(SINK_INFLUX_PASSWORD_KEY, SINK_INFLUX_PASSWORD_DEFAULT)).thenReturn("pwd"); + when(configuration.getString(Constants.SINK_INFLUX_URL_KEY, Constants.SINK_INFLUX_URL_DEFAULT)).thenReturn("http://localhost:1111"); + when(configuration.getString(Constants.SINK_INFLUX_USERNAME_KEY, Constants.SINK_INFLUX_USERNAME_DEFAULT)).thenReturn("usr"); + when(configuration.getString(Constants.SINK_INFLUX_PASSWORD_KEY, Constants.SINK_INFLUX_PASSWORD_DEFAULT)).thenReturn("pwd"); - when(configuration.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT)).thenReturn(100); - when(configuration.getInteger(SINK_INFLUX_FLUSH_DURATION_MS_KEY, SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT)).thenReturn(1000); + when(configuration.getInteger(Constants.SINK_INFLUX_BATCH_SIZE_KEY, Constants.SINK_INFLUX_BATCH_SIZE_DEFAULT)).thenReturn(100); + when(configuration.getInteger(Constants.SINK_INFLUX_FLUSH_DURATION_MS_KEY, Constants.SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT)).thenReturn(1000); when(influxDBFactory.connect(any(), any(), any())).thenReturn(influxDb); when(context.metricGroup()).thenReturn(metricGroup); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBWriterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/InfluxDBWriterTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBWriterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/InfluxDBWriterTest.java index 427752afd..c5c719104 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBWriterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/InfluxDBWriterTest.java @@ -1,15 +1,15 @@ -package io.odpf.dagger.core.sink.influx; +package com.gotocompany.dagger.core.sink.influx; +import com.gotocompany.dagger.core.exception.InfluxWriteException; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Sink.InitContext; import org.apache.flink.api.connector.sink.SinkWriter.Context; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.groups.SinkWriterMetricGroup; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.exception.InfluxWriteException; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; import org.influxdb.dto.Point; @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.core.utils.Constants.*; import static org.junit.Assert.assertEquals; import static 
org.junit.Assert.assertThrows; import static org.mockito.ArgumentMatchers.any; @@ -68,9 +67,9 @@ public class InfluxDBWriterTest { @Before public void setUp() throws Exception { initMocks(this); - when(configuration.getString(SINK_INFLUX_DB_NAME_KEY, SINK_INFLUX_DB_NAME_DEFAULT)).thenReturn("dagger_test"); - when(configuration.getString(SINK_INFLUX_RETENTION_POLICY_KEY, SINK_INFLUX_RETENTION_POLICY_DEFAULT)).thenReturn("two_day_policy"); - when(configuration.getString(SINK_INFLUX_MEASUREMENT_NAME_KEY, SINK_INFLUX_MEASUREMENT_NAME_DEFAULT)).thenReturn("test_table"); + when(configuration.getString(Constants.SINK_INFLUX_DB_NAME_KEY, Constants.SINK_INFLUX_DB_NAME_DEFAULT)).thenReturn("dagger_test"); + when(configuration.getString(Constants.SINK_INFLUX_RETENTION_POLICY_KEY, Constants.SINK_INFLUX_RETENTION_POLICY_DEFAULT)).thenReturn("two_day_policy"); + when(configuration.getString(Constants.SINK_INFLUX_MEASUREMENT_NAME_KEY, Constants.SINK_INFLUX_MEASUREMENT_NAME_DEFAULT)).thenReturn("test_table"); when(initContext.metricGroup()).thenReturn(metricGroup); when(metricGroup.addGroup(Constants.SINK_INFLUX_LATE_RECORDS_DROPPED_KEY)).thenReturn(metricGroup); when(metricGroup.addGroup(Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY, diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java similarity index 97% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java index 8ab25a6ea..4ce925a7f 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.connector.sink.Sink.InitContext; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.metrics.SimpleCounter; import org.apache.flink.metrics.groups.SinkWriterMetricGroup; -import io.odpf.dagger.core.utils.Constants; import org.influxdb.InfluxDBException; import org.influxdb.dto.Point; import org.junit.Before; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/NoErrorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/NoErrorTest.java similarity index 91% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/NoErrorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/NoErrorTest.java index 13bf9b9bb..7876c8540 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/NoErrorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/NoErrorTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidErrorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/ValidErrorTest.java similarity index 95% rename from 
dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidErrorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/ValidErrorTest.java index e8a0631bd..41f7b02e6 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidErrorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/ValidErrorTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; import org.influxdb.dto.Point; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/ValidExceptionTest.java similarity index 95% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/ValidExceptionTest.java index 731154eb3..551d8667e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/influx/errors/ValidExceptionTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.sink.influx.errors; +package com.gotocompany.dagger.core.sink.influx.errors; import org.influxdb.dto.Point; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/KafkaSerializationSchemaFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializationSchemaFactoryTest.java similarity index 77% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/KafkaSerializationSchemaFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializationSchemaFactoryTest.java index fa7bf5702..4fe22ef6e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/KafkaSerializationSchemaFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/KafkaSerializationSchemaFactoryTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.sink.kafka; +package com.gotocompany.dagger.core.sink.kafka; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.sink.kafka.builder.KafkaJsonSerializerBuilder; -import io.odpf.dagger.core.sink.kafka.builder.KafkaProtoSerializerBuilder; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.sink.kafka.builder.KafkaJsonSerializerBuilder; +import com.gotocompany.dagger.core.sink.kafka.builder.KafkaProtoSerializerBuilder; +import com.gotocompany.dagger.core.utils.Constants; import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilderTest.java similarity index 92% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilderTest.java index ebf47bd2e..1f1b4eef1 100644 --- 
a/dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaJsonSerializerBuilderTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.sink.kafka.builder; +package com.gotocompany.dagger.core.sink.kafka.builder; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.exceptions.serde.InvalidJSONSchemaException; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.exceptions.serde.InvalidJSONSchemaException; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilderTest.java similarity index 86% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilderTest.java index 137644827..b800f76be 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/sink/kafka/builder/KafkaProtoSerializerBuilderTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.core.sink.kafka.builder; +package com.gotocompany.dagger.core.sink.kafka.builder; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.serde.proto.serialization.KafkaProtoSerializer; -import io.odpf.dagger.core.utils.Constants; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.serde.proto.serialization.KafkaProtoSerializer; import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/DaggerSourceFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/DaggerSourceFactoryTest.java similarity index 73% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/DaggerSourceFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/DaggerSourceFactoryTest.java index e95d1e3d7..3da9c2acb 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/DaggerSourceFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/DaggerSourceFactoryTest.java @@ -1,19 +1,19 @@ -package io.odpf.dagger.core.source; +package com.gotocompany.dagger.core.source; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.common.serde.json.deserialization.JsonDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; -import 
io.odpf.dagger.core.exception.InvalidDaggerSourceException; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; -import io.odpf.dagger.core.source.kafka.KafkaDaggerSource; -import io.odpf.dagger.core.source.parquet.ParquetDaggerSource; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.exception.InvalidDaggerSourceException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; +import com.gotocompany.dagger.core.source.kafka.KafkaDaggerSource; +import com.gotocompany.dagger.core.source.parquet.ParquetDaggerSource; +import com.gotocompany.dagger.common.serde.json.deserialization.JsonDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/StreamTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/StreamTest.java similarity index 84% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/StreamTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/StreamTest.java index a7366cd11..32163ee4e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/StreamTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/StreamTest.java @@ -1,18 +1,19 @@ -package io.odpf.dagger.core.source; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; -import io.odpf.dagger.core.source.kafka.KafkaDaggerSource; -import io.odpf.dagger.core.source.parquet.ParquetDaggerSource; -import io.odpf.depot.metrics.StatsDReporter; -import io.odpf.stencil.client.StencilClient; +package com.gotocompany.dagger.core.source; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import 
com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; +import com.gotocompany.dagger.core.source.kafka.KafkaDaggerSource; +import com.gotocompany.dagger.core.source.parquet.ParquetDaggerSource; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.depot.metrics.StatsDReporter; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.stencil.config.StencilConfig; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.types.Row; @@ -41,6 +42,9 @@ public class StreamTest { @Mock private StencilClient stencilClient; + @Mock + private StencilConfig stencilConfig; + @Mock private Configuration configuration; @@ -70,6 +74,8 @@ public void shouldBeAbleToBuildAStreamWithKafkaDaggerSourceAndProtoSchema() { when(streamConfig.getDataType()).thenReturn("PROTO"); when(streamConfig.getProtoClass()).thenReturn("com.tests.TestMessage"); when(streamConfig.getSchemaTable()).thenReturn("data_stream"); + when(stencilConfig.getCacheAutoRefresh()).thenReturn(false); + when(stencilClientOrchestrator.createStencilConfig()).thenReturn(stencilConfig); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(stencilClient.get("com.tests.TestMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); @@ -95,6 +101,8 @@ public void shouldBeAbleToBuildAStreamWithFlinkKafkaConsumerDaggerSourceAndProto when(streamConfig.getDataType()).thenReturn("PROTO"); when(streamConfig.getProtoClass()).thenReturn("com.tests.TestMessage"); when(streamConfig.getSchemaTable()).thenReturn("data_stream"); + when(stencilConfig.getCacheAutoRefresh()).thenReturn(false); + when(stencilClientOrchestrator.createStencilConfig()).thenReturn(stencilConfig); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(stencilClient.get("com.tests.TestMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/StreamsFactoryTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/StreamsFactoryTest.java similarity index 85% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/StreamsFactoryTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/StreamsFactoryTest.java index e54d152ab..50e01841e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/StreamsFactoryTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/StreamsFactoryTest.java @@ -1,14 +1,15 @@ -package io.odpf.dagger.core.source; +package com.gotocompany.dagger.core.source; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; -import io.odpf.dagger.core.source.kafka.KafkaDaggerSource; -import io.odpf.depot.metrics.StatsDReporter; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.flinkkafkaconsumer.FlinkKafkaConsumerDaggerSource; +import com.gotocompany.dagger.core.source.kafka.KafkaDaggerSource; +import com.gotocompany.depot.metrics.StatsDReporter; +import com.gotocompany.stencil.client.StencilClient; import 
com.google.gson.JsonSyntaxException; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.stencil.config.StencilConfig; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -16,7 +17,7 @@ import java.util.List; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @@ -31,6 +32,9 @@ public class StreamsFactoryTest { @Mock private StencilClient stencilClient; + @Mock + private StencilConfig stencilConfig; + @Mock private Configuration configuration; @@ -69,7 +73,8 @@ public void shouldReturnListOfStreamsCreatedFromConfiguration() { when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); when(stencilClient.get("com.tests.TestMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); - + when(stencilConfig.getCacheAutoRefresh()).thenReturn(false); + when(stencilClientOrchestrator.createStencilConfig()).thenReturn(stencilConfig); List streams = StreamsFactory.getStreams(configuration, stencilClientOrchestrator, statsDReporterSupplierMock); assertEquals(2, streams.size()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/StreamConfigTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/StreamConfigTest.java similarity index 64% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/StreamConfigTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/StreamConfigTest.java index 595136e8c..23527ea52 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/StreamConfigTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/StreamConfigTest.java @@ -1,10 +1,11 @@ -package io.odpf.dagger.core.source.config; +package com.gotocompany.dagger.core.source.config; import com.google.gson.JsonSyntaxException; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.source.config.models.*; -import io.odpf.dagger.core.source.parquet.SourceParquetReadOrderStrategy; -import io.odpf.dagger.core.source.parquet.SourceParquetSchemaMatchStrategy; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.source.config.models.*; +import com.gotocompany.dagger.core.source.parquet.SourceParquetReadOrderStrategy; +import com.gotocompany.dagger.core.source.parquet.SourceParquetSchemaMatchStrategy; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.kafka.clients.consumer.OffsetResetStrategy; import org.junit.Assert; import org.junit.Before; @@ -18,12 +19,7 @@ import java.util.Properties; import java.util.regex.Pattern; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.core.source.config.models.SourceName.KAFKA_CONSUMER; -import static io.odpf.dagger.core.source.config.models.SourceType.UNBOUNDED; -import static io.odpf.dagger.core.source.parquet.SourceParquetReadOrderStrategy.EARLIEST_TIME_URL_FIRST; -import static 
io.odpf.dagger.core.utils.Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; @@ -98,7 +94,7 @@ public void shouldParseMultipleStreamsFromStreamConfigJson() { @Test public void shouldParseKafkaProperties() { when(configuration.getString(INPUT_STREAMS, "")).thenReturn("[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\" } ]"); - when(configuration.getBoolean(SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(false); + when(configuration.getBoolean(Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(false); StreamConfig[] streamConfigs = StreamConfig.parse(configuration); HashMap kafkaPropMap = new HashMap<>(); @@ -114,10 +110,77 @@ public void shouldParseKafkaProperties() { assertEquals(properties, streamConfigs[0].getKafkaProps(configuration)); } + @Test + public void shouldParseKafkaPropertiesWithSSLConfigurations() { + when(configuration.getString(INPUT_STREAMS, "")).thenReturn("[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL\": \"SSL\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD\": \"test-key-pass\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL\": \"SSL\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"test-keystore-location\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD\": \"test-keystore-pass\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE\": \"JKS\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION\": \"test-truststore-location\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD\": \"test-truststore-pass\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE\": \"JKS\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\" } ]"); + when(configuration.getBoolean(Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(false); + StreamConfig[] streamConfigs = StreamConfig.parse(configuration); + + HashMap kafkaPropMap = new HashMap<>(); + kafkaPropMap.put("group.id", "dummy-consumer-group"); + kafkaPropMap.put("bootstrap.servers", "localhost:9092"); + kafkaPropMap.put("auto.offset.reset", "latest"); + kafkaPropMap.put("auto.commit.enable", ""); + kafkaPropMap.put("ssl.keystore.password", "test-keystore-pass"); + 
kafkaPropMap.put("ssl.keystore.type", "JKS"); + kafkaPropMap.put("ssl.keystore.location", "test-keystore-location"); + kafkaPropMap.put("ssl.protocol", "SSL"); + kafkaPropMap.put("ssl.key.password", "test-key-pass"); + kafkaPropMap.put("ssl.truststore.type", "JKS"); + kafkaPropMap.put("ssl.truststore.location", "test-truststore-location"); + kafkaPropMap.put("ssl.truststore.password", "test-truststore-pass"); + kafkaPropMap.put("security.protocol", "SSL"); + + + + Properties properties = new Properties(); + properties.putAll(kafkaPropMap); + + assertEquals(properties, streamConfigs[0].getKafkaProps(configuration)); + } + + @Test + public void shouldParseMultipleStreamsFromStreamConfigWithSSLConfigurations() { + when(configuration.getString(INPUT_STREAMS, "")).thenReturn("[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"false\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL\": \"SSL\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD\": \"test-key-pass\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL\": \"SSL\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"test-keystore-location\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD\": \"test-keystore-pass\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE\": \"JKS\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION\": \"test-truststore-location\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD\": \"test-truststore-pass\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE\": \"JKS\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\" }, {\"INPUT_SCHEMA_TABLE\": \"data_stream_1\", \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_DATATYPE\": \"JSON\", \"INPUT_SCHEMA_JSON_SCHEMA\": \"{ \\\"$schema\\\": \\\"https://json-schema.org/draft/2020-12/schema\\\", \\\"$id\\\": \\\"https://example.com/product.schema.json\\\", \\\"title\\\": \\\"Product\\\", \\\"description\\\": \\\"A product from Acme's catalog\\\", \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"id\\\": { \\\"description\\\": \\\"The unique identifier for a product\\\", \\\"type\\\": \\\"string\\\" }, \\\"time\\\": { \\\"description\\\": \\\"event timestamp of the event\\\", \\\"type\\\": \\\"string\\\", \\\"format\\\" : \\\"date-time\\\" } }, \\\"required\\\": [ \\\"id\\\", \\\"time\\\" ] }\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"true\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\" } ]"); + StreamConfig[] streamConfigs = StreamConfig.parse(configuration); + + assertEquals(2, streamConfigs.length); + + StreamConfig currConfig = streamConfigs[0]; + assertEquals("false", currConfig.getAutoCommitEnable()); + assertEquals("latest", currConfig.getAutoOffsetReset()); + assertEquals("PROTO", currConfig.getDataType()); + assertEquals("dummy-consumer-group", currConfig.getConsumerGroupId()); + assertEquals("41", currConfig.getEventTimestampFieldIndex()); + 
assertEquals("test-topic", currConfig.getKafkaTopicNames()); + assertEquals("data_stream", currConfig.getSchemaTable()); + assertEquals("local-kafka-stream", currConfig.getKafkaName()); + assertEquals("SSL", currConfig.getSecurityProtocol()); + assertEquals("localhost:9092", currConfig.getBootstrapServers()); + assertEquals("SSL", currConfig.getSslProtocol()); + assertEquals("test-key-pass", currConfig.getSslKeyPassword()); + assertEquals("test-keystore-location", currConfig.getSslKeystoreLocation()); + assertEquals("test-keystore-pass", currConfig.getSslKeystorePassword()); + assertEquals("JKS", currConfig.getSslKeystoreType()); + assertEquals("test-truststore-location", currConfig.getSslTruststoreLocation()); + assertEquals("test-truststore-pass", currConfig.getSslTruststorePassword()); + assertEquals("JKS", currConfig.getSslTruststoreType()); + + StreamConfig currConfigNext = streamConfigs[1]; + assertEquals("true", currConfigNext.getAutoCommitEnable()); + assertEquals("latest", currConfigNext.getAutoOffsetReset()); + assertEquals("JSON", currConfigNext.getDataType()); + assertEquals("dummy-consumer-group", currConfigNext.getConsumerGroupId()); + assertEquals("41", currConfigNext.getEventTimestampFieldIndex()); + assertEquals("test-topic", currConfigNext.getKafkaTopicNames()); + assertEquals("data_stream_1", currConfigNext.getSchemaTable()); + assertEquals("local-kafka-stream", currConfigNext.getKafkaName()); + } + @Test public void shouldParseKafkaPropertiesWithSASLConfig() { when(configuration.getString(INPUT_STREAMS, "")).thenReturn("[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL\": \"SASL_PLAINTEXT\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM\":\"SCRAM-SHA-512\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG\":\"org.apache.kafka.common.security.scram.ScramLoginModule required username=\\\"username\\\" password=\\\"password\\\";\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\" } ]"); - when(configuration.getBoolean(SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(false); + when(configuration.getBoolean(Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(false); StreamConfig[] streamConfigs = StreamConfig.parse(configuration); HashMap kafkaPropMap = new HashMap<>(); @@ -166,10 +229,46 @@ public void shouldParseMultipleStreamsFromStreamConfigWithSASLConfig() { assertEquals("local-kafka-stream", currConfigNext.getKafkaName()); } + @Test + public void shouldParseMultipleAdditionalConsumerConfigs() { + when(configuration.getString(INPUT_STREAMS, "")) + .thenReturn("[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL\": 
\"SASL_PLAINTEXT\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM\":\"SCRAM-SHA-512\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG\":\"org.apache.kafka.common.security.scram.ScramLoginModule required username=\\\"username\\\" password=\\\"password\\\";\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"false\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\", \"SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS\": {\"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_KEY\": \"ssl_keystore_key\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"ssl_keystore_location\"} }, {\"INPUT_SCHEMA_TABLE\": \"data_stream_1\", \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_DATATYPE\": \"JSON\", \"INPUT_SCHEMA_JSON_SCHEMA\": \"{ \\\"$schema\\\": \\\"https://json-schema.org/draft/2020-12/schema\\\", \\\"$id\\\": \\\"https://example.com/product.schema.json\\\", \\\"title\\\": \\\"Product\\\", \\\"description\\\": \\\"A product from Acme's catalog\\\", \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"id\\\": { \\\"description\\\": \\\"The unique identifier for a product\\\", \\\"type\\\": \\\"string\\\" }, \\\"time\\\": { \\\"description\\\": \\\"event timestamp of the event\\\", \\\"type\\\": \\\"string\\\", \\\"format\\\" : \\\"date-time\\\" } }, \\\"required\\\": [ \\\"id\\\", \\\"time\\\" ] }\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"true\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\", \"SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS\": {\"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_KEY\": \"ssl_keystore_key_2\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"ssl_keystore_location_2\", \"SOURCE_KAFKA_CONSUMER_CONFIG_OFFSET_FLUSH_INTERVAL_MS\":\"1000\"} } ]"); + StreamConfig[] streamConfigs = StreamConfig.parse(configuration); + StreamConfig firstStreamConfig = streamConfigs[0]; + StreamConfig secondStreamConfig = streamConfigs[1]; + + Properties firstStreamProperties = firstStreamConfig.getKafkaProps(configuration); + Properties secondStreamProperties = secondStreamConfig.getKafkaProps(configuration); + + assertEquals("ssl_keystore_key", firstStreamProperties.getProperty("ssl.keystore.key")); + assertEquals("ssl_keystore_location", firstStreamProperties.getProperty("ssl.keystore.location")); + assertEquals("ssl_keystore_key_2", secondStreamProperties.getProperty("ssl.keystore.key")); + assertEquals("ssl_keystore_location_2", secondStreamProperties.getProperty("ssl.keystore.location")); + assertEquals("1000", secondStreamProperties.getProperty("offset.flush.interval.ms")); + } + + @Test(expected = IllegalArgumentException.class) + public void shouldThrowIllegalArgumentExceptionIfAdditionalKafkaPropsNotMatchingPrefix() { + String streamConfig = "[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL\": 
\"SASL_PLAINTEXT\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM\":\"SCRAM-SHA-512\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG\":\"org.apache.kafka.common.security.scram.ScramLoginModule required username=\\\"username\\\" password=\\\"password\\\";\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"false\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\", \"SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS\": {\"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_KEY\": \"ssl_keystore_key\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"ssl_keystore_location\"} }, {\"INPUT_SCHEMA_TABLE\": \"data_stream_1\", \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_DATATYPE\": \"JSON\", \"INPUT_SCHEMA_JSON_SCHEMA\": \"{ \\\"$schema\\\": \\\"https://json-schema.org/draft/2020-12/schema\\\", \\\"$id\\\": \\\"https://example.com/product.schema.json\\\", \\\"title\\\": \\\"Product\\\", \\\"description\\\": \\\"A product from Acme's catalog\\\", \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"id\\\": { \\\"description\\\": \\\"The unique identifier for a product\\\", \\\"type\\\": \\\"string\\\" }, \\\"time\\\": { \\\"description\\\": \\\"event timestamp of the event\\\", \\\"type\\\": \\\"string\\\", \\\"format\\\" : \\\"date-time\\\" } }, \\\"required\\\": [ \\\"id\\\", \\\"time\\\" ] }\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"true\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\", \"SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS\": {\"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_KEY\": \"ssl_keystore_key_2\", \"CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"ssl_keystore_location_2\", \"SOURCE_KAFKA_CONSUMER_CONFIG_OFFSET_FLUSH_INTERVAL_MS\":\"1000\"} } ]"; + when(configuration.getString(INPUT_STREAMS, "")) + .thenReturn(streamConfig); + + StreamConfig.parse(configuration); + } + + @Test(expected = IllegalArgumentException.class) + public void shouldThrowIllegalArgumentExceptionIfAdditionalKafkaPropsNotMatchingPrefixMissingUnderscore() { + String streamConfig = "[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL\": \"SASL_PLAINTEXT\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM\":\"SCRAM-SHA-512\",\"SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG\":\"org.apache.kafka.common.security.scram.ScramLoginModule required username=\\\"username\\\" password=\\\"password\\\";\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"false\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\", \"SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS\": {\"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_KEY\": \"ssl_keystore_key\", \"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"ssl_keystore_location\"} }, {\"INPUT_SCHEMA_TABLE\": \"data_stream_1\", \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", 
\"INPUT_DATATYPE\": \"JSON\", \"INPUT_SCHEMA_JSON_SCHEMA\": \"{ \\\"$schema\\\": \\\"https://json-schema.org/draft/2020-12/schema\\\", \\\"$id\\\": \\\"https://example.com/product.schema.json\\\", \\\"title\\\": \\\"Product\\\", \\\"description\\\": \\\"A product from Acme's catalog\\\", \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"id\\\": { \\\"description\\\": \\\"The unique identifier for a product\\\", \\\"type\\\": \\\"string\\\" }, \\\"time\\\": { \\\"description\\\": \\\"event timestamp of the event\\\", \\\"type\\\": \\\"string\\\", \\\"format\\\" : \\\"date-time\\\" } }, \\\"required\\\": [ \\\"id\\\", \\\"time\\\" ] }\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"true\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\", \"SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS\": {\"SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_KEY\": \"ssl_keystore_key_2\", \"SOURCE_KAFKACONSUMER_CONFIG_SSL_KEYSTORE_LOCATION\": \"ssl_keystore_location_2\", \"SOURCE_KAFKA_CONSUMER_CONFIG_OFFSET_FLUSH_INTERVAL_MS\":\"1000\"} } ]"; + when(configuration.getString(INPUT_STREAMS, "")) + .thenReturn(streamConfig); + + StreamConfig.parse(configuration); + } + @Test public void shouldAddAdditionalKafkaConfigToKafkaProperties() { when(configuration.getString(INPUT_STREAMS, "")).thenReturn("[ { \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-topic\", \"INPUT_SCHEMA_TABLE\": \"data_stream\", \"INPUT_SCHEMA_PROTO_CLASS\": \"com.tests.TestMessage\", \"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\": \"41\", \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\": \"\", \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\", \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"dummy-consumer-group\", \"SOURCE_KAFKA_NAME\": \"local-kafka-stream\" } ]"); - when(configuration.getBoolean(SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(true); + when(configuration.getBoolean(Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_KEY, Constants.SOURCE_KAFKA_CONSUME_LARGE_MESSAGE_ENABLE_DEFAULT)).thenReturn(true); StreamConfig[] streamConfigs = StreamConfig.parse(configuration); HashMap kafkaPropMap = new HashMap<>(); @@ -237,8 +336,8 @@ public void shouldGetSourceDetails() { StreamConfig[] streamConfigs = StreamConfig.parse(configuration); SourceDetails[] sourceDetails = streamConfigs[0].getSourceDetails(); - assertEquals(SourceType.valueOf("BOUNDED"), sourceDetails[0].getSourceType()); - assertEquals(SourceName.valueOf("PARQUET_SOURCE"), sourceDetails[0].getSourceName()); + Assert.assertEquals(SourceType.valueOf("BOUNDED"), sourceDetails[0].getSourceType()); + Assert.assertEquals(SourceName.valueOf("PARQUET_SOURCE"), sourceDetails[0].getSourceName()); assertEquals(SourceType.valueOf("UNBOUNDED"), sourceDetails[1].getSourceType()); assertEquals(SourceName.valueOf("KAFKA_SOURCE"), sourceDetails[1].getSourceName()); } @@ -260,8 +359,8 @@ public void shouldGetUnboundedKafkaConsumerAsSourceDetailsWhenNotGiven() { SourceDetails[] sourceDetails = streamConfigs[0].getSourceDetails(); assertEquals(1, sourceDetails.length); - assertEquals(UNBOUNDED, sourceDetails[0].getSourceType()); - assertEquals(KAFKA_CONSUMER, sourceDetails[0].getSourceName()); + 
Assert.assertEquals(SourceType.UNBOUNDED, sourceDetails[0].getSourceType()); + Assert.assertEquals(SourceName.KAFKA_CONSUMER, sourceDetails[0].getSourceName()); } @Test @@ -280,7 +379,7 @@ public void shouldGetEarliestTimeUrlStrategyAsParquetReadOrderStrategyWhenNotGiv StreamConfig[] streamConfigs = StreamConfig.parse(configuration); SourceParquetReadOrderStrategy actualReadOrderStrategy = streamConfigs[0].getParquetFilesReadOrderStrategy(); - assertEquals(EARLIEST_TIME_URL_FIRST, actualReadOrderStrategy); + Assert.assertEquals(SourceParquetReadOrderStrategy.EARLIEST_TIME_URL_FIRST, actualReadOrderStrategy); } @Test @@ -294,7 +393,7 @@ public void shouldGetParquetSourceProperties() { Assert.assertArrayEquals(new String[]{"gs://some-parquet-path", "gs://another-parquet-path"}, streamConfigs[0].getParquetFilePaths()); assertEquals(SourceParquetReadOrderStrategy.valueOf("EARLIEST_TIME_URL_FIRST"), streamConfigs[0].getParquetFilesReadOrderStrategy()); - assertEquals(SourceParquetSchemaMatchStrategy.valueOf("BACKWARD_COMPATIBLE_SCHEMA_WITH_FAIL_ON_TYPE_MISMATCH"), streamConfigs[0].getParquetSchemaMatchStrategy()); + Assert.assertEquals(SourceParquetSchemaMatchStrategy.valueOf("BACKWARD_COMPATIBLE_SCHEMA_WITH_FAIL_ON_TYPE_MISMATCH"), streamConfigs[0].getParquetSchemaMatchStrategy()); } @Test diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerKafkaConsumerAdditionalConfigurationsAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerKafkaConsumerAdditionalConfigurationsAdaptorTest.java new file mode 100644 index 000000000..3b5fd859a --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerKafkaConsumerAdditionalConfigurationsAdaptorTest.java @@ -0,0 +1,132 @@ +package com.gotocompany.dagger.core.source.config.adapter; + +import com.google.gson.JsonSyntaxException; +import org.junit.Rule; +import org.junit.Test; +import com.google.gson.stream.JsonReader; +import org.junit.rules.ExpectedException; + +import java.io.IOException; +import java.io.StringReader; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +public class DaggerKafkaConsumerAdditionalConfigurationsAdaptorTest { + + private final DaggerKafkaConsumerAdditionalConfigurationsAdaptor daggerKafkaConsumerAdditionalConfigurationsAdaptor = new DaggerKafkaConsumerAdditionalConfigurationsAdaptor(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void shouldParseEmptyProperty() throws IOException { + String input = "{}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertEquals(0, result.size()); + } + + @Test + public void shouldParseJsonStringToMap() throws IOException { + String input = "{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\":\"value1\",\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\":\"value2\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + Map expectedResult = new HashMap<>(); + expectedResult.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1", "value1"); + expectedResult.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2", "value2"); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertEquals(expectedResult, result); + } + + @Test + public void shouldParseJsonStringWithCaseInsensitiveKeyToMap() throws IOException { + String 
input = "{\"sOurCe_KAFKA_CONSUMER_CONFIG_key_1\":\"value1\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + Map expectedResult = new HashMap<>(); + expectedResult.put("sOurCe_KAFKA_CONSUMER_CONFIG_key_1", "value1"); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertEquals(expectedResult, result); + } + + @Test + public void shouldIgnoreNullValues() throws IOException { + String input = "{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\":null,\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\":\"value2\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertFalse(result.containsKey("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1")); + assertEquals("value2", result.get("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2")); + } + + @Test + public void shouldHandleSpecialCharactersInValues() throws IOException { + String input = "{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY\":\"value with spaces and $pecial ch@racters\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertEquals("value with spaces and $pecial ch@racters", result.get("SOURCE_KAFKA_CONSUMER_CONFIG_KEY")); + } + + @Test + public void shouldHandleNumericalValue() throws IOException { + String input = "{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\": \"120\", \"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\": \"120.5\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertEquals("120", result.get("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1")); + assertEquals("120.5", result.get("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2")); + } + + @Test + public void shouldHandleBooleanValue() throws IOException { + String input = "{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\": \"true\", \"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\": \"false\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + + Map result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + + assertEquals("true", result.get("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1")); + assertEquals("false", result.get("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2")); + } + + @Test + public void shouldWriteMapToStringJson() { + Map map = new HashMap<>(); + map.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1", "value1"); + map.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2", "120"); + map.put("source_kafka_consumer_config_key_3", "120.5"); + + String result = daggerKafkaConsumerAdditionalConfigurationsAdaptor.toJson(map); + + assertEquals("{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\":\"value1\",\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\":\"120\",\"source_kafka_consumer_config_key_3\":\"120.5\"}", result); + } + + @Test + public void shouldThrowExceptionForInvalidProperties() throws IOException { + String input = "{\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\":\"value1\",\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\":\"value2\",\"INVALID_KEY\":\"value3\"}"; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Invalid additional kafka consumer configuration properties found: [INVALID_KEY]"); + + daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + } + + @Test(expected = JsonSyntaxException.class) + public void shouldThrowExceptionForMalformedJson() throws IOException { + String input = 
"\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1\":\"value1\",\"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2\":\"value2\""; + JsonReader jsonReader = new JsonReader(new StringReader(input)); + + daggerKafkaConsumerAdditionalConfigurationsAdaptor.read(jsonReader); + } + +} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptorTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptorTest.java index 9a150899a..37288849b 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSASLMechanismAdaptorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import com.google.gson.stream.JsonReader; -import io.odpf.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLKeyStoreFileTypeAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLKeyStoreFileTypeAdaptorTest.java new file mode 100644 index 000000000..e8f743678 --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLKeyStoreFileTypeAdaptorTest.java @@ -0,0 +1,49 @@ +package com.gotocompany.dagger.core.source.config.adapter; + +import com.google.gson.stream.JsonReader; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class DaggerSSLKeyStoreFileTypeAdaptorTest { + + @Mock + private JsonReader jsonReader; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldAcceptConfiguredValue() throws IOException { + String storeFileType = "JKS"; + when(jsonReader.nextString()).thenReturn(storeFileType); + DaggerSSLKeyStoreFileTypeAdaptor daggerSSLKeyStoreFileTypeAdaptor = new DaggerSSLKeyStoreFileTypeAdaptor(); + String keystoreFileType = daggerSSLKeyStoreFileTypeAdaptor.read(jsonReader); + assertEquals(storeFileType, keystoreFileType); + } + + @Test + public void shouldNotAcceptValuesNotConfigured1() throws IOException { + when(jsonReader.nextString()).thenReturn("JKS12"); + DaggerSSLKeyStoreFileTypeAdaptor daggerSSLKeyStoreFileTypeAdaptor = new DaggerSSLKeyStoreFileTypeAdaptor(); + assertThrows(InvalidConfigurationException.class, () -> daggerSSLKeyStoreFileTypeAdaptor.read(jsonReader)); + } + + @Test + public void shouldNotAcceptValuesNotConfigured2() throws IOException { + when(jsonReader.nextString()).thenReturn(""); + DaggerSSLKeyStoreFileTypeAdaptor daggerSSLKeyStoreFileTypeAdaptor = new DaggerSSLKeyStoreFileTypeAdaptor(); + assertThrows(InvalidConfigurationException.class, () -> 
daggerSSLKeyStoreFileTypeAdaptor.read(jsonReader)); + } +} diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLProtocolAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLProtocolAdaptorTest.java new file mode 100644 index 000000000..868787efd --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLProtocolAdaptorTest.java @@ -0,0 +1,41 @@ +package com.gotocompany.dagger.core.source.config.adapter; + +import com.google.gson.stream.JsonReader; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class DaggerSSLProtocolAdaptorTest { + + @Mock + private JsonReader jsonReader; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldAcceptConfiguredValue() throws IOException { + when(jsonReader.nextString()).thenReturn("SSL"); + DaggerSSLProtocolAdaptor daggerSSLProtocolAdaptor = new DaggerSSLProtocolAdaptor(); + String sslProtocol = daggerSSLProtocolAdaptor.read(jsonReader); + assertEquals("SSL", sslProtocol); + } + + @Test + public void shouldNotAcceptValuesNotConfigured() throws IOException { + when(jsonReader.nextString()).thenReturn("SSL1"); + DaggerSSLProtocolAdaptor daggerSSLProtocolAdaptor = new DaggerSSLProtocolAdaptor(); + assertThrows(InvalidConfigurationException.class, () -> daggerSSLProtocolAdaptor.read(jsonReader)); + } +} diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLTrustStoreFileTypeAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLTrustStoreFileTypeAdaptorTest.java new file mode 100644 index 000000000..e4d2923b3 --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSSLTrustStoreFileTypeAdaptorTest.java @@ -0,0 +1,42 @@ +package com.gotocompany.dagger.core.source.config.adapter; + +import com.google.gson.stream.JsonReader; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class DaggerSSLTrustStoreFileTypeAdaptorTest { + + @Mock + private JsonReader jsonReader; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldAcceptConfiguredValue() throws IOException { + String storeFileType = "JKS"; + when(jsonReader.nextString()).thenReturn(storeFileType); + DaggerSSLTrustStoreFileTypeAdaptor daggerSSLTrustStoreFileTypeAdaptor = new DaggerSSLTrustStoreFileTypeAdaptor(); + String truststoreFileType = daggerSSLTrustStoreFileTypeAdaptor.read(jsonReader); + assertEquals(storeFileType, truststoreFileType); + } + + @Test + public void shouldNotAcceptValuesNotConfigured() throws IOException { + when(jsonReader.nextString()).thenReturn("JKS12"); + DaggerSSLTrustStoreFileTypeAdaptor daggerSSLTrustStoreFileTypeAdaptor = new DaggerSSLTrustStoreFileTypeAdaptor(); + 
assertThrows(InvalidConfigurationException.class, () -> daggerSSLTrustStoreFileTypeAdaptor.read(jsonReader)); + } +} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptorTest.java similarity index 90% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptorTest.java index 8b4025e68..ea942197e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/DaggerSecurityProtocolAdaptorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import com.google.gson.stream.JsonReader; -import io.odpf.dagger.core.exception.InvalidConfigurationException; +import com.gotocompany.dagger.core.exception.InvalidConfigurationException; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/FileDateRangeAdaptorTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/FileDateRangeAdaptorTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/FileDateRangeAdaptorTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/FileDateRangeAdaptorTest.java index a81f53d8f..d542c0f21 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/FileDateRangeAdaptorTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/FileDateRangeAdaptorTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import com.google.gson.stream.JsonReader; -import io.odpf.dagger.core.exception.InvalidTimeRangeException; -import io.odpf.dagger.core.source.config.models.TimeRange; -import io.odpf.dagger.core.source.config.models.TimeRangePool; +import com.gotocompany.dagger.core.exception.InvalidTimeRangeException; +import com.gotocompany.dagger.core.source.config.models.TimeRange; +import com.gotocompany.dagger.core.source.config.models.TimeRangePool; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/SourceParquetFilePathsAdapterTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/SourceParquetFilePathsAdapterTest.java similarity index 97% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/SourceParquetFilePathsAdapterTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/SourceParquetFilePathsAdapterTest.java index 4212ebca2..8ccc7fd79 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/adapter/SourceParquetFilePathsAdapterTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/adapter/SourceParquetFilePathsAdapterTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.config.adapter; +package com.gotocompany.dagger.core.source.config.adapter; import 
com.google.gson.stream.JsonReader; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/models/TimeRangePoolTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/models/TimeRangePoolTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/models/TimeRangePoolTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/models/TimeRangePoolTest.java index 8f61bedf9..25b133b72 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/models/TimeRangePoolTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/models/TimeRangePoolTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.config.models; +package com.gotocompany.dagger.core.source.config.models; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/models/TimeRangeTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/models/TimeRangeTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/config/models/TimeRangeTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/models/TimeRangeTest.java index 07d94fae2..e0aada118 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/config/models/TimeRangeTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/config/models/TimeRangeTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.config.models; +package com.gotocompany.dagger.core.source.config.models; import org.junit.Test; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustomTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustomTest.java similarity index 75% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustomTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustomTest.java index c95218380..bda5d3b54 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustomTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerCustomTest.java @@ -1,9 +1,10 @@ -package io.odpf.dagger.core.source.flinkkafkaconsumer; +package com.gotocompany.dagger.core.source.flinkkafkaconsumer; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.metrics.reporters.NoOpErrorReporter; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporter; +import com.gotocompany.dagger.core.metrics.reporters.ErrorReporterFactory; +import com.gotocompany.dagger.core.metrics.reporters.NoOpErrorReporter; +import com.gotocompany.dagger.core.utils.Constants; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; @@ -16,7 +17,6 @@ import java.util.Properties; import java.util.regex.Pattern; -import static io.odpf.dagger.core.utils.Constants.*; import static org.junit.Assert.assertEquals; 
import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; @@ -55,8 +55,8 @@ public void setup() { @Test public void shouldReportIfTelemetryEnabled() { - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); - when(configuration.getLong(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); + when(configuration.getLong(Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L); Exception exception = Assert.assertThrows(Exception.class, () -> flinkKafkaConsumer011Custom.run(defaultSourceContext)); @@ -66,7 +66,7 @@ public void shouldReportIfTelemetryEnabled() { @Test public void shouldNotReportIfChainedOperatorException() { - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); Throwable throwable = new Throwable(); flinkKafkaConsumer011Custom = new FlinkKafkaConsumerCustomStub(Pattern.compile("test_topics"), kafkaDeserializationSchema, properties, configuration, new ExceptionInChainedOperatorException("chaining exception", throwable)); Exception exception = Assert.assertThrows(Exception.class, @@ -77,7 +77,7 @@ public void shouldNotReportIfChainedOperatorException() { @Test public void shouldNotReportIfTelemetryDisabled() { - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(false); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(false); Exception exception = Assert.assertThrows(Exception.class, () -> flinkKafkaConsumer011Custom.run(defaultSourceContext)); assertEquals("test exception", exception.getMessage()); @@ -86,8 +86,8 @@ public void shouldNotReportIfTelemetryDisabled() { @Test public void shouldReturnErrorStatsReporter() { - when(configuration.getLong(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L); - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); + when(configuration.getLong(Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L); + when(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); ErrorReporter expectedErrorStatsReporter = ErrorReporterFactory.getErrorReporter(defaultRuntimeContext.getMetricGroup(), configuration); FlinkKafkaConsumerCustom flinkKafkaConsumerCustom = new FlinkKafkaConsumerCustom(Pattern.compile("test_topics"), kafkaDeserializationSchema, properties, configuration); assertEquals(expectedErrorStatsReporter.getClass(), flinkKafkaConsumerCustom.getErrorReporter(defaultRuntimeContext).getClass()); @@ -112,7 +112,7 @@ protected void runBaseConsumer(SourceContext sourceContext) throws Exception { } protected ErrorReporter getErrorReporter(RuntimeContext runtimeContext) { - if (configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)) { + if 
(configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)) { return errorReporter; } else { return noOpErrorReporter; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSourceTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSourceTest.java similarity index 87% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSourceTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSourceTest.java index 2c6a8492d..fa122ea42 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSourceTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/flinkkafkaconsumer/FlinkKafkaConsumerDaggerSourceTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.source.flinkkafkaconsumer; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.json.deserialization.JsonDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; +package com.gotocompany.dagger.core.source.flinkkafkaconsumer; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.json.deserialization.JsonDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.types.Row; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/kafka/KafkaDaggerSourceTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/kafka/KafkaDaggerSourceTest.java similarity index 86% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/kafka/KafkaDaggerSourceTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/kafka/KafkaDaggerSourceTest.java index caaf38a25..5a96ce739 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/kafka/KafkaDaggerSourceTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/kafka/KafkaDaggerSourceTest.java @@ -1,13 +1,13 @@ -package io.odpf.dagger.core.source.kafka; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import 
io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.config.StreamConfig; +package com.gotocompany.dagger.core.source.kafka; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.connector.kafka.source.KafkaSource; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetDaggerSourceTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetDaggerSourceTest.java similarity index 85% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetDaggerSourceTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetDaggerSourceTest.java index 22a0f5f33..c5613afb4 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetDaggerSourceTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetDaggerSourceTest.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.core.source.parquet; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.common.serde.DaggerDeserializer; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.common.serde.proto.deserialization.ProtoDeserializer; -import io.odpf.dagger.core.exception.DaggerConfigurationException; -import io.odpf.dagger.core.source.config.StreamConfig; -import io.odpf.dagger.core.source.config.models.SourceDetails; -import io.odpf.dagger.core.source.config.models.SourceName; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.depot.metrics.StatsDReporter; +package com.gotocompany.dagger.core.source.parquet; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.exception.DaggerConfigurationException; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.StreamConfig; +import com.gotocompany.dagger.core.source.config.models.SourceDetails; +import com.gotocompany.dagger.core.source.config.models.SourceName; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.common.serde.DaggerDeserializer; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.dagger.common.serde.proto.deserialization.ProtoDeserializer; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.connector.file.src.FileSource; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; @@ -24,8 +24,6 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import 
org.powermock.modules.junit4.PowerMockRunner; -import static io.odpf.dagger.core.source.parquet.SourceParquetReadOrderStrategy.EARLIEST_INDEX_FIRST; -import static io.odpf.dagger.core.source.parquet.SourceParquetReadOrderStrategy.EARLIEST_TIME_URL_FIRST; import static org.junit.Assert.*; import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; @@ -121,7 +119,7 @@ public void shouldBeAbleToRegisterSourceWithExecutionEnvironmentForCorrectConfig public void shouldUseStreamConfigurationToBuildTheFileSource() { /* the below call mocks ensure that the function calls are indeed made to build the source and the code compiles */ when(streamConfig.getSchemaTable()).thenReturn("data_stream_0"); - when(streamConfig.getParquetFilesReadOrderStrategy()).thenReturn(EARLIEST_TIME_URL_FIRST); + when(streamConfig.getParquetFilesReadOrderStrategy()).thenReturn(SourceParquetReadOrderStrategy.EARLIEST_TIME_URL_FIRST); when(streamConfig.getParquetFilePaths()).thenReturn(new String[]{"gs://sshsh", "gs://shadd"}); ParquetDaggerSource daggerSource = new ParquetDaggerSource(streamConfig, configuration, daggerDeserializer, statsDReporterSupplierMock); @@ -131,7 +129,7 @@ public void shouldUseStreamConfigurationToBuildTheFileSource() { @Test public void shouldThrowRuntimeExceptionAndReportErrorIfReadOrderStrategyIsNotSupported() { - when(streamConfig.getParquetFilesReadOrderStrategy()).thenReturn(EARLIEST_INDEX_FIRST); + when(streamConfig.getParquetFilesReadOrderStrategy()).thenReturn(SourceParquetReadOrderStrategy.EARLIEST_INDEX_FIRST); when(streamConfig.getParquetFilePaths()).thenReturn(new String[]{"gs://sshsh", "gs://shadd"}); ParquetDaggerSource daggerSource = new ParquetDaggerSource(streamConfig, configuration, daggerDeserializer, statsDReporterSupplierMock); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetFileRecordFormatTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetFileRecordFormatTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetFileRecordFormatTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetFileRecordFormatTest.java index 59a5ad7f1..0f491feb7 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetFileRecordFormatTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetFileRecordFormatTest.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.core.source.parquet; +package com.gotocompany.dagger.core.source.parquet; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.source.parquet.reader.ParquetReader; -import io.odpf.dagger.core.source.parquet.reader.ReaderProvider; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.parquet.reader.ParquetReader; +import com.gotocompany.dagger.core.source.parquet.reader.ReaderProvider; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.configuration.Configuration; import org.apache.flink.connector.file.src.reader.FileRecordFormat; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetFileSourceTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetFileSourceTest.java 
similarity index 84% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetFileSourceTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetFileSourceTest.java index 2cda4ba4c..87d0e744f 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/ParquetFileSourceTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/ParquetFileSourceTest.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.core.source.parquet; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.source.config.models.SourceType; -import io.odpf.dagger.core.source.parquet.ParquetFileSource.Builder; -import io.odpf.dagger.core.source.parquet.path.HourDatePathParser; -import io.odpf.dagger.core.source.parquet.splitassigner.ChronologyOrderedSplitAssigner; -import io.odpf.depot.metrics.StatsDReporter; +package com.gotocompany.dagger.core.source.parquet; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.models.SourceType; +import com.gotocompany.dagger.core.source.parquet.path.HourDatePathParser; +import com.gotocompany.dagger.core.source.parquet.splitassigner.ChronologyOrderedSplitAssigner; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.flink.connector.file.src.assigners.LocalityAwareSplitAssigner; import org.apache.flink.connector.file.src.reader.FileRecordFormat; import org.apache.flink.core.fs.Path; import org.apache.flink.types.Row; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -40,7 +40,7 @@ public void setup() { @Test public void shouldBuildParquetFileSourceAsPerArguments() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); Path[] filePaths = new Path[]{new Path("gs://aadadc"), new Path("gs://sjsjhd")}; ChronologyOrderedSplitAssigner.ChronologyOrderedSplitAssignerBuilder splitAssignerBuilder = new ChronologyOrderedSplitAssigner.ChronologyOrderedSplitAssignerBuilder(); splitAssignerBuilder @@ -58,12 +58,12 @@ public void shouldBuildParquetFileSourceAsPerArguments() { assertArrayEquals(filePaths, parquetFileSource.getFilePaths()); assertEquals(fileRecordFormat, parquetFileSource.getFileRecordFormat()); assertEquals(configuration, parquetFileSource.getConfiguration()); - assertEquals(SourceType.BOUNDED, parquetFileSource.getSourceType()); + Assert.assertEquals(SourceType.BOUNDED, parquetFileSource.getSourceType()); } @Test public void shouldThrowExceptionAndReportErrorIfSourceTypeConfiguredAsUnbounded() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); Path[] filePaths = new Path[]{new Path("gs://aadadc"), new Path("gs://sjsjhd")}; IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, @@ -82,7 +82,7 @@ public void shouldThrowExceptionAndReportErrorIfSourceTypeConfiguredAsUnbounded( @Test public void shouldThrowExceptionAndReportErrorIfFileRecordFormatIsNotSet() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); Path[] filePaths = new Path[]{new Path("gs://aadadc"), new Path("gs://sjsjhd")}; IllegalArgumentException ex = 
assertThrows(IllegalArgumentException.class, @@ -100,7 +100,7 @@ public void shouldThrowExceptionAndReportErrorIfFileRecordFormatIsNotSet() { @Test public void shouldThrowExceptionAndReportErrorIfNoFilePathsSet() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> builder.setConfiguration(configuration) @@ -117,7 +117,7 @@ public void shouldThrowExceptionAndReportErrorIfNoFilePathsSet() { @Test public void shouldThrowExceptionIfStatsDReporterSupplierIsNotSet() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); Path[] filePaths = new Path[]{new Path("gs://aadadc"), new Path("gs://sjsjhd")}; IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, @@ -133,7 +133,7 @@ public void shouldThrowExceptionIfStatsDReporterSupplierIsNotSet() { @Test public void shouldUseDefaultValueForSomeFieldsWhichAreNotConfiguredExplicitly() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); Path[] filePaths = new Path[]{new Path("gs://aadadc"), new Path("gs://sjsjhd")}; ParquetFileSource parquetFileSource = builder.setConfiguration(configuration) .setFileRecordFormat(fileRecordFormat) @@ -142,7 +142,7 @@ public void shouldUseDefaultValueForSomeFieldsWhichAreNotConfiguredExplicitly() .build(); assertTrue(parquetFileSource.getFileSplitAssigner().create(emptyList()) instanceof LocalityAwareSplitAssigner); - assertEquals(SourceType.BOUNDED, parquetFileSource.getSourceType()); + Assert.assertEquals(SourceType.BOUNDED, parquetFileSource.getSourceType()); } /* this test just verifies that the code for generating the FileSource compiles successfully and runs. 
*/ @@ -150,7 +150,7 @@ public void shouldUseDefaultValueForSomeFieldsWhichAreNotConfiguredExplicitly() /* returned blackbox object */ @Test public void shouldReturnAFileSourceMadeFromParquetFileSource() { - Builder builder = Builder.getInstance(); + ParquetFileSource.Builder builder = ParquetFileSource.Builder.getInstance(); Path[] filePaths = new Path[]{new Path("gs://aadadc"), new Path("gs://sjsjhd")}; ParquetFileSource parquetFileSource = builder.setConfiguration(configuration) .setFileRecordFormat(fileRecordFormat) diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/path/HourDatePathParserTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/path/HourDatePathParserTest.java similarity index 96% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/path/HourDatePathParserTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/path/HourDatePathParserTest.java index 1b7d1f021..fd692d805 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/path/HourDatePathParserTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/path/HourDatePathParserTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.core.source.parquet.path; +package com.gotocompany.dagger.core.source.parquet.path; import org.apache.flink.core.fs.Path; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/reader/ParquetReaderTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/reader/ParquetReaderTest.java similarity index 89% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/reader/ParquetReaderTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/reader/ParquetReaderTest.java index 66413cab2..71d051eb8 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/reader/ParquetReaderTest.java +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/reader/ParquetReaderTest.java @@ -1,17 +1,19 @@ -package io.odpf.dagger.core.source.parquet.reader; +package com.gotocompany.dagger.core.source.parquet.reader; -import io.odpf.dagger.common.exceptions.serde.DaggerDeserializationException; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; -import io.odpf.dagger.core.exception.ParquetFileSourceReaderInitializationException; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.common.exceptions.serde.DaggerDeserializationException; +import com.gotocompany.dagger.core.exception.ParquetFileSourceReaderInitializationException; +import com.gotocompany.dagger.core.metrics.aspects.ParquetReaderAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.common.serde.parquet.deserialization.SimpleGroupDeserializer; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.commons.lang3.ArrayUtils; import org.apache.flink.connector.file.src.util.CheckpointedPosition; import org.apache.flink.types.Row; import org.apache.parquet.example.data.simple.SimpleGroup; import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.LogicalTypeAnnotation; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -24,11 +26,6 @@ import java.io.IOException; import java.util.List; -import static 
io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_CLOSED; -import static io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_CREATED; -import static io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_ROWS_EMITTED; -import static io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_ROW_DESERIALIZATION_TIME; -import static io.odpf.dagger.core.metrics.aspects.ParquetReaderAspects.READER_ROW_READ_TIME; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64; import static org.apache.parquet.schema.Types.*; @@ -64,7 +61,7 @@ public void shouldRaiseMetricsWhenInitialized() { provider.getReader(filePath); - verify(statsDReporter, Mockito.times(1)).captureCount(READER_CREATED.getValue(), 1L, "component=parquet_reader"); + verify(statsDReporter, Mockito.times(1)).captureCount(ParquetReaderAspects.READER_CREATED.getValue(), 1L, "component=parquet_reader"); } @Test @@ -76,7 +73,7 @@ public void shouldRaiseMetricsWhenReadingFileAndDeserializingToRows() throws IOE ParquetReader reader = provider.getReader(filePath); reader.read(); - verify(statsDReporter, Mockito.times(1)).captureCount(READER_ROWS_EMITTED.getValue(), 1L, "component=parquet_reader"); + verify(statsDReporter, Mockito.times(1)).captureCount(ParquetReaderAspects.READER_ROWS_EMITTED.getValue(), 1L, "component=parquet_reader"); ArgumentCaptor measurementNameCaptor = ArgumentCaptor.forClass(String.class); ArgumentCaptor executionTimeCaptor = ArgumentCaptor.forClass(Long.class); @@ -84,8 +81,8 @@ public void shouldRaiseMetricsWhenReadingFileAndDeserializingToRows() throws IOE verify(statsDReporter, Mockito.times(2)).captureHistogram(measurementNameCaptor.capture(), executionTimeCaptor.capture(), tagCaptor.capture()); - assertEquals(READER_ROW_READ_TIME.getValue(), measurementNameCaptor.getAllValues().get(0)); - assertEquals(READER_ROW_DESERIALIZATION_TIME.getValue(), measurementNameCaptor.getAllValues().get(1)); + Assert.assertEquals(ParquetReaderAspects.READER_ROW_READ_TIME.getValue(), measurementNameCaptor.getAllValues().get(0)); + Assert.assertEquals(ParquetReaderAspects.READER_ROW_DESERIALIZATION_TIME.getValue(), measurementNameCaptor.getAllValues().get(1)); assertEquals("component=parquet_reader", tagCaptor.getAllValues().get(0)); assertEquals("component=parquet_reader", tagCaptor.getAllValues().get(1)); } @@ -98,7 +95,7 @@ public void shouldRaiseMetricsWhenClosingTheReader() throws IOException { provider.getReader(filePath).close(); - verify(statsDReporter, Mockito.times(1)).captureCount(READER_CLOSED.getValue(), 1L, "component=parquet_reader"); + verify(statsDReporter, Mockito.times(1)).captureCount(ParquetReaderAspects.READER_CLOSED.getValue(), 1L, "component=parquet_reader"); } @Test diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssignerTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssignerTest.java similarity index 94% rename from dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssignerTest.java rename to dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssignerTest.java index 34ab3bade..3b7cd6367 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssignerTest.java +++ 
b/dagger-core/src/test/java/com/gotocompany/dagger/core/source/parquet/splitassigner/ChronologyOrderedSplitAssignerTest.java @@ -1,12 +1,13 @@ -package io.odpf.dagger.core.source.parquet.splitassigner; +package com.gotocompany.dagger.core.source.parquet.splitassigner; -import io.odpf.dagger.core.exception.PathParserNotProvidedException; -import io.odpf.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; -import io.odpf.dagger.core.source.config.models.TimeRange; -import io.odpf.dagger.core.source.config.models.TimeRangePool; -import io.odpf.dagger.core.source.parquet.path.HourDatePathParser; -import io.odpf.depot.metrics.StatsDReporter; +import com.gotocompany.dagger.core.exception.PathParserNotProvidedException; +import com.gotocompany.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects; +import com.gotocompany.dagger.core.metrics.reporters.statsd.SerializedStatsDReporterSupplier; +import com.gotocompany.dagger.core.source.config.models.TimeRange; +import com.gotocompany.dagger.core.source.config.models.TimeRangePool; +import com.gotocompany.dagger.core.source.parquet.path.HourDatePathParser; +import com.gotocompany.depot.metrics.StatsDReporter; import org.apache.flink.connector.file.src.FileSourceSplit; import org.apache.flink.core.fs.Path; import org.junit.Assert; @@ -22,9 +23,6 @@ import java.util.List; import java.util.Optional; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.SPLITS_AWAITING_ASSIGNMENT; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED; -import static io.odpf.dagger.core.metrics.aspects.ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_RECORDED; import static org.junit.Assert.*; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -293,8 +291,8 @@ public void shouldRaiseMetricsForDiscoveredSplitsAndRecordedSplitsWithTagsWhenIn .addStatsDReporterSupplier(statsDReporterSupplierMock) .build(inputSplits); - verify(statsDReporter, times(1)).gauge(TOTAL_SPLITS_DISCOVERED.getValue(), 4, "component=split_assigner"); - verify(statsDReporter, times(1)).gauge(TOTAL_SPLITS_RECORDED.getValue(), 3, "component=split_assigner"); + verify(statsDReporter, times(1)).gauge(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_DISCOVERED.getValue(), 4, "component=split_assigner"); + verify(statsDReporter, times(1)).gauge(ChronologyOrderedSplitAssignerAspects.TOTAL_SPLITS_RECORDED.getValue(), 3, "component=split_assigner"); } @Test @@ -309,7 +307,7 @@ public void shouldRaiseMetricsWithTagsAfterAssigningSplits() { .build(inputSplits); splitAssigner.getNext(null); - verify(statsDReporter, times(1)).gauge(SPLITS_AWAITING_ASSIGNMENT.getValue(), 1, "component=split_assigner"); + verify(statsDReporter, times(1)).gauge(ChronologyOrderedSplitAssignerAspects.SPLITS_AWAITING_ASSIGNMENT.getValue(), 1, "component=split_assigner"); } @Test diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/utils/DescriptorsUtilTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/utils/DescriptorsUtilTest.java new file mode 100644 index 000000000..03844f07f --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/utils/DescriptorsUtilTest.java @@ -0,0 +1,103 @@ +package com.gotocompany.dagger.core.utils; + +import com.google.protobuf.Descriptors; +import com.gotocompany.dagger.consumer.TestCustomerLogMessage; +import com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage; +import org.junit.Test; + 
+import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +public class DescriptorsUtilTest { + + @Test + public void shouldGetFieldDescriptor() { + String fieldName = "customer_id"; + Descriptors.Descriptor descriptor = TestCustomerLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, fieldName); + assertNotNull(fieldDescriptor); + assertEquals("customer_id", fieldDescriptor.getName()); + } + + @Test + public void shouldRunGetNestedFieldDescriptor() { + String fieldName = "customer_profile.customer_id"; + Descriptors.Descriptor descriptor = TestEnrichedBookingLogMessage.getDescriptor(); + Descriptors.FieldDescriptor fieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, fieldName); + assertNotNull(fieldDescriptor); + assertEquals("customer_id", fieldDescriptor.getName()); + } + + @Test + public void shouldRunGetNestedFieldColumnsDescriptor() { + Descriptors.Descriptor parentDescriptor = TestEnrichedBookingLogMessage.getDescriptor(); + String[] nestedFieldNames = {"customer_profile", "customer_id"}; + Descriptors.FieldDescriptor fieldDescriptor = DescriptorsUtil.getNestedFieldDescriptor(parentDescriptor, nestedFieldNames); + assertNotNull(fieldDescriptor); + assertEquals("customer_id", fieldDescriptor.getName()); + } + + @Test + public void shouldGiveNullForEmptyFieldFieldDescriptor() { + String nonExistentField = "customer-id"; + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getFieldDescriptor(null, nonExistentField); + assertNull(nonExistentFieldDescriptor); + } + @Test + public void shouldGiveNullForNullColumnFieldFieldDescriptor() { + Descriptors.Descriptor descriptor = TestCustomerLogMessage.getDescriptor(); + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, null); + assertNull(nonExistentFieldDescriptor); + } + @Test + public void shouldGiveNullForEmptyColumnFieldFieldDescriptor() { + Descriptors.Descriptor descriptor = TestCustomerLogMessage.getDescriptor(); + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, ""); + assertNull(nonExistentFieldDescriptor); + } + @Test + public void shouldGiveNullForInvalidFieldFieldDescriptor() { + Descriptors.Descriptor descriptor = TestCustomerLogMessage.getDescriptor(); + String nonExistentField = "customer-id"; + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getFieldDescriptor(descriptor, nonExistentField); + assertNull(nonExistentFieldDescriptor); + } + + @Test + public void shouldGiveNullForInvalidNestedFieldDescriptor() { + Descriptors.Descriptor parentDescriptor = TestEnrichedBookingLogMessage.getDescriptor(); + String fieldName = "customer_profile.customer-id"; + Descriptors.FieldDescriptor invalidFieldDescriptor = DescriptorsUtil.getFieldDescriptor(parentDescriptor, fieldName); + assertNull(invalidFieldDescriptor); + } + + + @Test + public void shouldGiveNullForInvalidNestedFieldColumnsDescriptor() { + Descriptors.Descriptor parentDescriptor = TestEnrichedBookingLogMessage.getDescriptor(); + String[] invalidNestedFieldNames = {"customer_profile", "customer-id"}; + Descriptors.FieldDescriptor invalidFieldDescriptor = DescriptorsUtil.getNestedFieldDescriptor(parentDescriptor, invalidNestedFieldNames); + assertNull(invalidFieldDescriptor); + } + + @Test + public void 
shouldGiveNullForNullNestedFieldDescriptor() { + String[] nonExistentField = new String[]{"customer-id"}; + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getNestedFieldDescriptor(null, nonExistentField); + assertNull(nonExistentFieldDescriptor); + } + + @Test + public void shouldGiveNullForNullColumnNestedFieldDescriptor() { + Descriptors.Descriptor descriptor = TestCustomerLogMessage.getDescriptor(); + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getNestedFieldDescriptor(descriptor, null); + assertNull(nonExistentFieldDescriptor); + } + @Test + public void shouldGiveNullForEmptyColumnNestedFieldDescriptor() { + Descriptors.Descriptor descriptor = TestCustomerLogMessage.getDescriptor(); + Descriptors.FieldDescriptor nonExistentFieldDescriptor = DescriptorsUtil.getNestedFieldDescriptor(descriptor, new String[]{}); + assertNull(nonExistentFieldDescriptor); + } +} diff --git a/dagger-core/src/test/java/com/gotocompany/dagger/core/utils/KafkaConfigUtilTest.java b/dagger-core/src/test/java/com/gotocompany/dagger/core/utils/KafkaConfigUtilTest.java new file mode 100644 index 000000000..50f7d198d --- /dev/null +++ b/dagger-core/src/test/java/com/gotocompany/dagger/core/utils/KafkaConfigUtilTest.java @@ -0,0 +1,105 @@ +package com.gotocompany.dagger.core.utils; + +import com.gotocompany.dagger.core.enumeration.KafkaConnectorTypesMetadata; +import org.junit.Test; + +import java.util.Properties; + +import static org.junit.Assert.assertEquals; + +public class KafkaConfigUtilTest { + + @Test + public void shouldParseMatchingSourceKafkaConsumerConfiguration() { + Properties properties = new Properties(); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1", "value1"); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2", "value2"); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIGKEY_3", "value3"); + properties.put("INVALID_KEY_4", "value4"); + + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, properties); + + assertEquals(2, kafkaProperties.size()); + assertEquals("value1", kafkaProperties.getProperty("key.1")); + assertEquals("value2", kafkaProperties.getProperty("key.2")); + } + + @Test + public void shouldParseMatchingSinkKafkaProducerConfiguration() { + Properties properties = new Properties(); + properties.put("SINK_KAFKA_PRODUCER_CONFIG_KEY_1", "value1"); + properties.put("SINK_KAFKA_PRODUCER_CONFIG_KEY_2", "value2"); + properties.put("SINK_KAFKA_PRODUCER_CONFIGKEY_3", "value3"); + properties.put("INVALID_KEY_4", "value4"); + + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SINK, properties); + + assertEquals(2, kafkaProperties.size()); + assertEquals("value1", kafkaProperties.getProperty("key.1")); + assertEquals("value2", kafkaProperties.getProperty("key.2")); + } + + @Test + public void shouldReturnEmptyPropertiesWhenInputIsEmpty() { + Properties properties = new Properties(); + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, properties); + + assertEquals(0, kafkaProperties.size()); + } + + @Test + public void shouldReturnEmptyPropertiesWhenAllKeysAreInvalid() { + Properties properties = new Properties(); + properties.put("INVALID_KEY_1", "value1"); + properties.put("INVALID_KEY_2", "value2"); + properties.put("ANOTHER_INVALID_KEY", "value3"); + + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, properties); + + 
assertEquals(0, kafkaProperties.size()); + } + + @Test + public void shouldParseOnlyValidKeysWhenMixedWithInvalidOnes() { + Properties properties = new Properties(); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_1", "value1"); + properties.put("INVALID_KEY", "value2"); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_KEY_2", "value3"); + properties.put("ANOTHER_INVALID_KEY", "value4"); + + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, properties); + + assertEquals(2, kafkaProperties.size()); + assertEquals("value1", kafkaProperties.getProperty("key.1")); + assertEquals("value3", kafkaProperties.getProperty("key.2")); + } + + @Test + public void shouldParseCaseInsensitiveKeys() { + Properties properties = new Properties(); + properties.put("source_kafka_consumer_config_KEY_1", "value1"); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_key_2", "value2"); + + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, properties); + + assertEquals(2, kafkaProperties.size()); + assertEquals("value1", kafkaProperties.getProperty("key.1")); + assertEquals("value2", kafkaProperties.getProperty("key.2")); + } + + @Test + public void shouldParseKeysWithMultipleUnderscores() { + Properties properties = new Properties(); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_MULTI_WORD_KEY", "value1"); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG_ANOTHER_MULTI_WORD_KEY", "value2"); + properties.put("SOURCE_KAFKA_CONSUMER_CONFIG__YET___ANOTHER_MULTI__WORD_KEY", "value3"); + + Properties kafkaProperties = KafkaConfigUtil.parseKafkaConfiguration(KafkaConnectorTypesMetadata.SOURCE, properties); + + assertEquals(3, kafkaProperties.size()); + assertEquals("value1", kafkaProperties.getProperty("multi.word.key")); + assertEquals("value2", kafkaProperties.getProperty("another.multi.word.key")); + assertEquals("value3", kafkaProperties.getProperty("yet.another.multi.word.key")); + } + +} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcClientTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcClientTest.java deleted file mode 100644 index 871dcadce..000000000 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/grpc/client/GrpcClientTest.java +++ /dev/null @@ -1,53 +0,0 @@ -package io.odpf.dagger.core.processors.external.grpc.client; - -import io.grpc.Channel; -import io.odpf.dagger.core.processors.external.grpc.GrpcSourceConfig; -import org.junit.Test; - -import static org.junit.Assert.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class GrpcClientTest { - - private GrpcSourceConfig grpcSourceConfig; - - @Test - public void channelShouldBeAddedForAHostAndPort() { - - grpcSourceConfig = mock(GrpcSourceConfig.class); - - GrpcClient grpcClient = new GrpcClient(grpcSourceConfig); - - when(grpcSourceConfig.getEndpoint()).thenReturn("localhost"); - when(grpcSourceConfig.getServicePort()).thenReturn(8080); - - grpcClient.addChannel(); - - Channel decoratedChannel = grpcClient.getDecoratedChannel(); - assertNotNull(decoratedChannel); - - } - - @Test - public void grpcClientCloseShouldWork() { - - grpcSourceConfig = mock(GrpcSourceConfig.class); - - GrpcClient grpcClient = new GrpcClient(grpcSourceConfig); - - when(grpcSourceConfig.getEndpoint()).thenReturn("localhost"); - when(grpcSourceConfig.getServicePort()).thenReturn(8080); - - grpcClient.addChannel(); - - 
Channel decoratedChannel = grpcClient.getDecoratedChannel(); - assertNotNull(decoratedChannel); - - grpcClient.close(); - decoratedChannel = grpcClient.getDecoratedChannel(); - assertNull(decoratedChannel); - - } - -} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandlerTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandlerTest.java deleted file mode 100644 index aecfabf83..000000000 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandlerTest.java +++ /dev/null @@ -1,428 +0,0 @@ -package io.odpf.dagger.core.processors.external.http; - -import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.metrics.aspects.Aspects; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestSurgeFactorLogMessage; -import io.odpf.dagger.core.exception.HttpFailureException; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.OutputMapping; -import io.odpf.dagger.core.processors.common.PostResponseTelemetry; -import io.odpf.dagger.core.processors.common.RowManager; -import org.apache.flink.streaming.api.functions.async.ResultFuture; -import org.apache.flink.types.Row; -import org.asynchttpclient.Response; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; - -import static io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; -import static org.mockito.MockitoAnnotations.initMocks; - -public class HttpResponseHandlerTest { - - @Mock - private ResultFuture resultFuture; - - @Mock - private Response response; - - @Mock - private MeterStatsManager meterStatsManager; - - @Mock - private HttpSourceConfig httpSourceConfig; - - @Mock - private ErrorReporter errorReporter; - - private Descriptors.Descriptor descriptor; - private List outputColumnNames; - private String[] inputColumnNames; - private HashMap outputMapping; - private HashMap headers; - private String httpConfigType; - private Row streamData; - private RowManager rowManager; - private ColumnNameManager columnNameManager; - private Row inputData; - - @Before - public void setup() { - initMocks(this); - descriptor = TestSurgeFactorLogMessage.getDescriptor(); - outputColumnNames = Collections.singletonList("value"); - inputColumnNames = new String[] {"order_id", "customer_id", "driver_id"}; - outputMapping = new HashMap<>(); - headers = new HashMap<>(); - headers.put("content-type", "application/json"); - httpConfigType = "test"; - streamData = new Row(2); - inputData = new Row(3); - inputData.setField(1, "123456"); - streamData.setField(0, inputData); - streamData.setField(1, new Row(2)); - rowManager = new RowManager(streamData); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - } - - @Test - public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIs404() { - HttpResponseHandler httpResponseHandler = new 
HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(404); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(FAILURE_CODE_404); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); - assertEquals("Received status code : 404", failureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIs4XXOtherThan404() { - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(402); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(FAILURE_CODE_4XX); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); - assertEquals("Received status code : 402", failureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIs5XX() { - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(502); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(FAILURE_CODE_5XX); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); - assertEquals("Received status code : 502", failureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldPassInputIfFailOnErrorFalseAndStatusCodeIsOtherThan5XXAnd4XX() { - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(302); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(meterStatsManager, times(1)).markEvent(OTHER_ERRORS); - verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - ArgumentCaptor failureCaptor = 
ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); - assertEquals("Received status code : 302", failureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIs404() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(404); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(resultFuture).completeExceptionally(any(HttpFailureException.class)); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)) - .reportFatalException(argumentCaptor.capture()); - assertEquals("Received status code : 404", argumentCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(FAILURE_CODE_404); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIs4XXOtherThan404() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(400); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); - assertEquals("Received status code : 400", failureCaptor.getValue().getMessage()); - verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); - verify(meterStatsManager, times(1)).markEvent(FAILURE_CODE_4XX); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIs5XX() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(502); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(resultFuture, 
times(1)).completeExceptionally(failureCaptor.capture()); - assertEquals("Received status code : 502", failureCaptor.getValue().getMessage()); - verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); - verify(meterStatsManager, times(1)).markEvent(FAILURE_CODE_5XX); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldThrowErrorIfFailOnErrorTrueAndStatusCodeIsOtherThan5XXAnd4XX() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - when(response.getStatusCode()).thenReturn(302); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(resultFuture, times(1)).completeExceptionally(failureCaptor.capture()); - assertEquals("Received status code : 302", failureCaptor.getValue().getMessage()); - verify(errorReporter, times(1)).reportFatalException(any(HttpFailureException.class)); - verify(meterStatsManager, times(1)).markEvent(OTHER_ERRORS); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldPassInputIfFailOnErrorFalseAndOnThrowable() { - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Throwable throwable = new Throwable("throwable message"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onThrowable(throwable); - - verify(resultFuture, times(1)).complete(Collections.singleton(streamData)); - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)).reportNonFatalException(failureCaptor.capture()); - assertEquals("throwable message", failureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldThrowErrorIfFailOnErrorTrueAndOnThrowable() { - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Throwable throwable = new Throwable("throwable message"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onThrowable(throwable); - - verify(resultFuture).completeExceptionally(any(RuntimeException.class)); - ArgumentCaptor failureCaptor = ArgumentCaptor.forClass(HttpFailureException.class); - verify(errorReporter, times(1)).reportFatalException(failureCaptor.capture()); - assertEquals("throwable message", 
failureCaptor.getValue().getMessage()); - verify(meterStatsManager, times(1)).markEvent(OTHER_ERRORS); - verify(meterStatsManager, times(1)).markEvent(TOTAL_FAILED_REQUESTS); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldPopulateSingleResultFromHttpCallInInputRow() { - outputMapping.put("surge_factor", new OutputMapping("$.surge")); - outputColumnNames = Collections.singletonList("surge_factor"); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Row resultStreamData = new Row(2); - Row outputData = new Row(2); - outputData.setField(0, 0.732f); - resultStreamData.setField(0, inputData); - resultStreamData.setField(1, outputData); - when(response.getStatusCode()).thenReturn(200); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732\n" - + "}"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); - } - - @Test - public void shouldPopulateMultipleResultsFromHttpCallInInputRow() { - outputMapping.put("surge_factor", new OutputMapping("$.surge")); - outputMapping.put("s2_id_level", new OutputMapping("$.prediction")); - outputColumnNames = Arrays.asList("surge_factor", "s2_id_level"); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Row resultStreamData = new Row(2); - Row outputData = new Row(2); - outputData.setField(0, 0.732f); - outputData.setField(1, 345); - resultStreamData.setField(0, inputData); - resultStreamData.setField(1, outputData); - when(response.getStatusCode()).thenReturn(200); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732,\n" - + " \"prediction\": 345\n" - + "}"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); - verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - } - - @Test - public void shouldThrowExceptionIfFieldNotFoundInFieldDescriptorWhenTypeIsPassed() { - httpConfigType = "io.odpf.dagger.consumer.TestBookingLogMessage"; - descriptor = TestBookingLogMessage.getDescriptor(); - outputMapping.put("surge_factor", new OutputMapping("$.surge")); - outputColumnNames = Collections.singletonList("surge_factor"); - columnNameManager = new 
ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - when(response.getStatusCode()).thenReturn(200); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732\n" - + "}"); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - - httpResponseHandler.startTimer(); - assertThrows(NullPointerException.class, - () -> httpResponseHandler.onCompleted(response)); - verify(resultFuture, times(1)).completeExceptionally(any(IllegalArgumentException.class)); - } - - @Test - public void shouldThrowExceptionIfPathIsWrongIfFailOnErrorsTrue() { - outputMapping.put("surge_factor", new OutputMapping("invalidPath")); - outputColumnNames = Collections.singletonList("surge_factor"); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", true, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Row resultStreamData = new Row(2); - Row outputData = new Row(2); - outputData.setField(0, 0.732f); - resultStreamData.setField(0, inputData); - resultStreamData.setField(1, outputData); - when(response.getStatusCode()).thenReturn(200); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732\n" - + "}"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(resultFuture, times(1)).completeExceptionally(any(RuntimeException.class)); - verify(errorReporter, times(1)).reportFatalException(any(RuntimeException.class)); - verify(meterStatsManager, times(1)).markEvent(FAILURES_ON_READING_PATH); - } - - @Test - public void shouldPopulateResultAsObjectIfTypeIsNotPassedAndRetainResponseTypeIsTrue() { - outputMapping.put("surge_factor", new OutputMapping("$.surge")); - outputColumnNames = Collections.singletonList("surge_factor"); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, "345", headers, outputMapping, "metricId_02", true); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Row resultStreamData = new Row(2); - Row outputData = new Row(2); - outputData.setField(0, 0.732); - resultStreamData.setField(0, inputData); - resultStreamData.setField(1, outputData); - when(response.getStatusCode()).thenReturn(200); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732\n" - + "}"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - 
verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); - } - - @Test - public void shouldNotPopulateResultAsObjectIfTypeIsNotPassedAndRetainResponseTypeIsFalse() { - outputMapping.put("surge_factor", new OutputMapping("$.surge")); - outputColumnNames = Collections.singletonList("surge_factor"); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, null, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Row resultStreamData = new Row(2); - Row outputData = new Row(2); - outputData.setField(0, 0.732f); - resultStreamData.setField(0, inputData); - resultStreamData.setField(1, outputData); - when(response.getStatusCode()).thenReturn(200); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732\n" - + "}"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); - } - - @Test - public void shouldHandleAnySuccessResponseCodeOtherThan200() { - outputMapping.put("surge_factor", new OutputMapping("$.surge")); - outputColumnNames = Collections.singletonList("surge_factor"); - columnNameManager = new ColumnNameManager(inputColumnNames, outputColumnNames); - httpSourceConfig = new HttpSourceConfig("http://localhost:8080/test", "", "POST", "{\"key\": \"%s\"}", "customer_id", "", "", "123", "234", false, httpConfigType, "345", headers, outputMapping, "metricId_02", false); - HttpResponseHandler httpResponseHandler = new HttpResponseHandler(httpSourceConfig, meterStatsManager, rowManager, columnNameManager, descriptor, resultFuture, errorReporter, new PostResponseTelemetry()); - Row resultStreamData = new Row(2); - Row outputData = new Row(2); - outputData.setField(0, 0.732f); - resultStreamData.setField(0, inputData); - resultStreamData.setField(1, outputData); - when(response.getStatusCode()).thenReturn(201); - when(response.getResponseBody()).thenReturn("{\n" - + " \"surge\": 0.732\n" - + "}"); - - httpResponseHandler.startTimer(); - httpResponseHandler.onCompleted(response); - - verify(meterStatsManager, times(1)).markEvent(SUCCESS_RESPONSE); - verify(meterStatsManager, times(1)).updateHistogram(any(Aspects.class), any(Long.class)); - verify(resultFuture, times(1)).complete(Collections.singleton(resultStreamData)); - } -} diff --git a/dagger-functions/build.gradle b/dagger-functions/build.gradle index 1303f9494..f884fe42f 100644 --- a/dagger-functions/build.gradle +++ b/dagger-functions/build.gradle @@ -57,13 +57,16 @@ dependencies { compileOnly group: 'org.apache.flink', name: 'flink-table-api-java-bridge_2.11', version: flinkVersion compileOnly group: 'org.apache.flink', name: 'flink-streaming-java_2.11', version: flinkVersion compileOnly group: 'org.apache.flink', name: 'flink-metrics-dropwizard', version: flinkVersion + compileOnly 'com.gotocompany:stencil:0.6.0' dependenciesFunctionsJar 'com.github.davidmoten:geo:0.7.6' dependenciesFunctionsJar 'org.apache.flink:flink-python_2.11:' + flinkVersion 
dependenciesFunctionsJar group: 'org.apache.commons', name: 'commons-jexl3', version: '3.1' dependenciesFunctionsJar group: 'org.isuper', name: 's2-geometry-library-java', version: '0.0.1' - dependenciesFunctionsJar group: 'com.google.cloud', name: 'google-cloud-storage', version: '1.67.0' - + dependenciesFunctionsJar group: 'com.google.cloud', name: 'google-cloud-storage', version: '2.23.0' + dependenciesFunctionsJar group: 'com.aliyun.oss', name: 'aliyun-sdk-oss', version: '3.18.1' + dependenciesFunctionsJar group: 'com.qcloud', name: 'cos_api', version: '5.6.227' + testImplementation project(':dagger-common').sourceSets.test.output testImplementation group: 'junit', name: 'junit', version: '4.12' testImplementation 'org.mockito:mockito-core:2.0.99-beta' diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/common/Constants.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/common/Constants.java similarity index 55% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/common/Constants.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/common/Constants.java index fff13666c..0befc0b42 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/common/Constants.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/common/Constants.java @@ -1,13 +1,28 @@ -package io.odpf.dagger.functions.common; +package com.gotocompany.dagger.functions.common; public class Constants { public static final Integer NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW = 8; public static final Integer NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR = 3; + + @Deprecated public static final String UDF_DART_GCS_PROJECT_ID_KEY = "UDF_DART_GCS_PROJECT_ID"; + @Deprecated public static final String UDF_DART_GCS_PROJECT_ID_DEFAULT = ""; + @Deprecated public static final String UDF_DART_GCS_BUCKET_ID_KEY = "UDF_DART_GCS_BUCKET_ID"; + @Deprecated public static final String UDF_DART_GCS_BUCKET_ID_DEFAULT = ""; + public static final String UDF_DART_PROJECT_ID_KEY = "UDF_DART_PROJECT_ID"; + public static final String UDF_DART_PROJECT_ID_DEFAULT = ""; + public static final String UDF_DART_BUCKET_ID_KEY = "UDF_DART_BUCKET_ID"; + public static final String UDF_DART_BUCKET_ID_DEFAULT = ""; + + public static final String UDF_STORE_PROVIDER_KEY = "UDF_STORE_PROVIDER"; + public static final String UDF_STORE_PROVIDER_GCS = "GCS"; + public static final String UDF_STORE_PROVIDER_OSS = "OSS"; + public static final String UDF_STORE_PROVIDER_COS = "COS"; + public static final String PYTHON_UDF_CONFIG = "PYTHON_UDF_CONFIG"; public static final String PYTHON_UDF_ENABLE_KEY = "PYTHON_UDF_ENABLE"; public static final boolean PYTHON_UDF_ENABLE_DEFAULT = false; @@ -20,4 +35,14 @@ public class Constants { public static final Integer PYTHON_FN_EXECUTION_BUNDLE_SIZE_DEFAULT = 100000; public static final String PYTHON_FN_EXECUTION_BUNDLE_TIME_KEY = "PYTHON_FN_EXECUTION_BUNDLE_TIME"; public static final long PYTHON_FN_EXECUTION_BUNDLE_TIME_DEFAULT = 1000; + + public static final String OSS_ENDPOINT = "OSS_ENDPOINT"; + public static final String DEFAULT_OSS_ENDPOINT = "oss-ap-southeast-5.aliyuncs.com"; + + public static final String COS_REGION = "COS_REGION"; + public static final String DEFAULT_COS_REGION = "ap-jakarta"; + public static final String ENABLE_TKE_OIDC_PROVIDER = "ENABLE_TKE_OIDC_PROVIDER"; + + public static final String JOB_BUILDER_FQCN_KEY = "JOB_BUILDER_FQCN"; + public static final String DEFAULT_JOB_BUILDER_FQCN = 
"com.gotocompany.dagger.core.DaggerSqlJobBuilder"; } diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/common/CosLibClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/common/CosLibClient.java new file mode 100644 index 000000000..c6a6bad71 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/common/CosLibClient.java @@ -0,0 +1,52 @@ +package com.gotocompany.dagger.functions.common; + +import com.qcloud.cos.COSClient; +import com.qcloud.cos.ClientConfig; +import com.qcloud.cos.auth.BasicCOSCredentials; +import com.qcloud.cos.auth.COSCredentials; +import com.qcloud.cos.region.Region; +import com.tencentcloudapi.common.Credential; +import com.tencentcloudapi.common.exception.TencentCloudSDKException; +import com.tencentcloudapi.common.provider.OIDCRoleArnProvider; +import lombok.Getter; + +/** + * Stateless class. + */ +public class CosLibClient { + @Getter + private static CosLibClient instance = new CosLibClient(); + + private static final String ENV_COS_SECRET_ID = "COS_SECRET_ID"; + private static final String ENV_COS_SECRET_KEY = "COS_SECRET_KEY"; + + // the credential provider provides short living token. If we have a libCosClient long living object with these + // token or say if we refresh it before client usage will not have much benefits. + // Create client when using its operation. + public COSClient get(boolean enableTkeOidcProvider, String cosRegion) { + String secretId, secretKey; + + if (enableTkeOidcProvider) { + try { + Credential credentials = new OIDCRoleArnProvider().getCredentials(); + secretId = credentials.getSecretId(); + secretKey = credentials.getSecretKey(); + } catch (TencentCloudSDKException e) { + throw new RuntimeException("failed to initiate oidc credential provider", e); + } + } else { + secretId = System.getenv(ENV_COS_SECRET_ID); + secretKey = System.getenv(ENV_COS_SECRET_KEY); + } + + COSCredentials cosCredentials = new BasicCOSCredentials(secretId, secretKey); + ClientConfig clientConfig = new ClientConfig(new Region(cosRegion)); + return new COSClient(cosCredentials, clientConfig); + } + + // unit test helper method; Additionally, method has no side effects. + // the current mockito version doesn't support mockStatic. + public static void testOnlySetInstance(CosLibClient cosLibClient) { + CosLibClient.instance = cosLibClient; + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/ArrayAggregationException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/ArrayAggregationException.java similarity index 91% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/ArrayAggregationException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/ArrayAggregationException.java index a3716b912..62a83b331 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/ArrayAggregationException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/ArrayAggregationException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The Exception class for ArrayAggregate udf. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/ArrayOperateException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/ArrayOperateException.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/ArrayOperateException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/ArrayOperateException.java index 1f1a9ff5e..4b84d751a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/ArrayOperateException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/ArrayOperateException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The Exception class for ArrayOperate udf. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/BucketDoesNotExistException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/BucketDoesNotExistException.java similarity index 86% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/BucketDoesNotExistException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/BucketDoesNotExistException.java index bdb40e927..b4c301103 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/BucketDoesNotExistException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/BucketDoesNotExistException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception if Bucket does not exist. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/InvalidHashFieldException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/InvalidHashFieldException.java similarity index 91% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/InvalidHashFieldException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/InvalidHashFieldException.java index 60eaaf6e1..2cc214833 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/InvalidHashFieldException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/InvalidHashFieldException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception for Invalid hash field. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/InvalidNumberOfArgumentsException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/InvalidNumberOfArgumentsException.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/InvalidNumberOfArgumentsException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/InvalidNumberOfArgumentsException.java index 01d283317..0869d4e04 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/InvalidNumberOfArgumentsException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/InvalidNumberOfArgumentsException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception for Invalid number of arguments. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/KeyDoesNotExistException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/KeyDoesNotExistException.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/KeyDoesNotExistException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/KeyDoesNotExistException.java index fa03281d4..7d6759f95 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/KeyDoesNotExistException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/KeyDoesNotExistException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception if Key does not exist. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/LongbowException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/LongbowException.java similarity index 84% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/LongbowException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/LongbowException.java index f6205827c..0d7b6c251 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/LongbowException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/LongbowException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The Exception class for Longbow. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/MadZeroException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/MadZeroException.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/MadZeroException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/MadZeroException.java index dbc500435..f87279a81 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/MadZeroException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/MadZeroException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception if Mad is zero on OutlierMad Udf. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/MedianNotFound.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/MedianNotFound.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/MedianNotFound.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/MedianNotFound.java index 228fc9af6..2a5532db9 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/MedianNotFound.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/MedianNotFound.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception if Median not found on OutlierMad Udf. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/OddNumberOfArgumentsException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/OddNumberOfArgumentsException.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/OddNumberOfArgumentsException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/OddNumberOfArgumentsException.java index f139d812e..5f481a84a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/OddNumberOfArgumentsException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/OddNumberOfArgumentsException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception for Odd number of arguments on Features Udf. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/PythonFilesEmptyException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/PythonFilesEmptyException.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/PythonFilesEmptyException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/PythonFilesEmptyException.java index e849dc208..9c8011f66 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/PythonFilesEmptyException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/PythonFilesEmptyException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The type Python files empty exception. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/PythonFilesFormatException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/PythonFilesFormatException.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/PythonFilesFormatException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/PythonFilesFormatException.java index 02771fda6..85ea19c7f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/PythonFilesFormatException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/PythonFilesFormatException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The type Python files format exception. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/RowHashException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/RowHashException.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/RowHashException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/RowHashException.java index 3837c4d98..c91adb3ea 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/RowHashException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/RowHashException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception if failed on hashing the Row. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/TagDoesNotExistException.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/TagDoesNotExistException.java similarity index 66% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/TagDoesNotExistException.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/TagDoesNotExistException.java index 4ae1f0273..6bdb1eae1 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/exceptions/TagDoesNotExistException.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/exceptions/TagDoesNotExistException.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.exceptions; +package com.gotocompany.dagger.functions.exceptions; /** * The class Exception if Tag does not exist. @@ -12,4 +12,8 @@ public class TagDoesNotExistException extends RuntimeException { public TagDoesNotExistException(String message) { super(message); } + + public TagDoesNotExistException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/ClearColumnTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/ClearColumnTransformer.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/ClearColumnTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/ClearColumnTransformer.java index e37446eca..409ed1985 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/ClearColumnTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/ClearColumnTransformer.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; import java.util.Arrays; import java.util.Map; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/DeDuplicationTransformer.java 
b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/DeDuplicationTransformer.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/DeDuplicationTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/DeDuplicationTransformer.java index 611b28434..1980fde0c 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/DeDuplicationTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/DeDuplicationTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; import org.apache.flink.api.common.functions.RichFilterFunction; import org.apache.flink.api.common.state.MapState; import org.apache.flink.api.common.state.MapStateDescriptor; @@ -11,8 +11,8 @@ import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; import java.util.Arrays; import java.util.Map; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/FeatureTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/FeatureTransformer.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/FeatureTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/FeatureTransformer.java index bba768a45..40aab4aed 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/FeatureTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/FeatureTransformer.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.FeatureUtils; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import io.odpf.dagger.functions.udfs.aggregate.feast.FeatureUtils; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; import java.util.ArrayList; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/FeatureWithTypeTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/FeatureWithTypeTransformer.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/FeatureWithTypeTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/FeatureWithTypeTransformer.java index 4a0a5f789..75730c833 100644 --- 
a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/FeatureWithTypeTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/FeatureWithTypeTransformer.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.functions.transformers.feature.FeatureWithTypeHandler; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import io.odpf.dagger.functions.transformers.feature.FeatureWithTypeHandler; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; import java.util.ArrayList; import java.util.Map; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/HashTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/HashTransformer.java similarity index 86% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/HashTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/HashTransformer.java index c4b7e333c..00e492293 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/HashTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/HashTransformer.java @@ -1,18 +1,18 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.functions.transformers.hash.PathReader; +import com.gotocompany.dagger.functions.transformers.hash.field.RowHasher; import org.apache.flink.api.common.functions.RichMapFunction; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.functions.transformers.hash.PathReader; -import io.odpf.dagger.functions.transformers.hash.field.RowHasher; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; import java.io.Serializable; import java.util.ArrayList; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/InvalidRecordFilterTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/InvalidRecordFilterTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/InvalidRecordFilterTransformer.java rename to 
dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/InvalidRecordFilterTransformer.java index 5fbf1434f..dc8519025 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/InvalidRecordFilterTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/InvalidRecordFilterTransformer.java @@ -1,21 +1,20 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.functions.transformers.filter.FilterAspects; import org.apache.flink.api.common.functions.RichFilterFunction; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import io.odpf.dagger.common.metrics.managers.CounterStatsManager; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; +import com.gotocompany.dagger.common.metrics.managers.CounterStatsManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Arrays; import java.util.Map; -import static io.odpf.dagger.functions.transformers.filter.FilterAspects.FILTERED_INVALID_RECORDS; - /** * Filter the invalid records produced by dagger. @@ -45,16 +44,16 @@ public InvalidRecordFilterTransformer(Map transformationArgument public void open(org.apache.flink.configuration.Configuration internalFlinkConfig) throws Exception { MetricGroup metricGroup = getRuntimeContext().getMetricGroup(); metricsManager = new CounterStatsManager(metricGroup); - metricsManager.register(FILTERED_INVALID_RECORDS, PER_TABLE, tableName); + metricsManager.register(FilterAspects.FILTERED_INVALID_RECORDS, PER_TABLE, tableName); } @Override public boolean filter(Row value) { if (!(boolean) value.getField(validationIndex)) { - metricsManager.inc(FILTERED_INVALID_RECORDS); + metricsManager.inc(FilterAspects.FILTERED_INVALID_RECORDS); LOGGER.info("Filtering invalid record for table " + this.tableName + "\n" - + "Total = ", metricsManager.getCount(FILTERED_INVALID_RECORDS)); + + "Total = ", metricsManager.getCount(FilterAspects.FILTERED_INVALID_RECORDS)); return false; } return true; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/SQLTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/SQLTransformer.java similarity index 88% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/SQLTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/SQLTransformer.java index 5ab75d350..ab879958b 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/SQLTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/SQLTransformer.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.types.Row; -import 
io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.core.Transformer; -import io.odpf.dagger.common.watermark.RowtimeFieldWatermark; -import io.odpf.dagger.common.watermark.StreamWatermarkAssigner; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.Transformer; +import com.gotocompany.dagger.common.watermark.RowtimeFieldWatermark; +import com.gotocompany.dagger.common.watermark.StreamWatermarkAssigner; import java.io.Serializable; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/feature/FeatureWithTypeHandler.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/feature/FeatureWithTypeHandler.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/feature/FeatureWithTypeHandler.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/feature/FeatureWithTypeHandler.java index cc7f06d30..05c7c07ad 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/feature/FeatureWithTypeHandler.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/feature/FeatureWithTypeHandler.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.feature; +package com.gotocompany.dagger.functions.transformers.feature; -import io.odpf.dagger.functions.udfs.aggregate.feast.FeatureUtils; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.FeatureUtils; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; import org.apache.flink.api.java.tuple.Tuple3; import org.apache.flink.types.Row; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/filter/FilterAspects.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/filter/FilterAspects.java similarity index 74% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/filter/FilterAspects.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/filter/FilterAspects.java index 860d847ac..280464632 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/filter/FilterAspects.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/filter/FilterAspects.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.filter; +package com.gotocompany.dagger.functions.transformers.filter; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; /** * The enum Filter aspects. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/FieldHasherFactory.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/FieldHasherFactory.java similarity index 64% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/FieldHasherFactory.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/FieldHasherFactory.java index c8535f29f..0e320ea6b 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/FieldHasherFactory.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/FieldHasherFactory.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.functions.transformers.hash; +package com.gotocompany.dagger.functions.transformers.hash; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.transformers.hash.field.FieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.IntegerFieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.LongFieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.RowHasher; -import io.odpf.dagger.functions.transformers.hash.field.StringFieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.UnsupportedDataTypeHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.FieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.IntegerFieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.LongFieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.RowHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.StringFieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.UnsupportedDataTypeHasher; import java.util.Arrays; import java.util.List; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/PathReader.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/PathReader.java similarity index 87% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/PathReader.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/PathReader.java index 1a4cf678f..5eb8ce0a8 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/PathReader.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/PathReader.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.transformers.hash; +package com.gotocompany.dagger.functions.transformers.hash; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.transformers.hash.field.FieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.RowHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.FieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.RowHasher; import java.util.HashMap; import java.util.List; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/FieldHasher.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/FieldHasher.java similarity index 95% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/FieldHasher.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/FieldHasher.java index 0fb211ea6..e9e5b379c 
100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/FieldHasher.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/FieldHasher.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/IntegerFieldHasher.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/IntegerFieldHasher.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/IntegerFieldHasher.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/IntegerFieldHasher.java index 4dd861c92..5b8d1555c 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/IntegerFieldHasher.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/IntegerFieldHasher.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.functions.exceptions.RowHashException; /** * The Integer field hasher. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/LongFieldHasher.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/LongFieldHasher.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/LongFieldHasher.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/LongFieldHasher.java index 433a0beec..b1a137b3b 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/LongFieldHasher.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/LongFieldHasher.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.functions.exceptions.RowHashException; /** * The Long field hasher. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/RowHasher.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/RowHasher.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/RowHasher.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/RowHasher.java index 63fe442f5..7e8db616c 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/RowHasher.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/RowHasher.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.transformers.hash.FieldHasherFactory; +import com.gotocompany.dagger.functions.transformers.hash.FieldHasherFactory; import org.apache.flink.types.Row; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/StringFieldHasher.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/StringFieldHasher.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/StringFieldHasher.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/StringFieldHasher.java index 4ee9b9837..f038cd7da 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/StringFieldHasher.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/StringFieldHasher.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.functions.exceptions.RowHashException; import java.nio.charset.StandardCharsets; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasher.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasher.java similarity index 88% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasher.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasher.java index 8d1cf7e45..2e65385ea 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasher.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasher.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.exceptions.InvalidHashFieldException; +import com.gotocompany.dagger.functions.exceptions.InvalidHashFieldException; /** * The Unsupported data type hasher. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/CollectArray.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/CollectArray.java similarity index 87% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/CollectArray.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/CollectArray.java index 81e0726bb..8fe7a6c09 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/CollectArray.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/CollectArray.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.ArrayAccumulator; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.ArrayAccumulator; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.FunctionHint; import org.apache.flink.table.annotation.InputGroup; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/DistinctCount.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/DistinctCount.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/DistinctCount.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/DistinctCount.java index b6c5d93a8..a01e96eb3 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/DistinctCount.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/DistinctCount.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.DistinctCountAccumulator; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.DistinctCountAccumulator; /** * User-defined aggregate function to get Distinct count. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/Features.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/Features.java similarity index 86% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/Features.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/Features.java index 631613bf3..3fadb57f4 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/Features.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/Features.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.functions.exceptions.OddNumberOfArgumentsException; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.FeatureAccumulator; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import com.gotocompany.dagger.functions.exceptions.OddNumberOfArgumentsException; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.FeatureAccumulator; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.FunctionHint; import org.apache.flink.table.annotation.InputGroup; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesWithType.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesWithType.java similarity index 79% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesWithType.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesWithType.java index 6594760cb..8894dcf8e 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesWithType.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesWithType.java @@ -1,16 +1,15 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.functions.exceptions.InvalidNumberOfArgumentsException; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.FeatureWithTypeAccumulator; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.FeatureWithTypeAccumulator; +import com.gotocompany.dagger.functions.exceptions.InvalidNumberOfArgumentsException; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.FunctionHint; import org.apache.flink.table.annotation.InputGroup; import org.apache.flink.types.Row; -import static io.odpf.dagger.functions.common.Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR; - /** * User-defined aggregate function to get Features with type. */ @@ -39,7 +38,7 @@ public Row[] getValue(FeatureWithTypeAccumulator featureAccumulator) { */ public void accumulate(FeatureWithTypeAccumulator featureAccumulator, @DataTypeHint(inputGroup = InputGroup.ANY) Object... 
objects) { validate(objects); - for (int elementIndex = 0; elementIndex < objects.length; elementIndex += NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR) { + for (int elementIndex = 0; elementIndex < objects.length; elementIndex += Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR) { featureAccumulator.add(String.valueOf(objects[elementIndex]), objects[elementIndex + 1], ValueEnum.valueOf(String.valueOf(objects[elementIndex + 2]))); } } @@ -52,7 +51,7 @@ public void accumulate(FeatureWithTypeAccumulator featureAccumulator, @DataTypeH */ public void retract(FeatureWithTypeAccumulator featureAccumulator, Object... objects) { validate(objects); - for (int elementIndex = 0; elementIndex < objects.length; elementIndex += NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR) { + for (int elementIndex = 0; elementIndex < objects.length; elementIndex += Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR) { featureAccumulator.remove(String.valueOf(objects[elementIndex]), objects[elementIndex + 1], ValueEnum.valueOf(String.valueOf(objects[elementIndex + 2]))); } } @@ -64,7 +63,7 @@ public void merge(FeatureWithTypeAccumulator featureWithTypeAccumulator, Iterabl } private void validate(Object[] objects) { - if (objects.length % NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR != 0) { + if (objects.length % Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR != 0) { throw new InvalidNumberOfArgumentsException(); } } diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/PercentileAggregator.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/PercentileAggregator.java similarity index 86% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/PercentileAggregator.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/PercentileAggregator.java index 6878dafc6..2bf1058e6 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/PercentileAggregator.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/PercentileAggregator.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.PercentileAccumulator; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.PercentileAccumulator; import org.apache.flink.table.annotation.DataTypeHint; import java.math.BigDecimal; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulator.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulator.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulator.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulator.java index 417ca0386..8c64b717d 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulator.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import 
org.apache.flink.table.annotation.DataTypeHint; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulator.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulator.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulator.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulator.java index b80a9fcd5..ac5a0e72d 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulator.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import org.apache.flink.table.annotation.DataTypeHint; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulator.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulator.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulator.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulator.java index 4bca38c56..5ff15cf85 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulator.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulator.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; -import io.odpf.dagger.functions.udfs.aggregate.feast.FeatureUtils; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.FeatureUtils; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.types.Row; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulator.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulator.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulator.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulator.java index db4356c72..59fa55d1f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulator.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulator.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; -import io.odpf.dagger.functions.udfs.aggregate.feast.FeatureUtils; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.FeatureUtils; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; import 
org.apache.flink.api.java.tuple.Tuple3; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.types.Row; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulator.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulator.java similarity index 94% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulator.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulator.java index e0bd014ad..f82eb2272 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulator.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulator.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import org.apache.commons.math3.stat.descriptive.rank.Percentile; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/FeatureUtils.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/FeatureUtils.java similarity index 87% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/FeatureUtils.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/FeatureUtils.java index 9213d4b03..63d56f128 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/FeatureUtils.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/FeatureUtils.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast; +package com.gotocompany.dagger.functions.udfs.aggregate.feast; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueTransformer; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueTransformerFactory; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueTransformer; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueTransformerFactory; import org.apache.flink.types.Row; import java.util.ArrayList; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformer.java similarity index 81% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformer.java index 5db2653e3..770e36750 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformer.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import java.math.BigDecimal; -import 
static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.DoubleType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.DoubleType; /** * The Big decimal value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformer.java index 1d669d76c..543ce35ad 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.BooleanType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.BooleanType; /** * The Boolean value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformer.java similarity index 74% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformer.java index 0f92c82f0..d660d32ba 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformer.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import com.google.protobuf.ByteString; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.ByteType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.ByteType; /** * The Byte value transformer. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformer.java index fb62f1bc7..49d998b7e 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.DoubleType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.DoubleType; /** * The Double value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformer.java index d6f07988a..76da11c4a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.FloatType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.FloatType; /** * The Float value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformer.java index b2fef08dc..9af0022f5 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.IntegerType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.IntegerType; /** * The Integer value transformer. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformer.java index 56d2a680e..78fafa53b 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.LongType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.LongType; /** * The Long value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformer.java similarity index 74% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformer.java index 3d35908a3..80d07efc2 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformer.java @@ -1,10 +1,9 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; +import com.gotocompany.dagger.functions.common.Constants; import org.apache.commons.lang3.NotImplementedException; import org.apache.flink.types.Row; -import static io.odpf.dagger.functions.common.Constants.NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW; - /** * The Null value transformer. 
*/ @@ -27,6 +26,6 @@ public Integer getIndex() { @Override public Row transform(Object value) { - return new Row(NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW); + return new Row(Constants.NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW); } } diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformer.java similarity index 76% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformer.java index 56e7066df..987f61c0a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformer.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.StringType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.StringType; /** * The String value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformer.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformer.java index a6ec25452..c7324d3c5 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformer.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.apache.flink.types.Row; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.TimestampType; +import static com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.TimestampType; /** * The Timestamp value transformer. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueEnum.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueEnum.java similarity index 91% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueEnum.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueEnum.java index fd70e4b7c..82f648948 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueEnum.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueEnum.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; /** * The enum Value. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueTransformer.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueTransformer.java similarity index 81% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueTransformer.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueTransformer.java index 7e5c42834..aeb641818 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueTransformer.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueTransformer.java @@ -1,9 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; +import com.gotocompany.dagger.functions.common.Constants; import org.apache.flink.types.Row; -import static io.odpf.dagger.functions.common.Constants.NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW; - /** * The interface Value transformer. */ @@ -49,7 +48,7 @@ default Object getValue(Object value) { * @return the row */ default Row transform(Object value) { - Row row = new Row(NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW); + Row row = new Row(Constants.NUMBER_OF_DATA_TYPES_IN_FEATURE_ROW); row.setField(getIndex(), getValue(value)); return row; } diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueTransformerFactory.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueTransformerFactory.java similarity index 91% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueTransformerFactory.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueTransformerFactory.java index f8bb3510c..cda3d798f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ValueTransformerFactory.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ValueTransformerFactory.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import java.util.Arrays; import java.util.List; diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactory.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactory.java new file mode 100644 index 000000000..fdd09a461 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactory.java @@ -0,0 +1,162 @@ +package com.gotocompany.dagger.functions.udfs.factories; + +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStore; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStoreClientProvider; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DefaultDartDataStore; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +import com.google.gson.Gson; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import 
com.gotocompany.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.TableUdf; +import com.gotocompany.dagger.common.udfs.UdfFactory; +import com.gotocompany.dagger.functions.udfs.aggregate.CollectArray; +import com.gotocompany.dagger.functions.udfs.aggregate.DistinctCount; +import com.gotocompany.dagger.functions.udfs.aggregate.Features; +import com.gotocompany.dagger.functions.udfs.aggregate.FeaturesWithType; +import com.gotocompany.dagger.functions.udfs.aggregate.PercentileAggregator; +import com.gotocompany.dagger.functions.udfs.scalar.ArrayAggregate; +import com.gotocompany.dagger.functions.udfs.scalar.ArrayOperate; +import com.gotocompany.dagger.functions.udfs.scalar.ByteToString; +import com.gotocompany.dagger.functions.udfs.scalar.CondEq; +import com.gotocompany.dagger.functions.udfs.scalar.DartContains; +import com.gotocompany.dagger.functions.udfs.scalar.DartGet; +import com.gotocompany.dagger.functions.udfs.scalar.Distance; +import com.gotocompany.dagger.functions.udfs.scalar.ElementAt; +import com.gotocompany.dagger.functions.udfs.scalar.EndOfMonth; +import com.gotocompany.dagger.functions.udfs.scalar.EndOfWeek; +import com.gotocompany.dagger.functions.udfs.scalar.ExponentialMovingAverage; +import com.gotocompany.dagger.functions.udfs.scalar.Filters; +import com.gotocompany.dagger.functions.udfs.scalar.FormatTimeInZone; +import com.gotocompany.dagger.functions.udfs.scalar.GeoHash; +import com.gotocompany.dagger.functions.udfs.scalar.LinearTrend; +import com.gotocompany.dagger.functions.udfs.scalar.ListContains; +import com.gotocompany.dagger.functions.udfs.scalar.MapGet; +import com.gotocompany.dagger.functions.udfs.scalar.S2AreaInKm2; +import com.gotocompany.dagger.functions.udfs.scalar.S2Id; +import com.gotocompany.dagger.functions.udfs.scalar.SelectFields; +import com.gotocompany.dagger.functions.udfs.scalar.SingleFeatureWithType; +import com.gotocompany.dagger.functions.udfs.scalar.Split; +import com.gotocompany.dagger.functions.udfs.scalar.StartOfMonth; +import com.gotocompany.dagger.functions.udfs.scalar.StartOfWeek; +import com.gotocompany.dagger.functions.udfs.scalar.TimeInDate; +import com.gotocompany.dagger.functions.udfs.scalar.TimestampFromUnix; +import com.gotocompany.dagger.functions.udfs.scalar.JsonQuery; +import com.gotocompany.dagger.functions.udfs.scalar.JsonUpdate; +import com.gotocompany.dagger.functions.udfs.scalar.JsonDelete; +import com.gotocompany.dagger.functions.udfs.table.HistogramBucket; +import com.gotocompany.dagger.functions.udfs.table.OutlierMad; + +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; + +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; +import static com.gotocompany.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_TABLE; + +/** + * The factory class for all the udf. + */ +public class FunctionFactory extends UdfFactory { + + private static final Gson GSON = new Gson(); + + private final StencilClientOrchestrator stencilClientOrchestrator; + + + /** + * Instantiates a new Function factory. 
+ * + * @param streamTableEnvironment the stream table environment + * @param configuration the configuration + */ + public FunctionFactory(StreamTableEnvironment streamTableEnvironment, Configuration configuration) { + super(streamTableEnvironment, configuration); + stencilClientOrchestrator = new StencilClientOrchestrator(configuration); + } + + @Override + public HashSet<ScalarUdf> getScalarUdfs() { + HashSet<ScalarUdf> scalarUdfs = new HashSet<>(); + DartDataStore dartDataSource = getDartDataSource(); + scalarUdfs.add(new DartContains(dartDataSource)); + scalarUdfs.add(new DartGet(dartDataSource)); + scalarUdfs.add(new Distance()); + scalarUdfs.add(new ElementAt(getProtosInInputStreams(), stencilClientOrchestrator)); + scalarUdfs.add(new EndOfMonth()); + scalarUdfs.add(new EndOfWeek()); + scalarUdfs.add(new ExponentialMovingAverage()); + scalarUdfs.add(new FormatTimeInZone()); + scalarUdfs.add(new GeoHash()); + scalarUdfs.add(new LinearTrend()); + scalarUdfs.add(new ListContains()); + scalarUdfs.add(new MapGet()); + scalarUdfs.add(new S2AreaInKm2()); + scalarUdfs.add(new S2Id()); + scalarUdfs.add(new SingleFeatureWithType()); + scalarUdfs.add(new Split()); + scalarUdfs.add(new StartOfMonth()); + scalarUdfs.add(new StartOfWeek()); + scalarUdfs.add(new TimeInDate()); + scalarUdfs.add(new TimestampFromUnix()); + scalarUdfs.add(new CondEq()); + scalarUdfs.add(new Filters(stencilClientOrchestrator)); + scalarUdfs.add(new SelectFields(stencilClientOrchestrator)); + scalarUdfs.add(new ArrayAggregate()); + scalarUdfs.add(new ArrayOperate()); + scalarUdfs.add(new ByteToString()); + scalarUdfs.add(new JsonQuery()); + scalarUdfs.add(new JsonUpdate()); + scalarUdfs.add(new JsonDelete()); + return scalarUdfs; + } + + @Override + public HashSet<TableUdf> getTableUdfs() { + HashSet<TableUdf> tableUdfs = new HashSet<>(); + tableUdfs.add(new HistogramBucket()); + tableUdfs.add(new OutlierMad()); + return tableUdfs; + } + + @Override + public HashSet<AggregateUdf> getAggregateUdfs() { + HashSet<AggregateUdf> aggregateUdfs = new HashSet<>(); + aggregateUdfs.add(new CollectArray()); + aggregateUdfs.add(new DistinctCount()); + aggregateUdfs.add(new Features()); + aggregateUdfs.add(new FeaturesWithType()); + aggregateUdfs.add(new PercentileAggregator()); + return aggregateUdfs; + } + + private DartDataStore getDartDataSource() { + String projectID = getConfiguration().getString(Constants.UDF_DART_PROJECT_ID_KEY, Constants.UDF_DART_PROJECT_ID_DEFAULT); + String bucketID = getConfiguration().getString(Constants.UDF_DART_BUCKET_ID_KEY, Constants.UDF_DART_BUCKET_ID_DEFAULT); + + String udfStoreProvider = getConfiguration().getString(Constants.UDF_STORE_PROVIDER_KEY); + if (udfStoreProvider == null) { + udfStoreProvider = Constants.UDF_STORE_PROVIDER_GCS; + projectID = getConfiguration().getString(Constants.UDF_DART_GCS_PROJECT_ID_KEY, Constants.UDF_DART_GCS_PROJECT_ID_DEFAULT); + bucketID = getConfiguration().getString(Constants.UDF_DART_GCS_BUCKET_ID_KEY, Constants.UDF_DART_GCS_BUCKET_ID_DEFAULT); + } + + return new DefaultDartDataStore(new DartDataStoreClientProvider(udfStoreProvider, projectID, getConfiguration()), bucketID, getConfiguration()); + } + + private LinkedHashMap<String, String> getProtosInInputStreams() { + LinkedHashMap<String, String> protoClassForTable = new LinkedHashMap<>(); + String jsonArrayString = getConfiguration().getString(INPUT_STREAMS, ""); + Map[] streamsConfig = GSON.fromJson(jsonArrayString, Map[].class); + for (Map<String, String> streamConfig : streamsConfig) { + String protoClassName = streamConfig.getOrDefault(STREAM_INPUT_SCHEMA_PROTO_CLASS, ""); + String tableName =
streamConfig.getOrDefault(STREAM_INPUT_SCHEMA_TABLE, ""); + protoClassForTable.put(tableName, protoClassName); + } + return protoClassForTable; + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/PythonUdfConfig.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfConfig.java similarity index 71% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/PythonUdfConfig.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfConfig.java index 2f33f75c8..61205aaff 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/PythonUdfConfig.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfConfig.java @@ -1,13 +1,12 @@ -package io.odpf.dagger.functions.udfs.python; +package com.gotocompany.dagger.functions.udfs.python; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.annotations.SerializedName; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; import lombok.Getter; -import static io.odpf.dagger.functions.common.Constants.*; - /** * The type Python udf config. */ @@ -17,23 +16,23 @@ public class PythonUdfConfig { .setPrettyPrinting() .create(); - @SerializedName(PYTHON_FILES_KEY) + @SerializedName(Constants.PYTHON_FILES_KEY) private String pythonFiles; - @SerializedName(PYTHON_REQUIREMENTS_KEY) + @SerializedName(Constants.PYTHON_REQUIREMENTS_KEY) @Getter private String pythonRequirements; - @SerializedName(PYTHON_ARCHIVES_KEY) + @SerializedName(Constants.PYTHON_ARCHIVES_KEY) private String pythonArchives; - @SerializedName(PYTHON_FN_EXECUTION_ARROW_BATCH_SIZE_KEY) + @SerializedName(Constants.PYTHON_FN_EXECUTION_ARROW_BATCH_SIZE_KEY) private Integer pythonArrowBatchSize; - @SerializedName(PYTHON_FN_EXECUTION_BUNDLE_SIZE_KEY) + @SerializedName(Constants.PYTHON_FN_EXECUTION_BUNDLE_SIZE_KEY) private Integer pythonBundleSize; - @SerializedName(PYTHON_FN_EXECUTION_BUNDLE_TIME_KEY) + @SerializedName(Constants.PYTHON_FN_EXECUTION_BUNDLE_TIME_KEY) private Long pythonBundleTime; /** @@ -67,7 +66,7 @@ public String getPythonArchives() { */ public int getPythonArrowBatchSize() { if (pythonArrowBatchSize == null) { - return PYTHON_FN_EXECUTION_ARROW_BATCH_SIZE_DEFAULT; + return Constants.PYTHON_FN_EXECUTION_ARROW_BATCH_SIZE_DEFAULT; } return pythonArrowBatchSize; } @@ -79,7 +78,7 @@ public int getPythonArrowBatchSize() { */ public int getPythonBundleSize() { if (pythonBundleSize == null) { - return PYTHON_FN_EXECUTION_BUNDLE_SIZE_DEFAULT; + return Constants.PYTHON_FN_EXECUTION_BUNDLE_SIZE_DEFAULT; } return pythonBundleSize; } @@ -91,7 +90,7 @@ public int getPythonBundleSize() { */ public long getPythonBundleTime() { if (pythonBundleTime == null) { - return PYTHON_FN_EXECUTION_BUNDLE_TIME_DEFAULT; + return Constants.PYTHON_FN_EXECUTION_BUNDLE_TIME_DEFAULT; } return pythonBundleTime; } @@ -103,7 +102,7 @@ public long getPythonBundleTime() { * @return the python udf config */ public static PythonUdfConfig parse(Configuration configuration) { - String jsonString = configuration.getString(PYTHON_UDF_CONFIG, ""); + String jsonString = configuration.getString(Constants.PYTHON_UDF_CONFIG, ""); return GSON.fromJson(jsonString, PythonUdfConfig.class); } diff --git 
a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/PythonUdfManager.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfManager.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/PythonUdfManager.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfManager.java index 59fb99d2a..bbd44dae7 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/PythonUdfManager.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfManager.java @@ -1,8 +1,9 @@ -package io.odpf.dagger.functions.udfs.python; +package com.gotocompany.dagger.functions.udfs.python; -import io.odpf.dagger.functions.exceptions.PythonFilesEmptyException; -import io.odpf.dagger.functions.udfs.python.file.type.FileType; -import io.odpf.dagger.functions.udfs.python.file.type.FileTypeFactory; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.exceptions.PythonFilesEmptyException; +import com.gotocompany.dagger.functions.udfs.python.file.type.FileType; +import com.gotocompany.dagger.functions.udfs.python.file.type.FileTypeFactory; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import java.io.IOException; @@ -16,6 +17,7 @@ public class PythonUdfManager { private StreamTableEnvironment tableEnvironment; private PythonUdfConfig pythonUdfConfig; + private final Configuration configuration; /** * Instantiates a new Python udf manager. @@ -23,9 +25,10 @@ public class PythonUdfManager { * @param tableEnvironment the table environment * @param pythonUdfConfig the python udf config */ - public PythonUdfManager(StreamTableEnvironment tableEnvironment, PythonUdfConfig pythonUdfConfig) { + public PythonUdfManager(StreamTableEnvironment tableEnvironment, PythonUdfConfig pythonUdfConfig, Configuration configuration) { this.tableEnvironment = tableEnvironment; this.pythonUdfConfig = pythonUdfConfig; + this.configuration = configuration; } /** @@ -42,7 +45,7 @@ public void registerPythonFunctions() throws IOException { } for (String pythonFile : pythonFiles) { - FileType fileType = FileTypeFactory.getFileType(pythonFile); + FileType fileType = FileTypeFactory.getFileType(pythonFile, configuration); List fileNames = fileType.getFileNames(); List sqlQueries = createQuery(fileNames); executeSql(sqlQueries); diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/FileSource.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSource.java similarity index 77% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/FileSource.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSource.java index 915f81d32..ac00ac5a8 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/FileSource.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSource.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.python.file.source; +package com.gotocompany.dagger.functions.udfs.python.file.source; import java.io.IOException; diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSourceFactory.java 
b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSourceFactory.java new file mode 100644 index 000000000..eea9f25e0 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSourceFactory.java @@ -0,0 +1,44 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.python.file.source.cos.CosFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.gcs.GcsFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.local.LocalFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.oss.OssFileSource; + +/** + * The type File source factory. + */ +public class FileSourceFactory { + + /** + * Gets file source. + * + * @param pythonFile the python file + * @return the file source + */ + public static FileSource getFileSource(String pythonFile, Configuration configuration) { + if ("GS".equals(getFileSourcePrefix(pythonFile))) { + return new GcsFileSource(pythonFile); + } else if ("OSS".equals(getFileSourcePrefix(pythonFile))) { + return new OssFileSource( + pythonFile, + configuration.getString(Constants.OSS_ENDPOINT, Constants.DEFAULT_OSS_ENDPOINT) + ); + } else if ("COSN".equals(getFileSourcePrefix(pythonFile))) { + return new CosFileSource( + pythonFile, + configuration.getBoolean(Constants.ENABLE_TKE_OIDC_PROVIDER, false), + configuration.getString(Constants.COS_REGION, Constants.DEFAULT_COS_REGION) + ); + } else { + return new LocalFileSource(pythonFile); + } + } + + private static String getFileSourcePrefix(String pythonFile) { + String[] files = pythonFile.split("://"); + return files[0].toUpperCase(); + } +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileClient.java new file mode 100644 index 000000000..6ed4a9d07 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileClient.java @@ -0,0 +1,42 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.cos; + +import com.gotocompany.dagger.functions.common.CosLibClient; +import com.qcloud.cos.COSClient; +import com.qcloud.cos.model.COSObject; +import com.qcloud.cos.model.COSObjectInputStream; +import com.qcloud.cos.utils.IOUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +public class CosFileClient { + + private final boolean enableTkeOidcProvider; + private final String cosRegion; + + public CosFileClient(boolean enableTkeOidcProvider, String cosRegion) { + this.enableTkeOidcProvider = enableTkeOidcProvider; + this.cosRegion = cosRegion; + } + + /** + * Get file byte [ ]. 
+ * + * @param pythonFile the python file + * @return the byte [ ] + */ + public byte[] getFile(String pythonFile) throws IOException { + List<String> file = Arrays.asList(pythonFile.replace("cosn://", "").split("/")); + + String bucketName = file.get(0); + String objectName = file.stream().skip(1).collect(Collectors.joining("/")); + + COSClient cosClient = CosLibClient.getInstance().get(enableTkeOidcProvider, cosRegion); + COSObject cosObject = cosClient.getObject(bucketName, objectName); + try (COSObjectInputStream inputStream = cosObject.getObjectContent()) { + return IOUtils.toByteArray(inputStream); + } + } +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileSource.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileSource.java new file mode 100644 index 000000000..cbefd93b9 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileSource.java @@ -0,0 +1,54 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.cos; + +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSource; + +import java.io.IOException; + +public class CosFileSource implements FileSource { + + private CosFileClient cosFileClient; + private final String pythonFile; + private final String cosRegion; + private final boolean enableTkeOidcProvider; + + /** + * Instantiates a new Cos file source. + * + * @param pythonFile the python file + */ + public CosFileSource(String pythonFile, boolean enableTkeOidcProvider, String cosRegion) { + this.pythonFile = pythonFile; + this.cosRegion = cosRegion; + this.enableTkeOidcProvider = enableTkeOidcProvider; + } + + /** + * TestONLY + * Instantiates a new Cos file source. + * + * @param pythonFile the python file + */ + public CosFileSource(String pythonFile, CosFileClient cosFileClient, boolean enableTkeOidcProvider, String cosRegion) { + this.pythonFile = pythonFile; + this.cosFileClient = cosFileClient; + this.cosRegion = cosRegion; + this.enableTkeOidcProvider = enableTkeOidcProvider; + } + + @Override + public byte[] getObjectFile() throws IOException { + return getCosClient().getFile(pythonFile); + } + + /** + * Gets cos client.
+ * + * @return the cos client + */ + private CosFileClient getCosClient() { + if (this.cosFileClient == null) { + this.cosFileClient = new CosFileClient(enableTkeOidcProvider, cosRegion); + } + return this.cosFileClient; + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsClient.java similarity index 94% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsClient.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsClient.java index e3bd69a3f..a3b85873a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsClient.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsClient.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.python.file.source.gcs; +package com.gotocompany.dagger.functions.udfs.python.file.source.gcs; import com.google.cloud.storage.Blob; import com.google.cloud.storage.BlobId; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsFileSource.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsFileSource.java similarity index 87% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsFileSource.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsFileSource.java index d7b7a5bef..2eba02fea 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsFileSource.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsFileSource.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.python.file.source.gcs; +package com.gotocompany.dagger.functions.udfs.python.file.source.gcs; -import io.odpf.dagger.functions.udfs.python.file.source.FileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSource; /** diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/local/LocalFileSource.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/local/LocalFileSource.java similarity index 78% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/local/LocalFileSource.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/local/LocalFileSource.java index 3f3aff624..649eda21c 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/local/LocalFileSource.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/local/LocalFileSource.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.python.file.source.local; +package com.gotocompany.dagger.functions.udfs.python.file.source.local; -import io.odpf.dagger.functions.udfs.python.file.source.FileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSource; import java.io.IOException; import java.nio.file.Files; diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssClient.java 
b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssClient.java new file mode 100644 index 000000000..9a89f6c4f --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssClient.java @@ -0,0 +1,57 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.oss; + +import com.aliyun.core.utils.IOUtils; +import com.aliyun.oss.OSS; +import com.aliyun.oss.OSSClientBuilder; +import com.aliyun.oss.common.auth.CredentialsProviderFactory; +import com.aliyun.oss.model.OSSObject; +import com.aliyuncs.exceptions.ClientException; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +public class OssClient { + private final OSS libOssClient; + + /** + * Instantiates a new Oss client. + */ + public OssClient(String ossEndpoint) { + try { + libOssClient = new OSSClientBuilder().build(ossEndpoint, CredentialsProviderFactory.newEnvironmentVariableCredentialsProvider()); + } catch (ClientException e) { + throw new RuntimeException("failed to initialise oss client", e); + } + } + + /** + * Instantiates a new OSS client. + * This constructor used for unit test purposes. + * + * @param libOssClient the storage + */ + public OssClient(OSS libOssClient) { + this.libOssClient = libOssClient; + } + + /** + * Get file byte [ ]. + * + * @param pythonFile the python file + * @return the byte [ ] + */ + public byte[] getFile(String pythonFile) throws IOException { + List<String> file = Arrays.asList(pythonFile.replace("oss://", "").split("/")); + + String bucketName = file.get(0); + String objectName = file.stream().skip(1).collect(Collectors.joining("/")); + + OSSObject ossObject = libOssClient.getObject(bucketName, objectName); + try (InputStream inputStream = ossObject.getObjectContent()) { + return IOUtils.toByteArray(inputStream); + } + } +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssFileSource.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssFileSource.java new file mode 100644 index 000000000..4950dcb03 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssFileSource.java @@ -0,0 +1,50 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.oss; + +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSource; + +import java.io.IOException; + +public class OssFileSource implements FileSource { + private OssClient ossClient; + private final String pythonFile; + private final String ossEndpoint; + + /** + * Instantiates a new Oss file source. + * + * @param pythonFile the python file + */ + public OssFileSource(String pythonFile, String ossEndpoint) { + this.pythonFile = pythonFile; + this.ossEndpoint = ossEndpoint; + } + + /** + * TestOnly + * Instantiates a new Oss file source. + * + * @param pythonFile the python file + */ + public OssFileSource(String pythonFile, OssClient ossClient, String ossEndpoint) { + this.pythonFile = pythonFile; + this.ossClient = ossClient; + this.ossEndpoint = ossEndpoint; + } + + @Override + public byte[] getObjectFile() throws IOException { + return getOssClient().getFile(pythonFile); + } + + /** + * Gets oss client.
+ * + * @return the oss client + */ + private OssClient getOssClient() { + if (this.ossClient == null) { + this.ossClient = new OssClient(this.ossEndpoint); + } + return this.ossClient; + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/FileType.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileType.java similarity index 79% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/FileType.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileType.java index 1123b42f1..cd9139b77 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/FileType.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileType.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; import java.io.IOException; import java.util.List; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/FileTypeFactory.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileTypeFactory.java similarity index 61% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/FileTypeFactory.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileTypeFactory.java index 83543ad19..e3b73c693 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/FileTypeFactory.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileTypeFactory.java @@ -1,8 +1,9 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; -import io.odpf.dagger.functions.exceptions.PythonFilesFormatException; -import io.odpf.dagger.functions.udfs.python.file.source.FileSource; -import io.odpf.dagger.functions.udfs.python.file.source.FileSourceFactory; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.exceptions.PythonFilesFormatException; +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSourceFactory; /** * The type File type factory. 
@@ -15,8 +16,8 @@ public class FileTypeFactory { * @param pythonFile the python file * @return the file type */ - public static FileType getFileType(String pythonFile) { - FileSource fileSource = FileSourceFactory.getFileSource(pythonFile); + public static FileType getFileType(String pythonFile, Configuration configuration) { + FileSource fileSource = FileSourceFactory.getFileSource(pythonFile, configuration); switch (getFileTypeFormat(pythonFile)) { case "PY": return new PythonFileType(pythonFile); diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/PythonFileType.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/PythonFileType.java similarity index 82% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/PythonFileType.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/PythonFileType.java index 2bc993e04..fd77de76f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/PythonFileType.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/PythonFileType.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; -import io.odpf.dagger.functions.exceptions.PythonFilesEmptyException; +import com.gotocompany.dagger.functions.exceptions.PythonFilesEmptyException; import java.util.Collections; import java.util.List; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/ZipFileType.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/ZipFileType.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/ZipFileType.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/ZipFileType.java index 31fc00cd0..259b0fca4 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/type/ZipFileType.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/python/file/type/ZipFileType.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; -import io.odpf.dagger.functions.udfs.python.file.source.FileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.FileSource; import java.io.ByteArrayInputStream; import java.io.IOException; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ArrayAggregate.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayAggregate.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ArrayAggregate.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayAggregate.java index 36a619d13..79fe4603e 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ArrayAggregate.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayAggregate.java @@ -1,5 +1,9 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; +import 
com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.AggregationExpression; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors.ArrayAggregateProcessor; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor; import org.apache.flink.api.java.typeutils.GenericTypeInfo; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.catalog.DataTypeFactory; @@ -15,11 +19,7 @@ import org.apache.flink.table.types.inference.TypeInference; import org.apache.flink.table.types.inference.TypeStrategy; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.AggregationExpression; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.processors.ArrayAggregateProcessor; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.io.Serializable; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ArrayOperate.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayOperate.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ArrayOperate.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayOperate.java index 8fe3b123d..7bc8fd78e 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ArrayOperate.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayOperate.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.OperationExpression; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.processors.ArrayOperateProcessor; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor; +import com.gotocompany.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.OperationExpression; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors.ArrayOperateProcessor; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor; import org.apache.flink.api.java.typeutils.GenericTypeInfo; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.catalog.DataTypeFactory; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ByteToString.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ByteToString.java similarity index 95% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ByteToString.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ByteToString.java index b552a5021..5d68d577b 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ByteToString.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ByteToString.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar; 
+package com.gotocompany.dagger.functions.udfs.scalar; import com.google.protobuf.ByteString; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.functions.FunctionDefinition; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/CondEq.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/CondEq.java similarity index 87% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/CondEq.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/CondEq.java index 53b1332ed..07532167a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/CondEq.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/CondEq.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.MessageParser; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.InputGroup; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.udfs.scalar.longbow.MessageParser; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Arrays; import java.util.List; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/DartContains.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/DartContains.java similarity index 59% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/DartContains.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/DartContains.java index ba817562f..f7c41c2a5 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/DartContains.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/DartContains.java @@ -1,22 +1,18 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.udfs.scalar.dart.DartAspects; -import io.odpf.dagger.functions.udfs.scalar.dart.store.gcs.GcsDataStore; -import io.odpf.dagger.functions.udfs.scalar.dart.types.SetCache; -import org.apache.flink.table.functions.FunctionContext; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartAspects; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartScalarUdf; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStore; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.SetCache; import java.util.HashMap; import java.util.Map; -import static io.odpf.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; - /** * The DartContains udf. 
*/ -public class DartContains extends ScalarUdf { - private final GcsDataStore dataStore; +public class DartContains extends DartScalarUdf { + private final DartDataStore dataStore; private final Map<String, SetCache> setCache; /** @@ -24,32 +20,11 @@ public class DartContains extends ScalarUdf { * * @param dataStore the data store */ - DartContains(GcsDataStore dataStore) { + public DartContains(DartDataStore dataStore) { this.dataStore = dataStore; setCache = new HashMap<>(); } - /** - * With gcs data store dart contains. - * - * @param projectId the project id - * @param bucketId the bucket id - * @return the dart contains - */ - public static DartContains withGcsDataStore(String projectId, String bucketId) { - return new DartContains(new GcsDataStore(projectId, bucketId)); - } - - @Override - public void open(FunctionContext context) throws Exception { - super.open(context); - MeterStatsManager meterStatsManager = new MeterStatsManager(context.getMetricGroup(), true); - meterStatsManager.register(UDF_TELEMETRY_GROUP_KEY, this.getName(), DartAspects.values()); - this.dataStore.setMeterStatsManager(meterStatsManager); - this.dataStore.setGaugeStatsManager(getGaugeStatsManager()); - - } - /** * To check if a data point in the message is present in the Redis collection. * @@ -108,18 +83,17 @@ public boolean eval(String listName, String field, String regex, Integer refresh private SetCache getListData(String listName, String field, int refreshRateInHours) { if (setCache.isEmpty() || !setCache.containsKey(listName) || setCache.get(listName).hasExpired(refreshRateInHours) || setCache.get(listName).isEmpty()) { - setCache.put(listName, dataStore.getSet(listName)); - dataStore.getMeterStatsManager().markEvent(DartAspects.DART_GCS_FETCH_SUCCESS); + setCache.put(listName, dataStore.getSet(listName, getMeterStatsManager(), getGaugeStatsManager())); + getMeterStatsManager().markEvent(DartAspects.DART_GCS_FETCH_SUCCESS); } return setCache.get(listName); } private void updateMetrics(boolean isPresent) { if (isPresent) { - dataStore.getMeterStatsManager().markEvent(DartAspects.DART_CACHE_HIT); + getMeterStatsManager().markEvent(DartAspects.DART_CACHE_HIT); } else { - dataStore.getMeterStatsManager().markEvent(DartAspects.DART_CACHE_MISS); + getMeterStatsManager().markEvent(DartAspects.DART_CACHE_MISS); } } - } diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/DartGet.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/DartGet.java new file mode 100644 index 000000000..8242fcfeb --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/DartGet.java @@ -0,0 +1,65 @@ +package com.gotocompany.dagger.functions.udfs.scalar; + +import com.gotocompany.dagger.functions.exceptions.KeyDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartAspects; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartScalarUdf; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStore; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.MapCache; + +import java.util.HashMap; +import java.util.Map; + +/** + * The DartGet udf. + */ +public class DartGet extends DartScalarUdf { + private final DartDataStore dataStore; + private final Map<String, MapCache> cache; + + /** + * Instantiates a new Dart get.
+ * + * @param dataStore the data store + */ + public DartGet(DartDataStore dataStore) { + this.dataStore = dataStore; + cache = new HashMap<>(); + } + + /** + * To fetch a corresponding value in a collection given a key from data point. + * + * @param collectionName the collection name + * @param key the key + * @param refreshRateInHours ttl + * @return the value in string + * @author gaurav.s + * @team DE + */ + public String eval(String collectionName, String key, Integer refreshRateInHours) { + if (cache.isEmpty() || !cache.containsKey(collectionName) || cache.get(collectionName).hasExpired(refreshRateInHours) || cache.get(collectionName).isEmpty()) { + cache.put(collectionName, dataStore.getMap(collectionName, getMeterStatsManager(), getGaugeStatsManager())); + getMeterStatsManager().markEvent(DartAspects.DART_GCS_FETCH_SUCCESS); + } + getMeterStatsManager().markEvent(DartAspects.DART_CACHE_HIT); + return cache.get(collectionName).get(key); + } + + /** + * Corresponding value in a GCS bucket given a key from data point. + * + * @param collectionName the collection name + * @param key the key + * @param refreshRateInHours the refresh rate in hours + * @param defaultValue the default value + * @return the string + */ + public String eval(String collectionName, String key, Integer refreshRateInHours, String defaultValue) { + try { + return eval(collectionName, key, refreshRateInHours); + } catch (KeyDoesNotExistException e) { + getMeterStatsManager().markEvent(DartAspects.DART_CACHE_MISS); + return defaultValue; + } + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Distance.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Distance.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Distance.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Distance.java index 7c2d233cd..a053d1446 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Distance.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Distance.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; /** * The Distance udf. 
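The DartContains and DartGet hunks above replace the GCS-specific withGcsDataStore factory with public constructors that accept any DartDataStore, and the stats managers are now passed into getSet/getMap on each call instead of being held by the store. A minimal wiring sketch under those assumptions, using a hypothetical in-memory store for illustration (the class name and literal values below are not part of this change, and the SetCache/MapCache constructor argument types are taken from their usage in DefaultDartDataStore later in this patch):

import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager;
import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager;
import com.gotocompany.dagger.functions.udfs.scalar.DartContains;
import com.gotocompany.dagger.functions.udfs.scalar.DartGet;
import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStore;
import com.gotocompany.dagger.functions.udfs.scalar.dart.types.MapCache;
import com.gotocompany.dagger.functions.udfs.scalar.dart.types.SetCache;

import java.util.Collections;

public class DartStoreWiringSketch {
    public static void main(String[] args) {
        // Hypothetical in-memory store; real jobs would use DefaultDartDataStore backed by GCS/OSS/COS.
        DartDataStore inMemoryStore = new DartDataStore() {
            @Override
            public SetCache getSet(String setName, MeterStatsManager meter, GaugeStatsManager gauge) {
                return new SetCache(Collections.singleton("blacklisted-id"));
            }

            @Override
            public MapCache getMap(String mapName, MeterStatsManager meter, GaugeStatsManager gauge) {
                return new MapCache(Collections.singletonMap("some-key", "some-value"));
            }
        };

        // The UDFs now take any DartDataStore via their public constructors; their
        // metrics managers are created in DartScalarUdf#open, so eval() should only be
        // called after Flink has opened the function.
        DartGet dartGet = new DartGet(inMemoryStore);
        DartContains dartContains = new DartContains(inMemoryStore);
    }
}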
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ElementAt.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ElementAt.java similarity index 96% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ElementAt.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ElementAt.java index d4927f182..3a9125483 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ElementAt.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ElementAt.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.udfs.scalar.elementAt.MessageReader; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.MessageReader; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.functions.FunctionContext; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/EndOfMonth.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfMonth.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/EndOfMonth.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfMonth.java index 20225b9a8..e44950953 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/EndOfMonth.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfMonth.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Calendar; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/EndOfWeek.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfWeek.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/EndOfWeek.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfWeek.java index e48c3dd7a..1a83380dc 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/EndOfWeek.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfWeek.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Calendar; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ExponentialMovingAverage.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ExponentialMovingAverage.java similarity index 97% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ExponentialMovingAverage.java rename to 
dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ExponentialMovingAverage.java index 51535a303..958db2339 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ExponentialMovingAverage.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ExponentialMovingAverage.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.annotation.DataTypeHint; import java.sql.Timestamp; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Filters.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Filters.java similarity index 94% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Filters.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Filters.java index b167931cb..b4f6e2bd5 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Filters.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Filters.java @@ -1,16 +1,16 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.InputGroup; import org.apache.flink.table.functions.FunctionContext; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.stencil.client.StencilClient; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.LinkedList; import java.util.List; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/FormatTimeInZone.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/FormatTimeInZone.java similarity index 88% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/FormatTimeInZone.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/FormatTimeInZone.java index ed702e119..47871e8de 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/FormatTimeInZone.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/FormatTimeInZone.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.sql.Timestamp; import java.text.DateFormat; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/GeoHash.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/GeoHash.java similarity index 83% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/GeoHash.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/GeoHash.java index fbdddaecd..c6109d80c 100644 --- 
a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/GeoHash.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/GeoHash.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import static com.github.davidmoten.geo.GeoHash.encodeHash; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonDelete.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonDelete.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonDelete.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonDelete.java index 433c29f41..6b1bda437 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonDelete.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonDelete.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import com.jayway.jsonpath.Configuration; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.Option; import com.jayway.jsonpath.PathNotFoundException; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.annotation.DataTypeHint; import java.io.Serializable; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonQuery.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonQuery.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonQuery.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonQuery.java index 6157c6286..42f2fbe65 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonQuery.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonQuery.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import com.jayway.jsonpath.Configuration; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.Option; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonInclude; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonUpdate.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonUpdate.java similarity index 91% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonUpdate.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonUpdate.java index f8b915803..c92211e23 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/JsonUpdate.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/JsonUpdate.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import 
com.jayway.jsonpath.Configuration; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.Option; import com.jayway.jsonpath.PathNotFoundException; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.InputGroup; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/LinearTrend.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/LinearTrend.java similarity index 97% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/LinearTrend.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/LinearTrend.java index 10ca742f4..b850580f6 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/LinearTrend.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/LinearTrend.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.annotation.DataTypeHint; import java.sql.Timestamp; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ListContains.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ListContains.java similarity index 83% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ListContains.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ListContains.java index 74da8d641..7b0a188e0 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/ListContains.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/ListContains.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/MapGet.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/MapGet.java similarity index 96% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/MapGet.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/MapGet.java index cf036215f..d0f27031c 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/MapGet.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/MapGet.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.functions.FunctionDefinition; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/S2AreaInKm2.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/S2AreaInKm2.java similarity index 88% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/S2AreaInKm2.java rename to 
dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/S2AreaInKm2.java index 0327dcc14..ea6588e32 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/S2AreaInKm2.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/S2AreaInKm2.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import com.google.common.geometry.S2Cell; import com.google.common.geometry.S2CellId; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; /** * The type S2AreaInKm2 udf. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/S2Id.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/S2Id.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/S2Id.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/S2Id.java index 78508bb68..68c2d1d1f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/S2Id.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/S2Id.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import com.google.common.geometry.S2CellId; import com.google.common.geometry.S2LatLng; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; /** * The type S2id udf. diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/SelectFields.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/SelectFields.java similarity index 95% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/SelectFields.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/SelectFields.java index 83e13ca53..5a09be319 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/SelectFields.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/SelectFields.java @@ -1,5 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.MessageParser; import org.apache.flink.api.java.typeutils.GenericTypeInfo; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.InputGroup; @@ -12,14 +13,13 @@ import org.apache.flink.table.types.inference.TypeInference; import org.apache.flink.table.types.inference.TypeStrategy; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.stencil.client.StencilClient; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.udfs.scalar.longbow.MessageParser; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.ArrayList; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/SingleFeatureWithType.java 
b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/SingleFeatureWithType.java similarity index 70% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/SingleFeatureWithType.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/SingleFeatureWithType.java index 037a6913b..28dfa3011 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/SingleFeatureWithType.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/SingleFeatureWithType.java @@ -1,15 +1,14 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.exceptions.InvalidNumberOfArgumentsException; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.FeatureWithTypeAccumulator; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.FeatureWithTypeAccumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.exceptions.InvalidNumberOfArgumentsException; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.InputGroup; import org.apache.flink.types.Row; -import static io.odpf.dagger.functions.common.Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR; - /** * The Single feature with type udf. */ @@ -27,10 +26,10 @@ public class SingleFeatureWithType extends ScalarUdf { public @DataTypeHint("RAW") Row[] eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object... 
objects) { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); - if (objects.length % NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR != 0) { + if (objects.length % Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR != 0) { throw new InvalidNumberOfArgumentsException(); } - for (int elementIndex = 0; elementIndex < objects.length; elementIndex += NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR) { + for (int elementIndex = 0; elementIndex < objects.length; elementIndex += Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR) { featureAccumulator.add(String.valueOf(objects[elementIndex]), objects[elementIndex + 1], ValueEnum.valueOf(String.valueOf(objects[elementIndex + 2]))); } return featureAccumulator.getFeaturesAsRows(); diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Split.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Split.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Split.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Split.java index a30870594..0858ba731 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/Split.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/Split.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/StartOfMonth.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfMonth.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/StartOfMonth.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfMonth.java index 1eeec4c88..8ff9fd7a0 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/StartOfMonth.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfMonth.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Calendar; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/StartOfWeek.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfWeek.java similarity index 91% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/StartOfWeek.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfWeek.java index 738242d0b..7e5136171 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/StartOfWeek.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfWeek.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Calendar; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/TimeInDate.java 
b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/TimeInDate.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/TimeInDate.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/TimeInDate.java index fefbee94a..e491a83a7 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/TimeInDate.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/TimeInDate.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.util.Calendar; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/TimestampFromUnix.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/TimestampFromUnix.java similarity index 81% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/TimestampFromUnix.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/TimestampFromUnix.java index 30276d964..5c186a5a6 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/TimestampFromUnix.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/TimestampFromUnix.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; import java.sql.Timestamp; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/DartAspects.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/DartAspects.java similarity index 76% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/DartAspects.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/DartAspects.java index ab0513ecc..2e12ec1b0 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/DartAspects.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/DartAspects.java @@ -1,10 +1,10 @@ -package io.odpf.dagger.functions.udfs.scalar.dart; +package com.gotocompany.dagger.functions.udfs.scalar.dart; -import io.odpf.dagger.common.metrics.aspects.AspectType; -import io.odpf.dagger.common.metrics.aspects.Aspects; +import com.gotocompany.dagger.common.metrics.aspects.AspectType; +import com.gotocompany.dagger.common.metrics.aspects.Aspects; -import static io.odpf.dagger.common.metrics.aspects.AspectType.Gauge; -import static io.odpf.dagger.common.metrics.aspects.AspectType.Metric; +import static com.gotocompany.dagger.common.metrics.aspects.AspectType.Gauge; +import static com.gotocompany.dagger.common.metrics.aspects.AspectType.Metric; /** * The enum Dart aspects. 
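Relating to the SingleFeatureWithType hunk further above: eval still consumes its varargs in (feature name, value, type) triples, now validated against Constants.NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR instead of the removed static import. A small illustrative call, assuming hypothetical feature names and values (the type labels must match ValueEnum constants; StringType is used here as one such constant):

import com.gotocompany.dagger.functions.udfs.scalar.SingleFeatureWithType;
import org.apache.flink.types.Row;

public class SingleFeatureWithTypeSketch {
    public static void main(String[] args) {
        SingleFeatureWithType udf = new SingleFeatureWithType();
        // Two (name, value, type) triples, i.e. six arguments; any count that is not a
        // multiple of NUMBER_OF_ARGUMENTS_IN_FEATURE_ACCUMULATOR throws
        // InvalidNumberOfArgumentsException.
        Row[] features = udf.eval(
                "customer_id", "12345", "StringType",
                "order_number", "GO-56789", "StringType");
        System.out.println(features.length);
    }
}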
diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/DartScalarUdf.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/DartScalarUdf.java new file mode 100644 index 000000000..df0bc89b1 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/DartScalarUdf.java @@ -0,0 +1,23 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart; + +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.common.udfs.ScalarUdf; +import lombok.Getter; +import lombok.Setter; +import org.apache.flink.table.functions.FunctionContext; + +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; + +public abstract class DartScalarUdf extends ScalarUdf { + + @Getter + @Setter // For testing purpose only + private MeterStatsManager meterStatsManager; + + @Override + public void open(FunctionContext context) throws Exception { + super.open(context); + meterStatsManager = new MeterStatsManager(context.getMetricGroup(), true); + meterStatsManager.register(UDF_TELEMETRY_GROUP_KEY, this.getName(), DartAspects.values()); + } +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStore.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStore.java new file mode 100644 index 000000000..fc1224d84 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStore.java @@ -0,0 +1,27 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store; + +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.MapCache; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.SetCache; + +/** + * The interface Data store. + */ +public interface DartDataStore { + /** + * Gets set. + * + * @param setName the set name + * @return the set + */ + SetCache getSet(String setName, MeterStatsManager meterStatsManager, GaugeStatsManager gaugeStatsManager); + + /** + * Gets map. + * + * @param mapName the map name + * @return the map + */ + MapCache getMap(String mapName, MeterStatsManager meterStatsManager, GaugeStatsManager gaugeStatsManager); +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClient.java new file mode 100644 index 000000000..f534ab6db --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClient.java @@ -0,0 +1,22 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store; + +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.functions.exceptions.BucketDoesNotExistException; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; + +public interface DartDataStoreClient { + + /** + * If a client could provide implementation to this, use the default data store, else implement DartDataStore along with client implementation. 
+ * + * @param udfName either "DartGet" or "DartContains" + * @param gaugeStatsManager an instrumentation provider + * @param bucketName name of the object storage service bucket + * @param dartName from the bucket, this would be either dart-get/path/to/file.json or dart-contains/path/to/file.json + * @return Content of the file in String format from abc://bucket-name/dart-(get/contains)/path/to/file.json + * @throws TagDoesNotExistException if tag doesn't exist + * @throws BucketDoesNotExistException if bucket doesn't exist + */ + String fetchJsonData(String udfName, GaugeStatsManager gaugeStatsManager, String bucketName, String dartName) + throws TagDoesNotExistException, BucketDoesNotExistException; +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClientProvider.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClientProvider.java new file mode 100644 index 000000000..8b6399ac0 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClientProvider.java @@ -0,0 +1,54 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.cos.CosDartClient; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.gcs.GcsDartClient; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.oss.OssDartClient; + +import java.io.Serializable; + +public class DartDataStoreClientProvider implements Serializable { + private final String udfStoreProvider; + private final String projectID; + private final Configuration configuration; + + // Do not make this final, if so then the implementation of client should be Serializable + private DartDataStoreClient dartDataStoreClient; + + public DartDataStoreClientProvider(String udfStoreProvider, String projectID, Configuration configuration) { + this.udfStoreProvider = udfStoreProvider; + this.projectID = projectID; + this.configuration = configuration; + } + + public DartDataStoreClient getDartDataStoreClient() { + // In a distributed system, we don't intend the client to be serialized and most of the implementations like + // GCP Storage implementation doesn't implement java.io.Serializable interface and you may see the below error + // Caused by: org.apache.flink.api.common.InvalidProgramException: com.google.api.services.storage.Storage@1c666a8f + // is not serializable. The object probably contains or references non serializable fields. 
+ // Caused by: java.io.NotSerializableException: com.google.api.services.storage.Storage + if (dartDataStoreClient != null) { + return dartDataStoreClient; + } + switch (udfStoreProvider) { + case Constants.UDF_STORE_PROVIDER_GCS: + dartDataStoreClient = new GcsDartClient(projectID); + break; + case Constants.UDF_STORE_PROVIDER_OSS: + dartDataStoreClient = new OssDartClient( + configuration.getString(Constants.OSS_ENDPOINT, Constants.DEFAULT_OSS_ENDPOINT) + ); + break; + case Constants.UDF_STORE_PROVIDER_COS: + dartDataStoreClient = new CosDartClient( + configuration.getBoolean(Constants.ENABLE_TKE_OIDC_PROVIDER, false), + configuration.getString(Constants.COS_REGION, Constants.DEFAULT_COS_REGION) + ); + break; + default: + throw new IllegalArgumentException("Unknown UDF Store Provider: " + udfStoreProvider); + } + return dartDataStoreClient; + } +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DefaultDartDataStore.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DefaultDartDataStore.java new file mode 100644 index 000000000..2af965108 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DefaultDartDataStore.java @@ -0,0 +1,97 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartAspects; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.MapCache; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.SetCache; +import com.gotocompany.dagger.functions.udfs.scalar.DartContains; +import com.gotocompany.dagger.functions.udfs.scalar.DartGet; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.type.TypeReference; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode; + +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * DefaultDartDataStore would be able to fetch the darts from the object storage services. + * pass the relevant client which implements {@link DartDataStoreClient} + */ +public class DefaultDartDataStore implements DartDataStore, Serializable { + + public static final String DART_GET_DIRECTORY = "dart-get/"; + public static final String DART_CONTAINS_DIRECTORY = "dart-contains/"; + + private final DartDataStoreClientProvider clientProvider; + private final String bucketId; + private final Configuration configuration; + + /** + * Instantiates a new data store. 
+ * + * @param clientProvider a {@link DartDataStoreClient} implementation for the respective object storage provider + * @param bucketId the bucket id + */ + public DefaultDartDataStore(DartDataStoreClientProvider clientProvider, String bucketId, Configuration configuration) { + this.clientProvider = clientProvider; + this.bucketId = bucketId; + this.configuration = configuration; + } + + @Override + public SetCache getSet(String setName, MeterStatsManager meterStatsManager, GaugeStatsManager gaugeManager) { + return new SetCache(getSetOfObjects(setName, meterStatsManager, gaugeManager)); + } + + @Override + public MapCache getMap(String mapName, MeterStatsManager meterStatsManager, GaugeStatsManager gaugeManager) { + Map<String, String> mapOfObjects = getMapOfObjects(mapName, meterStatsManager, gaugeManager); + return new MapCache(mapOfObjects); + } + + private Map<String, String> getMapOfObjects(String dartName, MeterStatsManager meterManager, GaugeStatsManager gaugeManager) { + String jsonData = clientProvider.getDartDataStoreClient().fetchJsonData( + DartGet.class.getSimpleName(), + gaugeManager, + this.bucketId, + DART_GET_DIRECTORY + dartName); + + ObjectMapper mapper = new ObjectMapper(); + + Map<String, String> map = null; + try { + map = mapper.readValue(jsonData, Map.class); + } catch (IOException e) { + meterManager.markEvent(DartAspects.DART_GCS_FETCH_FAILURES); + e.printStackTrace(); + } + return map; + } + + private Set<String> getSetOfObjects(String dartName, MeterStatsManager meterManager, GaugeStatsManager gaugeManager) { + String jsonData = clientProvider.getDartDataStoreClient().fetchJsonData(DartContains.class.getSimpleName(), gaugeManager, this.bucketId, DART_CONTAINS_DIRECTORY + dartName); + ObjectMapper mapper = new ObjectMapper(); + try { + ObjectNode node = (ObjectNode) mapper.readTree(jsonData); + JsonNode arrayNode = node.get("data"); + List<String> list = mapper.readValue(arrayNode.traverse(), + new TypeReference<List<String>>() { + }); + + return new HashSet<>(list); + } catch (Exception e) { + meterManager.markEvent(DartAspects.DART_GCS_FETCH_FAILURES); + e.printStackTrace(); + } + + return new HashSet<>(); + } +} diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/cos/CosDartClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/cos/CosDartClient.java new file mode 100644 index 000000000..3890eeefa --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/cos/CosDartClient.java @@ -0,0 +1,48 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store.cos; + +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.functions.common.CosLibClient; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartAspects; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStoreClient; +import com.qcloud.cos.COSClient; +import com.qcloud.cos.model.COSObject; +import com.qcloud.cos.model.COSObjectInputStream; +import com.qcloud.cos.utils.IOUtils; + +import java.io.IOException; + +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; + +public class CosDartClient implements DartDataStoreClient { + private static final Double BYTES_TO_KB = 1024.0; + private static final String DART_PATH = "dartpath"; + + private final boolean enableTkeOidcProvider; + private final String cosRegion; + + public CosDartClient(boolean
enableTkeOidcProvider, String cosRegion) { + this.enableTkeOidcProvider = enableTkeOidcProvider; + this.cosRegion = cosRegion; + // The credential provider issues short-lived tokens. Keeping a long-lived libCosClient built with these + // tokens, or refreshing the client object before every use, gives no benefit, i.e. a refresh method adds nothing. + // Additionally, the current usage of the client is to download any resources/artifacts one time when the job starts. + // Create the client when using its operations. + } + + public String fetchJsonData(String udfName, GaugeStatsManager gaugeStatsManager, String bucketName, String dartName) { + COSClient cosClient = CosLibClient.getInstance().get(enableTkeOidcProvider, cosRegion); + COSObject cosObject = cosClient.getObject(bucketName, dartName); + String dartJson; + byte[] contentByteArray; + try (COSObjectInputStream inputStream = cosObject.getObjectContent()) { + contentByteArray = IOUtils.toByteArray(inputStream); + dartJson = new String(contentByteArray); + } catch (IOException e) { + throw new TagDoesNotExistException("Could not find the content in cos for " + dartName, e); + } + gaugeStatsManager.registerString(UDF_TELEMETRY_GROUP_KEY, udfName, DartAspects.DART_GCS_PATH.getValue(), dartName); + gaugeStatsManager.registerDouble(DART_PATH, dartName, DartAspects.DART_GCS_FILE_SIZE.getValue(), contentByteArray.length / BYTES_TO_KB); + return dartJson; + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/gcs/GcsDartClient.java similarity index 68% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsClient.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/gcs/GcsDartClient.java index 208580f05..94a7a6748 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsClient.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/gcs/GcsDartClient.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.store.gcs; +package com.gotocompany.dagger.functions.udfs.scalar.dart.store.gcs; import com.google.cloud.storage.Blob; @@ -6,18 +6,18 @@ import com.google.cloud.storage.Bucket; import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; -import io.odpf.dagger.functions.exceptions.BucketDoesNotExistException; -import io.odpf.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.functions.exceptions.BucketDoesNotExistException; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartAspects; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStoreClient; -import static io.odpf.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; -import static io.odpf.dagger.functions.udfs.scalar.dart.DartAspects.DART_GCS_FILE_SIZE; -import static io.odpf.dagger.functions.udfs.scalar.dart.DartAspects.DART_GCS_PATH; +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; /** * The type Gcs client.
*/ -public class GcsClient { +public class GcsDartClient implements DartDataStoreClient { private Storage storage; @@ -29,7 +29,7 @@ public class GcsClient { * * @param projectId the project id */ - public GcsClient(String projectId) { + public GcsDartClient(String projectId) { if (storage == null) { storage = StorageOptions.newBuilder() @@ -62,8 +62,8 @@ public String fetchJsonData(String udfName, GaugeStatsManager gaugeStatsManager, if (blob == null) { throw new TagDoesNotExistException(String.format("Could not find the content in gcs for %s", dartName)); } - gaugeStatsManager.registerString(UDF_TELEMETRY_GROUP_KEY, udfName, DART_GCS_PATH.getValue(), dartName); - gaugeStatsManager.registerDouble(DART_PATH, dartName, DART_GCS_FILE_SIZE.getValue(), blob.getContent().length / BYTES_TO_KB); + gaugeStatsManager.registerString(UDF_TELEMETRY_GROUP_KEY, udfName, DartAspects.DART_GCS_PATH.getValue(), dartName); + gaugeStatsManager.registerDouble(DART_PATH, dartName, DartAspects.DART_GCS_FILE_SIZE.getValue(), blob.getContent().length / BYTES_TO_KB); return new String(blob.getContent()); } diff --git a/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/oss/OssDartClient.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/oss/OssDartClient.java new file mode 100644 index 000000000..beb311112 --- /dev/null +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/oss/OssDartClient.java @@ -0,0 +1,60 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store.oss; + +import com.aliyun.core.utils.IOUtils; +import com.aliyun.oss.OSS; +import com.aliyun.oss.OSSClientBuilder; +import com.aliyun.oss.common.auth.CredentialsProviderFactory; +import com.aliyun.oss.model.OSSObject; +import com.aliyuncs.exceptions.ClientException; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.DartAspects; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStoreClient; + +import java.io.IOException; +import java.io.InputStream; + +import static com.gotocompany.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; + +public class OssDartClient implements DartDataStoreClient { + private static final Double BYTES_TO_KB = 1024.0; + private static final String DART_PATH = "dartpath"; + + private final OSS libOssClient; + + /** + * Instantiates a new Oss client. 
+ */ + public OssDartClient(String ossEndpoint) { + try { + libOssClient = new OSSClientBuilder().build(ossEndpoint, CredentialsProviderFactory.newEnvironmentVariableCredentialsProvider()); + } catch (ClientException e) { + throw new RuntimeException("Failed to initialise OSS client", e); + } + } + + public String fetchJsonData(String udfName, GaugeStatsManager gaugeStatsManager, String bucketName, String dartName) { + OSSObject ossObject = libOssClient.getObject(bucketName, dartName); + String dartJson; + byte[] contentByteArray; + try (InputStream inputStream = ossObject.getObjectContent()) { + contentByteArray = IOUtils.toByteArray(inputStream); + dartJson = new String(contentByteArray); + } catch (IOException e) { + throw new TagDoesNotExistException("Could not find the content in oss for " + dartName, e); + } + gaugeStatsManager.registerString(UDF_TELEMETRY_GROUP_KEY, udfName, DartAspects.DART_GCS_PATH.getValue(), dartName); + gaugeStatsManager.registerDouble(DART_PATH, dartName, DartAspects.DART_GCS_FILE_SIZE.getValue(), contentByteArray.length / BYTES_TO_KB); + return dartJson; + } + + /** + * Instantiates a new OSS client. + * This constructor is used for unit test purposes. + * + * @param libOssClient the storage + */ + public OssDartClient(OSS libOssClient) { + this.libOssClient = libOssClient; + } +} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/Cache.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/Cache.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/Cache.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/Cache.java index c4c06adc7..94a9dc37e 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/Cache.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/Cache.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.types; +package com.gotocompany.dagger.functions.udfs.scalar.dart.types; import org.apache.commons.lang3.time.DateUtils; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/MapCache.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/MapCache.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/MapCache.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/MapCache.java index 6a7b9f924..0927dbb7b 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/MapCache.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/MapCache.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.types; +package com.gotocompany.dagger.functions.udfs.scalar.dart.types; -import io.odpf.dagger.functions.exceptions.KeyDoesNotExistException; +import com.gotocompany.dagger.functions.exceptions.KeyDoesNotExistException; import java.io.Serializable; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/SetCache.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/SetCache.java similarity index 96% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/SetCache.java rename
to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/SetCache.java index b66d2d67f..bca039b30 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/types/SetCache.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/SetCache.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.types; +package com.gotocompany.dagger.functions.udfs.scalar.dart.types; import java.io.Serializable; import java.util.Date; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/MessageReader.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/MessageReader.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/MessageReader.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/MessageReader.java index ad9267089..7b4d3c50e 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/MessageReader.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/MessageReader.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; -import io.odpf.dagger.functions.udfs.scalar.elementAt.row.Element; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.row.Element; import org.apache.flink.types.Row; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptor.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptor.java similarity index 97% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptor.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptor.java index bee4c6d62..b1b1c39f7 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptor.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptor.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor; import com.google.protobuf.Descriptors; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/Element.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/Element.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/Element.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/Element.java index 535aaba20..167924cb4 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/Element.java +++ 
b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/Element.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.row; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; import org.apache.flink.types.Row; import java.util.Optional; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/RowElement.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/RowElement.java similarity index 82% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/RowElement.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/RowElement.java index 53bcd816b..847e81687 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/RowElement.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/RowElement.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.row; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.row; -import io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; import org.apache.flink.types.Row; import java.util.Optional; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ValueElement.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ValueElement.java similarity index 90% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ValueElement.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ValueElement.java index 4c5c8f9a9..c45d5d581 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ValueElement.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ValueElement.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.row; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.row; import com.google.protobuf.Descriptors; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/MessageParser.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/MessageParser.java similarity index 94% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/MessageParser.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/MessageParser.java index 4d159b169..f6c9c2a4f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/MessageParser.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/MessageParser.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow; +package com.gotocompany.dagger.functions.udfs.scalar.longbow; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.functions.exceptions.LongbowException; +import 
com.gotocompany.dagger.functions.exceptions.LongbowException; import java.io.Serializable; import java.util.ArrayList; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/ProtoToRow.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/ProtoToRow.java similarity index 98% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/ProtoToRow.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/ProtoToRow.java index 1a978f0b7..b58149b2a 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/ProtoToRow.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/ProtoToRow.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow; +package com.gotocompany.dagger.functions.udfs.scalar.longbow; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/LongbowArrayType.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/LongbowArrayType.java similarity index 92% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/LongbowArrayType.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/LongbowArrayType.java index ff189203e..23949c458 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/LongbowArrayType.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/LongbowArrayType.java @@ -1,6 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array; + +import com.gotocompany.dagger.functions.exceptions.ArrayAggregationException; -import io.odpf.dagger.functions.exceptions.ArrayAggregationException; import java.io.Serializable; import java.util.function.Function; import java.util.stream.BaseStream; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpression.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpression.java similarity index 83% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpression.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpression.java index cb62e53f1..b9a7aa85f 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpression.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpression.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.expression; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression; /** * The Aggregation expression. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/Expression.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/Expression.java similarity index 93% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/Expression.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/Expression.java index c9610fba5..354396fc8 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/Expression.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/Expression.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.expression; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression; import java.io.Serializable; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpression.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpression.java similarity index 85% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpression.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpression.java index 8bc675de3..3670c2ae7 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpression.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpression.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.expression; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression; /** * The Operation expression. 
diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessor.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessor.java similarity index 87% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessor.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessor.java index ea0e6e581..215b15bd0 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessor.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessor.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.processors; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors; -import io.odpf.dagger.functions.exceptions.ArrayAggregationException; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.Expression; +import com.gotocompany.dagger.functions.exceptions.ArrayAggregationException; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.Expression; import org.apache.commons.jexl3.JexlContext; import org.apache.commons.jexl3.JexlEngine; import org.apache.commons.jexl3.JexlScript; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessor.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessor.java similarity index 82% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessor.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessor.java index 3b6dcf5af..46e1a4959 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessor.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessor.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.processors; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors; -import io.odpf.dagger.functions.exceptions.ArrayOperateException; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.Expression; +import com.gotocompany.dagger.functions.exceptions.ArrayOperateException; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.Expression; import org.apache.commons.jexl3.JexlContext; import org.apache.commons.jexl3.JexlEngine; import org.apache.commons.jexl3.JexlScript; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayProcessor.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayProcessor.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayProcessor.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayProcessor.java index 8c2cc13cd..9ea495c68 100644 --- 
a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayProcessor.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayProcessor.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.processors; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.Expression; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.Expression; import org.apache.commons.jexl3.JexlContext; import org.apache.commons.jexl3.JexlEngine; import org.apache.commons.jexl3.JexlScript; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/HistogramBucket.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/HistogramBucket.java similarity index 89% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/HistogramBucket.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/HistogramBucket.java index 70112b3b6..d081820bc 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/HistogramBucket.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/HistogramBucket.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.table; +package com.gotocompany.dagger.functions.udfs.table; -import io.odpf.dagger.common.udfs.TableUdf; +import com.gotocompany.dagger.common.udfs.TableUdf; import org.apache.flink.api.java.tuple.Tuple1; import java.util.Arrays; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/OutlierMad.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/OutlierMad.java similarity index 94% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/OutlierMad.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/OutlierMad.java index 7f924b803..7b3dca156 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/OutlierMad.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/OutlierMad.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.table; +package com.gotocompany.dagger.functions.udfs.table; -import io.odpf.dagger.common.udfs.TableUdf; -import io.odpf.dagger.functions.udfs.table.outlier.mad.Mad; -import io.odpf.dagger.functions.udfs.table.outlier.mad.Point; +import com.gotocompany.dagger.common.udfs.TableUdf; +import com.gotocompany.dagger.functions.udfs.table.outlier.mad.Mad; +import com.gotocompany.dagger.functions.udfs.table.outlier.mad.Point; import org.apache.flink.api.java.tuple.Tuple5; import org.apache.flink.table.annotation.DataTypeHint; import org.slf4j.Logger; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/outlier/mad/Mad.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/Mad.java similarity index 95% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/outlier/mad/Mad.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/Mad.java index ecc414b50..d0b90187d 100644 --- 
a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/outlier/mad/Mad.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/Mad.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.table.outlier.mad; +package com.gotocompany.dagger.functions.udfs.table.outlier.mad; -import io.odpf.dagger.functions.exceptions.MadZeroException; -import io.odpf.dagger.functions.exceptions.MedianNotFound; +import com.gotocompany.dagger.functions.exceptions.MadZeroException; +import com.gotocompany.dagger.functions.exceptions.MedianNotFound; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/outlier/mad/Point.java b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/Point.java similarity index 97% rename from dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/outlier/mad/Point.java rename to dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/Point.java index 61b1b0a4a..db7609281 100644 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/table/outlier/mad/Point.java +++ b/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/Point.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.table.outlier.mad; +package com.gotocompany.dagger.functions.udfs.table.outlier.mad; import java.sql.Timestamp; diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/factories/FunctionFactory.java b/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/factories/FunctionFactory.java deleted file mode 100644 index be9e37ac8..000000000 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/factories/FunctionFactory.java +++ /dev/null @@ -1,155 +0,0 @@ -package io.odpf.dagger.functions.udfs.factories; - -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; - -import com.google.gson.Gson; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.common.udfs.TableUdf; -import io.odpf.dagger.common.udfs.UdfFactory; -import io.odpf.dagger.functions.udfs.aggregate.CollectArray; -import io.odpf.dagger.functions.udfs.aggregate.DistinctCount; -import io.odpf.dagger.functions.udfs.aggregate.Features; -import io.odpf.dagger.functions.udfs.aggregate.FeaturesWithType; -import io.odpf.dagger.functions.udfs.aggregate.PercentileAggregator; -import io.odpf.dagger.functions.udfs.scalar.ArrayAggregate; -import io.odpf.dagger.functions.udfs.scalar.ArrayOperate; -import io.odpf.dagger.functions.udfs.scalar.ByteToString; -import io.odpf.dagger.functions.udfs.scalar.CondEq; -import io.odpf.dagger.functions.udfs.scalar.DartContains; -import io.odpf.dagger.functions.udfs.scalar.DartGet; -import io.odpf.dagger.functions.udfs.scalar.Distance; -import io.odpf.dagger.functions.udfs.scalar.ElementAt; -import io.odpf.dagger.functions.udfs.scalar.EndOfMonth; -import io.odpf.dagger.functions.udfs.scalar.EndOfWeek; -import io.odpf.dagger.functions.udfs.scalar.ExponentialMovingAverage; -import io.odpf.dagger.functions.udfs.scalar.Filters; -import io.odpf.dagger.functions.udfs.scalar.FormatTimeInZone; -import io.odpf.dagger.functions.udfs.scalar.GeoHash; -import io.odpf.dagger.functions.udfs.scalar.LinearTrend; -import 
io.odpf.dagger.functions.udfs.scalar.ListContains; -import io.odpf.dagger.functions.udfs.scalar.MapGet; -import io.odpf.dagger.functions.udfs.scalar.S2AreaInKm2; -import io.odpf.dagger.functions.udfs.scalar.S2Id; -import io.odpf.dagger.functions.udfs.scalar.SelectFields; -import io.odpf.dagger.functions.udfs.scalar.SingleFeatureWithType; -import io.odpf.dagger.functions.udfs.scalar.Split; -import io.odpf.dagger.functions.udfs.scalar.StartOfMonth; -import io.odpf.dagger.functions.udfs.scalar.StartOfWeek; -import io.odpf.dagger.functions.udfs.scalar.TimeInDate; -import io.odpf.dagger.functions.udfs.scalar.TimestampFromUnix; -import io.odpf.dagger.functions.udfs.scalar.JsonQuery; -import io.odpf.dagger.functions.udfs.scalar.JsonUpdate; -import io.odpf.dagger.functions.udfs.scalar.JsonDelete; -import io.odpf.dagger.functions.udfs.table.HistogramBucket; -import io.odpf.dagger.functions.udfs.table.OutlierMad; - -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; - -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_PROTO_CLASS; -import static io.odpf.dagger.common.core.Constants.STREAM_INPUT_SCHEMA_TABLE; -import static io.odpf.dagger.functions.common.Constants.UDF_DART_GCS_BUCKET_ID_DEFAULT; -import static io.odpf.dagger.functions.common.Constants.UDF_DART_GCS_BUCKET_ID_KEY; -import static io.odpf.dagger.functions.common.Constants.UDF_DART_GCS_PROJECT_ID_DEFAULT; -import static io.odpf.dagger.functions.common.Constants.UDF_DART_GCS_PROJECT_ID_KEY; - -/** - * The factory class for all the udf. - */ -public class FunctionFactory extends UdfFactory { - - private static final Gson GSON = new Gson(); - - private final StencilClientOrchestrator stencilClientOrchestrator; - - - /** - * Instantiates a new Function factory. 
- * - * @param streamTableEnvironment the stream table environment - * @param configuration the configuration - */ - public FunctionFactory(StreamTableEnvironment streamTableEnvironment, Configuration configuration) { - super(streamTableEnvironment, configuration); - stencilClientOrchestrator = new StencilClientOrchestrator(configuration); - } - - @Override - public HashSet getScalarUdfs() { - HashSet scalarUdfs = new HashSet<>(); - scalarUdfs.add(DartContains.withGcsDataStore(getGcsProjectId(), getGcsBucketId())); - scalarUdfs.add(DartGet.withGcsDataStore(getGcsProjectId(), getGcsBucketId())); - scalarUdfs.add(new Distance()); - scalarUdfs.add(new ElementAt(getProtosInInputStreams(), stencilClientOrchestrator)); - scalarUdfs.add(new EndOfMonth()); - scalarUdfs.add(new EndOfWeek()); - scalarUdfs.add(new ExponentialMovingAverage()); - scalarUdfs.add(new FormatTimeInZone()); - scalarUdfs.add(new GeoHash()); - scalarUdfs.add(new LinearTrend()); - scalarUdfs.add(new ListContains()); - scalarUdfs.add(new MapGet()); - scalarUdfs.add(new S2AreaInKm2()); - scalarUdfs.add(new S2Id()); - scalarUdfs.add(new SingleFeatureWithType()); - scalarUdfs.add(new Split()); - scalarUdfs.add(new StartOfMonth()); - scalarUdfs.add(new StartOfWeek()); - scalarUdfs.add(new TimeInDate()); - scalarUdfs.add(new TimestampFromUnix()); - scalarUdfs.add(new CondEq()); - scalarUdfs.add(new Filters(stencilClientOrchestrator)); - scalarUdfs.add(new SelectFields(stencilClientOrchestrator)); - scalarUdfs.add(new ArrayAggregate()); - scalarUdfs.add(new ArrayOperate()); - scalarUdfs.add(new ByteToString()); - scalarUdfs.add(new JsonQuery()); - scalarUdfs.add(new JsonUpdate()); - scalarUdfs.add(new JsonDelete()); - return scalarUdfs; - } - - @Override - public HashSet getTableUdfs() { - HashSet tableUdfs = new HashSet<>(); - tableUdfs.add(new HistogramBucket()); - tableUdfs.add(new OutlierMad()); - return tableUdfs; - } - - @Override - public HashSet getAggregateUdfs() { - HashSet aggregateUdfs = new HashSet<>(); - aggregateUdfs.add(new CollectArray()); - aggregateUdfs.add(new DistinctCount()); - aggregateUdfs.add(new Features()); - aggregateUdfs.add(new FeaturesWithType()); - aggregateUdfs.add(new PercentileAggregator()); - return aggregateUdfs; - } - - private String getGcsProjectId() { - return getConfiguration().getString(UDF_DART_GCS_PROJECT_ID_KEY, UDF_DART_GCS_PROJECT_ID_DEFAULT); - } - - private String getGcsBucketId() { - return getConfiguration().getString(UDF_DART_GCS_BUCKET_ID_KEY, UDF_DART_GCS_BUCKET_ID_DEFAULT); - } - - private LinkedHashMap getProtosInInputStreams() { - LinkedHashMap protoClassForTable = new LinkedHashMap<>(); - String jsonArrayString = getConfiguration().getString(INPUT_STREAMS, ""); - Map[] streamsConfig = GSON.fromJson(jsonArrayString, Map[].class); - for (Map streamConfig : streamsConfig) { - String protoClassName = streamConfig.getOrDefault(STREAM_INPUT_SCHEMA_PROTO_CLASS, ""); - String tableName = streamConfig.getOrDefault(STREAM_INPUT_SCHEMA_TABLE, ""); - protoClassForTable.put(tableName, protoClassName); - } - return protoClassForTable; - } -} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/FileSourceFactory.java b/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/FileSourceFactory.java deleted file mode 100644 index 8053f19fd..000000000 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/python/file/source/FileSourceFactory.java +++ /dev/null @@ -1,29 +0,0 @@ -package 
io.odpf.dagger.functions.udfs.python.file.source; - -import io.odpf.dagger.functions.udfs.python.file.source.gcs.GcsFileSource; -import io.odpf.dagger.functions.udfs.python.file.source.local.LocalFileSource; - -/** - * The type File source factory. - */ -public class FileSourceFactory { - - /** - * Gets file source. - * - * @param pythonFile the python file - * @return the file source - */ - public static FileSource getFileSource(String pythonFile) { - if ("GS".equals(getFileSourcePrefix(pythonFile))) { - return new GcsFileSource(pythonFile); - } else { - return new LocalFileSource(pythonFile); - } - } - - private static String getFileSourcePrefix(String pythonFile) { - String[] files = pythonFile.split("://"); - return files[0].toUpperCase(); - } -} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/DartGet.java b/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/DartGet.java deleted file mode 100644 index b35ddf4c0..000000000 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/DartGet.java +++ /dev/null @@ -1,90 +0,0 @@ -package io.odpf.dagger.functions.udfs.scalar; - -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.functions.exceptions.KeyDoesNotExistException; -import io.odpf.dagger.functions.udfs.scalar.dart.DartAspects; -import io.odpf.dagger.functions.udfs.scalar.dart.store.gcs.GcsDataStore; -import io.odpf.dagger.functions.udfs.scalar.dart.types.MapCache; -import org.apache.flink.table.functions.FunctionContext; - -import java.util.HashMap; -import java.util.Map; - -import static io.odpf.dagger.common.core.Constants.UDF_TELEMETRY_GROUP_KEY; - -/** - * The DartGet udf. - */ -public class DartGet extends ScalarUdf { - private final GcsDataStore dataStore; - private final Map cache; - - /** - * Instantiates a new Dart get. - * - * @param dataStore the data store - */ - public DartGet(GcsDataStore dataStore) { - this.dataStore = dataStore; - cache = new HashMap<>(); - } - - /** - * With gcs data store dart get. - * - * @param projectId the project id - * @param bucketId the bucket id - * @return the dart get - */ - public static DartGet withGcsDataStore(String projectId, String bucketId) { - return new DartGet(new GcsDataStore(projectId, bucketId)); - } - - @Override - public void open(FunctionContext context) throws Exception { - super.open(context); - MeterStatsManager meterStatsManager = new MeterStatsManager(context.getMetricGroup(), true); - meterStatsManager.register(UDF_TELEMETRY_GROUP_KEY, this.getName(), DartAspects.values()); - dataStore.setMeterStatsManager(meterStatsManager); - dataStore.setGaugeStatsManager(getGaugeStatsManager()); - } - - /** - * To fetch a corresponding value in a collection given a key from data point. 
- * - * @param collectionName the collection name - * @param key the key - * @param refreshRateInHours ttl - * @return the value in string - * @author gaurav.s - * @team DE - */ - public String eval(String collectionName, String key, Integer refreshRateInHours) { - if (cache.isEmpty() || !cache.containsKey(collectionName) || cache.get(collectionName).hasExpired(refreshRateInHours) || cache.get(collectionName).isEmpty()) { - cache.put(collectionName, dataStore.getMap(collectionName)); - dataStore.getMeterStatsManager().markEvent(DartAspects.DART_GCS_FETCH_SUCCESS); - } - dataStore.getMeterStatsManager().markEvent(DartAspects.DART_CACHE_HIT); - return cache.get(collectionName).get(key); - } - - /** - * Corresponding value in a GCS bucket given a key from data point. - * - * @param collectionName the collection name - * @param key the key - * @param refreshRateInHours the refresh rate in hours - * @param defaultValue the default value - * @return the string - */ - public String eval(String collectionName, String key, Integer refreshRateInHours, String defaultValue) { - try { - return eval(collectionName, key, refreshRateInHours); - } catch (KeyDoesNotExistException e) { - dataStore.getMeterStatsManager().markEvent(DartAspects.DART_CACHE_MISS); - return defaultValue; - } - } - -} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/DataStore.java b/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/DataStore.java deleted file mode 100644 index d0aabacec..000000000 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/DataStore.java +++ /dev/null @@ -1,25 +0,0 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.store; - -import io.odpf.dagger.functions.udfs.scalar.dart.types.MapCache; -import io.odpf.dagger.functions.udfs.scalar.dart.types.SetCache; - -/** - * The interface Data store. - */ -public interface DataStore { - /** - * Gets set. - * - * @param setName the set name - * @return the set - */ - SetCache getSet(String setName); - - /** - * Gets map. 
- * - * @param mapName the map name - * @return the map - */ - MapCache getMap(String mapName); -} diff --git a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsDataStore.java b/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsDataStore.java deleted file mode 100644 index 20648c2ba..000000000 --- a/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsDataStore.java +++ /dev/null @@ -1,145 +0,0 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.store.gcs; - -import io.odpf.dagger.common.metrics.managers.GaugeStatsManager; -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.functions.udfs.scalar.DartContains; -import io.odpf.dagger.functions.udfs.scalar.DartGet; -import io.odpf.dagger.functions.udfs.scalar.dart.DartAspects; -import io.odpf.dagger.functions.udfs.scalar.dart.store.DataStore; -import io.odpf.dagger.functions.udfs.scalar.dart.types.MapCache; -import io.odpf.dagger.functions.udfs.scalar.dart.types.SetCache; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.type.TypeReference; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode; - -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - - -/** - * The type Gcs data store. - */ -public class GcsDataStore implements DataStore, Serializable { - - private final String projectId; - - private final String bucketId; - - private GcsClient gcsClient; - - private MeterStatsManager meterStatsManager; - private GaugeStatsManager gaugeStatsManager; - - /** - * Instantiates a new Gcs data store. - * - * @param projectId the project id - * @param bucketId the bucket id - */ - public GcsDataStore(String projectId, String bucketId) { - this.projectId = projectId; - this.bucketId = bucketId; - } - - @Override - public SetCache getSet(String setName) { - - return new SetCache(getSetOfObjects(setName)); - } - - @Override - public MapCache getMap(String mapName) { - Map mapOfObjects = getMapOfObjects(mapName); - return new MapCache(mapOfObjects); - } - - /** - * Sets meter stats manager. - * - * @param meterStatsManager the meter stats manager - */ - public void setMeterStatsManager(MeterStatsManager meterStatsManager) { - this.meterStatsManager = meterStatsManager; - } - - /** - * Gets meter stats manager. - * - * @return the meter stats manager - */ - public MeterStatsManager getMeterStatsManager() { - return this.meterStatsManager; - } - - /** - * Sets gauge stats manager. - * - * @param gaugeStatsManager the gauge stats manager - */ - public void setGaugeStatsManager(GaugeStatsManager gaugeStatsManager) { - this.gaugeStatsManager = gaugeStatsManager; - } - - /** - * Gets gauge stats manager. 
- * - * @return the gauge stats manager - */ - public GaugeStatsManager getGaugeStatsManager() { - return gaugeStatsManager; - } - - private Map getMapOfObjects(String dartName) { - - String jsonData = getGcsClient().fetchJsonData(DartGet.class.getSimpleName(), getGaugeStatsManager(), this.bucketId, "dart-get/" + dartName); - - ObjectMapper mapper = new ObjectMapper(); - - Map map = null; - try { - map = mapper.readValue(jsonData, Map.class); - } catch (IOException e) { - getMeterStatsManager().markEvent(DartAspects.DART_GCS_FETCH_FAILURES); - e.printStackTrace(); - } - return map; - } - - private Set getSetOfObjects(String dartName) { - - String jsonData = getGcsClient().fetchJsonData(DartContains.class.getSimpleName(), getGaugeStatsManager(), this.bucketId, "dart-contains/" + dartName); - ObjectMapper mapper = new ObjectMapper(); - try { - ObjectNode node = (ObjectNode) mapper.readTree(jsonData); - JsonNode arrayNode = node.get("data"); - List list = mapper.readValue(arrayNode.traverse(), - new TypeReference>() { - }); - - return new HashSet<>(list); - } catch (Exception e) { - getMeterStatsManager().markEvent(DartAspects.DART_GCS_FETCH_FAILURES); - e.printStackTrace(); - } - - return new HashSet<>(); - } - - /** - * Gets gcs client. - * - * @return the gcs client - */ - GcsClient getGcsClient() { - if (this.gcsClient == null) { - this.gcsClient = new GcsClient(this.projectId); - } - return this.gcsClient; - } -} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/ClearColumnTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/ClearColumnTransformerTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/ClearColumnTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/ClearColumnTransformerTest.java index 89e0af749..9af9c9e52 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/ClearColumnTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/ClearColumnTransformerTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.StreamInfo; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/DeDuplicationTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/DeDuplicationTransformerTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/DeDuplicationTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/DeDuplicationTransformerTest.java index 17bedef79..64ac13930 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/DeDuplicationTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/DeDuplicationTransformerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import 
com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.api.common.state.MapState; import org.apache.flink.api.common.state.MapStateDescriptor; @@ -8,7 +8,7 @@ import org.apache.flink.streaming.api.datastream.KeyedStream; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.StreamInfo; import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/FeatureTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/FeatureTransformerTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/FeatureTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/FeatureTransformerTest.java index fbde1dc32..bc3ab06f0 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/FeatureTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/FeatureTransformerTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.StreamInfo; import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/FeatureWithTypeTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/FeatureWithTypeTransformerTest.java similarity index 98% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/FeatureWithTypeTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/FeatureWithTypeTransformerTest.java index 83f99eb15..a6f295191 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/FeatureWithTypeTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/FeatureWithTypeTransformerTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.StreamInfo; import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/HashTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/HashTransformerTest.java similarity index 89% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/HashTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/HashTransformerTest.java index 8d8de8459..070313f21 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/HashTransformerTest.java +++ 
b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/HashTransformerTest.java @@ -1,13 +1,12 @@ -package io.odpf.dagger.functions.transformers; - -import io.odpf.dagger.common.core.DaggerContextTestBase; -import org.apache.flink.types.Row; +package com.gotocompany.dagger.functions.transformers; import com.google.protobuf.Timestamp; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.functions.exceptions.InvalidHashFieldException; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.exceptions.DescriptorNotFoundException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.functions.exceptions.InvalidHashFieldException; +import org.apache.flink.types.Row; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -17,6 +16,7 @@ import java.util.ArrayList; import java.util.HashMap; +import java.util.concurrent.TimeUnit; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -34,11 +34,18 @@ public void setup() { initMocks(this); when(configuration.getString("SINK_KAFKA_PROTO_MESSAGE", "")) - .thenReturn("io.odpf.dagger.consumer.TestBookingLogMessage"); + .thenReturn("com.gotocompany.dagger.consumer.TestBookingLogMessage"); when(configuration.getBoolean("SCHEMA_REGISTRY_STENCIL_ENABLE", false)) .thenReturn(false); when(configuration.getString("SCHEMA_REGISTRY_STENCIL_URLS", "")) .thenReturn(""); + when(configuration.getBoolean("SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH", false)) + .thenReturn(false); + when(configuration.getLong("SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS", TimeUnit.HOURS.toMillis(2))) + .thenReturn(TimeUnit.HOURS.toMillis(2)); + when(configuration.getString("SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY", "LONG_POLLING")) + .thenReturn("LONG_POLLING"); + } @Test @@ -135,7 +142,7 @@ public void shouldHashAllFieldsOfSupportedDataTypesInInputRow() throws Exception @Test public void shouldHashNestedFields() throws Exception { when(configuration.getString("SINK_KAFKA_PROTO_MESSAGE", "")) - .thenReturn("io.odpf.dagger.consumer.TestEnrichedBookingLogMessage"); + .thenReturn("com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage"); HashMap transformationArguments = new HashMap<>(); ArrayList fieldsToEncrypt = new ArrayList<>(); @@ -214,9 +221,9 @@ public void shouldThrowErrorIfUnableToCreateRowHasherMap() throws Exception { @Test public void shouldThrowErrorIfUnableToFindOpDescriptor() throws Exception { when(configuration.getString("SINK_KAFKA_PROTO_MESSAGE", "")) - .thenReturn("io.odpf.dagger.consumer.RandomTestMessage"); + .thenReturn("com.gotocompany.dagger.consumer.RandomTestMessage"); thrown.expect(DescriptorNotFoundException.class); - thrown.expectMessage("Output Descriptor for class: io.odpf.dagger.consumer.RandomTestMessage not found"); + thrown.expectMessage("Output Descriptor for class: com.gotocompany.dagger.consumer.RandomTestMessage not found"); HashMap transformationArguments = new HashMap<>(); ArrayList fieldsToEncrypt = new ArrayList<>(); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/InvalidRecordFilterTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/InvalidRecordFilterTransformerTest.java similarity index 94% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/InvalidRecordFilterTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/InvalidRecordFilterTransformerTest.java index 22bc83e7b..21ee0d794 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/InvalidRecordFilterTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/InvalidRecordFilterTransformerTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.groups.OperatorMetricGroup; @@ -8,7 +8,7 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -21,7 +21,7 @@ import java.util.List; import java.util.stream.Collectors; -import static io.odpf.dagger.functions.transformers.filter.FilterAspects.FILTERED_INVALID_RECORDS; +import static com.gotocompany.dagger.functions.transformers.filter.FilterAspects.FILTERED_INVALID_RECORDS; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/SQLTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/SQLTransformerTest.java similarity index 98% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/SQLTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/SQLTransformerTest.java index 8330fa7c8..c042b0e6b 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/SQLTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/SQLTransformerTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers; +package com.gotocompany.dagger.functions.transformers; -import io.odpf.dagger.common.core.DaggerContextTestBase; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContext; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; @@ -12,7 +12,7 @@ import org.apache.flink.table.functions.ScalarFunction; import org.apache.flink.types.Row; -import io.odpf.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.common.core.StreamInfo; import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/feature/FeatureWithTypeHandlerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/feature/FeatureWithTypeHandlerTest.java similarity index 98% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/feature/FeatureWithTypeHandlerTest.java rename to 
dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/feature/FeatureWithTypeHandlerTest.java index ea2796f9a..4ac19585e 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/feature/FeatureWithTypeHandlerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/feature/FeatureWithTypeHandlerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.transformers.feature; +package com.gotocompany.dagger.functions.transformers.feature; import org.apache.flink.types.Row; import org.junit.Assert; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/FieldHasherFactoryTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/FieldHasherFactoryTest.java similarity index 83% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/FieldHasherFactoryTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/FieldHasherFactoryTest.java index efb3ac4b4..2d3836d4f 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/FieldHasherFactoryTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/FieldHasherFactoryTest.java @@ -1,14 +1,14 @@ -package io.odpf.dagger.functions.transformers.hash; +package com.gotocompany.dagger.functions.transformers.hash; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestEnrichedBookingLogMessage; -import io.odpf.dagger.functions.exceptions.InvalidHashFieldException; -import io.odpf.dagger.functions.transformers.hash.field.FieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.IntegerFieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.LongFieldHasher; -import io.odpf.dagger.functions.transformers.hash.field.RowHasher; -import io.odpf.dagger.functions.transformers.hash.field.StringFieldHasher; +import com.gotocompany.dagger.functions.exceptions.InvalidHashFieldException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage; +import com.gotocompany.dagger.functions.transformers.hash.field.FieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.IntegerFieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.LongFieldHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.RowHasher; +import com.gotocompany.dagger.functions.transformers.hash.field.StringFieldHasher; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/PathReaderTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/PathReaderTest.java similarity index 88% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/PathReaderTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/PathReaderTest.java index 6c4bd0c52..83828b560 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/PathReaderTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/PathReaderTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers.hash; +package 
com.gotocompany.dagger.functions.transformers.hash; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.functions.exceptions.InvalidHashFieldException; -import io.odpf.dagger.functions.transformers.hash.field.RowHasher; +import com.gotocompany.dagger.functions.exceptions.InvalidHashFieldException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.functions.transformers.hash.field.RowHasher; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/IntegerFieldHasherTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/IntegerFieldHasherTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/IntegerFieldHasherTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/IntegerFieldHasherTest.java index 3d26a6305..a1ed1dd28 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/IntegerFieldHasherTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/IntegerFieldHasherTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; import com.google.protobuf.Timestamp; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/LongFieldHasherTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/LongFieldHasherTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/LongFieldHasherTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/LongFieldHasherTest.java index 6de2b5b67..139b5d585 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/LongFieldHasherTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/LongFieldHasherTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; import com.google.protobuf.Timestamp; -import io.odpf.dagger.consumer.TestMessage; -import io.odpf.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.consumer.TestMessage; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/RowHasherTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/RowHasherTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/RowHasherTest.java rename to 
dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/RowHasherTest.java index 0077542ba..ed9384d82 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/RowHasherTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/RowHasherTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/StringFieldHasherTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/StringFieldHasherTest.java similarity index 92% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/StringFieldHasherTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/StringFieldHasherTest.java index adc8d6c05..185aca91a 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/StringFieldHasherTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/StringFieldHasherTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestEnrichedBookingLogMessage; -import io.odpf.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.functions.exceptions.RowHashException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasherTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasherTest.java similarity index 91% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasherTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasherTest.java index bd018087f..50419d7ec 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasherTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/transformers/hash/field/UnsupportedDataTypeHasherTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.transformers.hash.field; +package com.gotocompany.dagger.functions.transformers.hash.field; import com.google.protobuf.Descriptors; import com.google.protobuf.Timestamp; -import io.odpf.dagger.consumer.TestMessage; -import io.odpf.dagger.functions.exceptions.InvalidHashFieldException; +import com.gotocompany.dagger.functions.exceptions.InvalidHashFieldException; +import com.gotocompany.dagger.consumer.TestMessage; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git 
a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/CollectArrayTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/CollectArrayTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/CollectArrayTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/CollectArrayTest.java index 3850577cf..de7aa38a1 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/CollectArrayTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/CollectArrayTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.ArrayAccumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.ArrayAccumulator; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/DistinctCountTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/DistinctCountTest.java similarity index 96% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/DistinctCountTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/DistinctCountTest.java index 409da6437..890c53361 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/DistinctCountTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/DistinctCountTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.DistinctCountAccumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.DistinctCountAccumulator; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.table.functions.FunctionContext; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesTest.java index d6abba636..363245972 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.FeatureAccumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.FeatureAccumulator; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesWithTypeTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesWithTypeTest.java similarity index 96% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesWithTypeTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesWithTypeTest.java index abece155e..ae4047f32 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/FeaturesWithTypeTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/FeaturesWithTypeTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.functions.exceptions.InvalidNumberOfArgumentsException; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.FeatureWithTypeAccumulator; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.FeatureWithTypeAccumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.exceptions.InvalidNumberOfArgumentsException; import org.apache.flink.types.Row; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/PercentileAggregatorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/PercentileAggregatorTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/PercentileAggregatorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/PercentileAggregatorTest.java index eec97c5a6..99be80e7b 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/PercentileAggregatorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/PercentileAggregatorTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate; +package com.gotocompany.dagger.functions.udfs.aggregate; -import io.odpf.dagger.functions.udfs.aggregate.accumulator.PercentileAccumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.accumulator.PercentileAccumulator; import org.junit.Assert; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulatorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulatorTest.java similarity index 90% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulatorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulatorTest.java index 472d6e6e1..2357a9388 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulatorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/ArrayAccumulatorTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulatorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulatorTest.java similarity index 94% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulatorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulatorTest.java index dd3760ac0..202372209 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulatorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/DistinctCountAccumulatorTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulatorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulatorTest.java similarity index 93% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulatorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulatorTest.java index 416eeca57..9beed9285 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulatorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureAccumulatorTest.java @@ -1,5 +1,6 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; import org.apache.flink.types.Row; import com.google.protobuf.ByteString; @@ -11,7 +12,6 @@ import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.StringType; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; @@ -89,9 +89,9 @@ public void shouldThrowExceptionForValuesItCannotHandle() { @Test public void shouldBeSerializableWithFeatureTypeUdf() throws IOException, ClassNotFoundException { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); - featureAccumulator.add("key1", "value1", StringType); - featureAccumulator.add("key2", "value2", StringType); - featureAccumulator.add("key3", "value3", StringType); + featureAccumulator.add("key1", "value1", ValueEnum.StringType); + featureAccumulator.add("key2", "value2", ValueEnum.StringType); + featureAccumulator.add("key3", "value3", ValueEnum.StringType); ByteArrayOutputStream serializedAccumulatorStream = new ByteArrayOutputStream(); new ObjectOutputStream(serializedAccumulatorStream).writeObject(featureAccumulator); ObjectInputStream deserializedAccStream = new ObjectInputStream(new ByteArrayInputStream(serializedAccumulatorStream.toByteArray())); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulatorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulatorTest.java similarity index 77% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulatorTest.java rename to 
dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulatorTest.java index f8858cd33..845320be3 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulatorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/FeatureWithTypeAccumulatorTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import com.google.protobuf.ByteString; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.types.Row; import org.junit.Test; @@ -9,7 +9,6 @@ import java.util.HashMap; import java.util.Map; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -18,14 +17,14 @@ public class FeatureWithTypeAccumulatorTest { public void shouldHandleAllValueTypesWithFeatureTypeUdf() { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); HashMap> data = new HashMap<>(); - data.put("FloatKey", new Tuple2<>(1.0F, FloatType)); - data.put("StringKey", new Tuple2<>("stringValue", StringType)); - data.put("DoubleKey", new Tuple2<>(1.00D, DoubleType)); - data.put("IntegerKey", new Tuple2<>(1, IntegerType)); - data.put("LongKey", new Tuple2<>(1L, LongType)); - data.put("ByteKey", new Tuple2<>(ByteString.copyFrom("value1".getBytes()), ByteType)); - data.put("BoolKey", new Tuple2<>(true, BooleanType)); - data.put("TimestampKey", new Tuple2<>(getTimestampAsRow(123141, 431231), TimestampType)); + data.put("FloatKey", new Tuple2<>(1.0F, ValueEnum.FloatType)); + data.put("StringKey", new Tuple2<>("stringValue", ValueEnum.StringType)); + data.put("DoubleKey", new Tuple2<>(1.00D, ValueEnum.DoubleType)); + data.put("IntegerKey", new Tuple2<>(1, ValueEnum.IntegerType)); + data.put("LongKey", new Tuple2<>(1L, ValueEnum.LongType)); + data.put("ByteKey", new Tuple2<>(ByteString.copyFrom("value1".getBytes()), ValueEnum.ByteType)); + data.put("BoolKey", new Tuple2<>(true, ValueEnum.BooleanType)); + data.put("TimestampKey", new Tuple2<>(getTimestampAsRow(123141, 431231), ValueEnum.TimestampType)); populateFeatureAccumulator(featureAccumulator, data); Row[] features = featureAccumulator.getFeaturesAsRows(); @@ -37,7 +36,7 @@ public void shouldHandleAllValueTypesWithFeatureTypeUdf() { @Test public void shouldPopulateIdAndNameWithSameValuesInFeatureRowWithFeatureTypeUdf() { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); - featureAccumulator.add("FloatKey", 1.0F, FloatType); + featureAccumulator.add("FloatKey", 1.0F, ValueEnum.FloatType); Row[] features = featureAccumulator.getFeaturesAsRows(); @@ -49,9 +48,9 @@ public void shouldPopulateIdAndNameWithSameValuesInFeatureRowWithFeatureTypeUdf( @Test public void shouldPopulateAndRemoveKeysInFeatureRowWithFeatureTypeUdf() { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); - featureAccumulator.add("FloatKey", 1.0F, FloatType); - featureAccumulator.add("keyToRemove", 1.0F, FloatType); - featureAccumulator.remove("keyToRemove", 1.0F, FloatType); + featureAccumulator.add("FloatKey", 1.0F, ValueEnum.FloatType); + featureAccumulator.add("keyToRemove", 1.0F, 
ValueEnum.FloatType); + featureAccumulator.remove("keyToRemove", 1.0F, ValueEnum.FloatType); Row[] features = featureAccumulator.getFeaturesAsRows(); @@ -64,8 +63,8 @@ public void shouldPopulateAndRemoveKeysInFeatureRowWithFeatureTypeUdf() { public void shouldPopulateDuplicateKeysWithDifferentValuesInFeatureRowWithFeatureTypeUdf() { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); HashMap> data = new HashMap<>(); - data.put("duplicateKey", new Tuple2<>(1.0F, FloatType)); - data.put("duplicateKey", new Tuple2<>(2.0F, FloatType)); + data.put("duplicateKey", new Tuple2<>(1.0F, ValueEnum.FloatType)); + data.put("duplicateKey", new Tuple2<>(2.0F, ValueEnum.FloatType)); populateFeatureAccumulator(featureAccumulator, data); Row[] features = featureAccumulator.getFeaturesAsRows(); @@ -76,8 +75,8 @@ public void shouldPopulateDuplicateKeysWithDifferentValuesInFeatureRowWithFeatur @Test public void shouldNotPopulateDuplicateKeysWithSameValuesAndTypeInFeatureRowWithFeatureTypeUdf() { FeatureWithTypeAccumulator featureAccumulator = new FeatureWithTypeAccumulator(); - featureAccumulator.add("FloatKey", 1.0F, FloatType); - featureAccumulator.add("FloatKey", 1.0F, FloatType); + featureAccumulator.add("FloatKey", 1.0F, ValueEnum.FloatType); + featureAccumulator.add("FloatKey", 1.0F, ValueEnum.FloatType); Row[] features = featureAccumulator.getFeaturesAsRows(); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulatorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulatorTest.java similarity index 91% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulatorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulatorTest.java index 2b38e5145..e869c7c3b 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulatorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/accumulator/PercentileAccumulatorTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.accumulator; +package com.gotocompany.dagger.functions.udfs.aggregate.accumulator; import org.junit.Assert; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/FeatureUtilsTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/FeatureUtilsTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/FeatureUtilsTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/FeatureUtilsTest.java index fe4045ae2..a27fdb4a4 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/FeatureUtilsTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/FeatureUtilsTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast; +package com.gotocompany.dagger.functions.udfs.aggregate.feast; import com.google.protobuf.ByteString; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; import org.apache.flink.types.Row; import org.junit.Test; diff --git 
a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformerTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformerTest.java index ab451bf27..948724094 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BigDecimalValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformerTest.java similarity index 92% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformerTest.java index 37466a9a5..feaac04f1 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/BooleanValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformerTest.java similarity index 93% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformerTest.java index 5b741bc8a..7989d4ab2 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/ByteValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import com.google.protobuf.ByteString; import org.apache.flink.types.Row; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformerTest.java similarity index 96% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformerTest.java 
rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformerTest.java index 126fd831b..e604da084 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/DoubleValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.apache.flink.types.Row; import org.junit.Rule; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformerTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformerTest.java index 6c7601eba..72a732b05 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/FloatValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.junit.Rule; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformerTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformerTest.java index 8745a43df..03e6b77ec 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/IntegerValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.junit.Rule; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformerTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformerTest.java index 06d3f7535..b00bc68ef 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/LongValueTransformerTest.java @@ -1,4 +1,4 @@ -package 
io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.junit.Rule; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformerTest.java similarity index 93% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformerTest.java index 9947a94e0..8f47621a9 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/NullValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.apache.commons.lang3.NotImplementedException; import org.apache.flink.types.Row; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformerTest.java similarity index 91% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformerTest.java index 02baedc1e..078aefbd9 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/StringValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformerTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformerTest.java index 9fc4d3849..ec7f2f82a 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/aggregate/feast/handler/TimestampValueTransformerTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.aggregate.feast.handler; +package com.gotocompany.dagger.functions.udfs.aggregate.feast.handler; import org.apache.flink.types.Row; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/factories/FunctionFactoryTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactoryTest.java similarity index 71% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/factories/FunctionFactoryTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactoryTest.java index cdbd23b0d..5deb97488 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/factories/FunctionFactoryTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactoryTest.java @@ -1,47 +1,47 @@ -package io.odpf.dagger.functions.udfs.factories; +package com.gotocompany.dagger.functions.udfs.factories; -import io.odpf.dagger.functions.udfs.aggregate.CollectArray; -import io.odpf.dagger.functions.udfs.aggregate.DistinctCount; -import io.odpf.dagger.functions.udfs.aggregate.Features; -import io.odpf.dagger.functions.udfs.aggregate.FeaturesWithType; -import io.odpf.dagger.functions.udfs.aggregate.PercentileAggregator; -import io.odpf.dagger.functions.udfs.scalar.ArrayAggregate; -import io.odpf.dagger.functions.udfs.scalar.ArrayOperate; -import io.odpf.dagger.functions.udfs.scalar.ByteToString; -import io.odpf.dagger.functions.udfs.scalar.CondEq; -import io.odpf.dagger.functions.udfs.scalar.DartContains; -import io.odpf.dagger.functions.udfs.scalar.DartGet; -import io.odpf.dagger.functions.udfs.scalar.Distance; -import io.odpf.dagger.functions.udfs.scalar.ElementAt; -import io.odpf.dagger.functions.udfs.scalar.EndOfMonth; -import io.odpf.dagger.functions.udfs.scalar.EndOfWeek; -import io.odpf.dagger.functions.udfs.scalar.ExponentialMovingAverage; -import io.odpf.dagger.functions.udfs.scalar.Filters; -import io.odpf.dagger.functions.udfs.scalar.FormatTimeInZone; -import io.odpf.dagger.functions.udfs.scalar.GeoHash; -import io.odpf.dagger.functions.udfs.scalar.LinearTrend; -import io.odpf.dagger.functions.udfs.scalar.ListContains; -import io.odpf.dagger.functions.udfs.scalar.MapGet; -import io.odpf.dagger.functions.udfs.scalar.S2AreaInKm2; -import io.odpf.dagger.functions.udfs.scalar.S2Id; -import io.odpf.dagger.functions.udfs.scalar.SelectFields; -import io.odpf.dagger.functions.udfs.scalar.SingleFeatureWithType; -import io.odpf.dagger.functions.udfs.scalar.Split; -import io.odpf.dagger.functions.udfs.scalar.StartOfMonth; -import io.odpf.dagger.functions.udfs.scalar.StartOfWeek; -import io.odpf.dagger.functions.udfs.scalar.TimeInDate; -import io.odpf.dagger.functions.udfs.scalar.TimestampFromUnix; -import io.odpf.dagger.functions.udfs.scalar.JsonQuery; -import io.odpf.dagger.functions.udfs.scalar.JsonUpdate; -import io.odpf.dagger.functions.udfs.scalar.JsonDelete; +import com.gotocompany.dagger.functions.udfs.table.HistogramBucket; +import com.gotocompany.dagger.functions.udfs.table.OutlierMad; +import com.gotocompany.dagger.functions.udfs.aggregate.CollectArray; +import com.gotocompany.dagger.functions.udfs.aggregate.DistinctCount; +import com.gotocompany.dagger.functions.udfs.aggregate.Features; +import com.gotocompany.dagger.functions.udfs.aggregate.FeaturesWithType; +import com.gotocompany.dagger.functions.udfs.aggregate.PercentileAggregator; +import com.gotocompany.dagger.functions.udfs.scalar.ArrayAggregate; +import com.gotocompany.dagger.functions.udfs.scalar.ArrayOperate; +import com.gotocompany.dagger.functions.udfs.scalar.ByteToString; +import com.gotocompany.dagger.functions.udfs.scalar.CondEq; +import com.gotocompany.dagger.functions.udfs.scalar.DartContains; +import com.gotocompany.dagger.functions.udfs.scalar.DartGet; +import com.gotocompany.dagger.functions.udfs.scalar.Distance; +import 
com.gotocompany.dagger.functions.udfs.scalar.ElementAt; +import com.gotocompany.dagger.functions.udfs.scalar.EndOfMonth; +import com.gotocompany.dagger.functions.udfs.scalar.EndOfWeek; +import com.gotocompany.dagger.functions.udfs.scalar.ExponentialMovingAverage; +import com.gotocompany.dagger.functions.udfs.scalar.Filters; +import com.gotocompany.dagger.functions.udfs.scalar.FormatTimeInZone; +import com.gotocompany.dagger.functions.udfs.scalar.GeoHash; +import com.gotocompany.dagger.functions.udfs.scalar.LinearTrend; +import com.gotocompany.dagger.functions.udfs.scalar.ListContains; +import com.gotocompany.dagger.functions.udfs.scalar.MapGet; +import com.gotocompany.dagger.functions.udfs.scalar.S2AreaInKm2; +import com.gotocompany.dagger.functions.udfs.scalar.S2Id; +import com.gotocompany.dagger.functions.udfs.scalar.SelectFields; +import com.gotocompany.dagger.functions.udfs.scalar.SingleFeatureWithType; +import com.gotocompany.dagger.functions.udfs.scalar.Split; +import com.gotocompany.dagger.functions.udfs.scalar.StartOfMonth; +import com.gotocompany.dagger.functions.udfs.scalar.StartOfWeek; +import com.gotocompany.dagger.functions.udfs.scalar.TimeInDate; +import com.gotocompany.dagger.functions.udfs.scalar.TimestampFromUnix; +import com.gotocompany.dagger.functions.udfs.scalar.JsonQuery; +import com.gotocompany.dagger.functions.udfs.scalar.JsonUpdate; +import com.gotocompany.dagger.functions.udfs.scalar.JsonDelete; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.udfs.AggregateUdf; -import io.odpf.dagger.common.udfs.ScalarUdf; -import io.odpf.dagger.common.udfs.TableUdf; -import io.odpf.dagger.functions.udfs.table.HistogramBucket; -import io.odpf.dagger.functions.udfs.table.OutlierMad; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.udfs.AggregateUdf; +import com.gotocompany.dagger.common.udfs.ScalarUdf; +import com.gotocompany.dagger.common.udfs.TableUdf; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -49,9 +49,9 @@ import java.util.HashSet; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_ENABLE_KEY; -import static io.odpf.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_URLS_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_URLS_KEY; import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; @@ -73,7 +73,7 @@ public void setup() { + " \"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\": \"latest\",\n" + " \"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\": \"localhost:9092\",\n" + " \"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\": \"flink-sql-gp0330\",\n" - + " \"INPUT_SCHEMA_PROTO_CLASS\": \"io.odpf.test.TestLogMessage\",\n" + + " \"INPUT_SCHEMA_PROTO_CLASS\": \"com.gotocompany.test.TestLogMessage\",\n" + " \"INPUT_SCHEMA_TABLE\": \"data_stream\",\n" + " \"SOURCE_KAFKA_TOPIC_NAMES\": \"test-log\"\n" + " }\n" diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/PythonUdfConfigTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfConfigTest.java similarity index 82% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/PythonUdfConfigTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfConfigTest.java index 01be1b7af..9f3f447a8 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/PythonUdfConfigTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfConfigTest.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.functions.udfs.python; +package com.gotocompany.dagger.functions.udfs.python; -import io.odpf.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; -import static io.odpf.dagger.functions.common.Constants.*; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -24,7 +24,7 @@ public void setup() { public void shouldParseConfig() { String pythonJsonConfig = "{ \"PYTHON_FILES\": \"/path/to/function.zip\", \"PYTHON_ARCHIVES\": \"/path/to/file.txt\", \"PYTHON_REQUIREMENTS\": \"requirements.txt\", \"PYTHON_FN_EXECUTION_ARROW_BATCH_SIZE\": \"10000\", \"PYTHON_FN_EXECUTION_BUNDLE_SIZE\": \"100000\", \"PYTHON_FN_EXECUTION_BUNDLE_TIME\": \"1000\" }"; - when(configuration.getString(PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); + when(configuration.getString(Constants.PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); PythonUdfConfig pythonUdfConfig = PythonUdfConfig.parse(configuration); Assert.assertNotNull(pythonUdfConfig); @@ -40,7 +40,7 @@ public void shouldParseConfig() { public void shouldUseDefaultValueIfConfigIsNotGiven() { String pythonJsonConfig = "{ \"PYTHON_FILES\": \"/path/to/function.zip\", \"PYTHON_ARCHIVES\": \"/path/to/file.txt\", \"PYTHON_REQUIREMENTS\": \"requirements.txt\" }"; - when(configuration.getString(PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); + when(configuration.getString(Constants.PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); PythonUdfConfig pythonUdfConfig = PythonUdfConfig.parse(configuration); Assert.assertEquals(pythonUdfConfig.getPythonArrowBatchSize(), 10000); @@ -52,7 +52,7 @@ public void shouldUseDefaultValueIfConfigIsNotGiven() { public void shouldReturnNullIfPythonFilesConfigIsNotGiven() { String pythonJsonConfig = "{\"PYTHON_FN_EXECUTION_ARROW_BATCH_SIZE\": \"10000\", \"PYTHON_FN_EXECUTION_BUNDLE_SIZE\": \"100000\", \"PYTHON_FN_EXECUTION_BUNDLE_TIME\": \"1000\"}"; - when(configuration.getString(PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); + when(configuration.getString(Constants.PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); PythonUdfConfig pythonUdfConfig = PythonUdfConfig.parse(configuration); Assert.assertNull(pythonUdfConfig.getPythonFiles()); @@ -64,7 +64,7 @@ public void shouldReturnNullIfPythonFilesConfigIsNotGiven() { public void shouldRemoveWhitespaceInPythonFilesConfig() { String pythonJsonConfig = "{ \"PYTHON_FILES\": \" /path/to/function.zip, /path/to/files/test.py \"}"; - when(configuration.getString(PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); + when(configuration.getString(Constants.PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); PythonUdfConfig pythonUdfConfig = PythonUdfConfig.parse(configuration); Assert.assertEquals(pythonUdfConfig.getPythonFiles(), "/path/to/function.zip,/path/to/files/test.py"); @@ -74,7 +74,7 @@ public void 
shouldRemoveWhitespaceInPythonFilesConfig() { public void shouldRemoveWhitespaceInPythonArchivesConfig() { String pythonJsonConfig = "{ \"PYTHON_FILES\": \"/path/to/function.zip\", \"PYTHON_ARCHIVES\": \" /path/to/data.zip, /path/to/files/second_data.zip \"}"; - when(configuration.getString(PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); + when(configuration.getString(Constants.PYTHON_UDF_CONFIG, "")).thenReturn(pythonJsonConfig); PythonUdfConfig pythonUdfConfig = PythonUdfConfig.parse(configuration); Assert.assertEquals(pythonUdfConfig.getPythonArchives(), "/path/to/data.zip,/path/to/files/second_data.zip"); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/PythonUdfManagerTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfManagerTest.java similarity index 91% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/PythonUdfManagerTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfManagerTest.java index d7a210db6..cc6d22ce2 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/PythonUdfManagerTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/PythonUdfManagerTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.python; +package com.gotocompany.dagger.functions.udfs.python; -import io.odpf.dagger.functions.exceptions.PythonFilesFormatException; -import io.odpf.dagger.functions.exceptions.PythonFilesEmptyException; +import com.gotocompany.dagger.functions.exceptions.PythonFilesEmptyException; +import com.gotocompany.dagger.functions.exceptions.PythonFilesFormatException; import org.apache.flink.configuration.Configuration; import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; @@ -37,6 +37,9 @@ public class PythonUdfManagerTest { @Mock private Configuration configuration; + @Mock + private com.gotocompany.dagger.common.configuration.Configuration daggerConfiguration; + @Before public void setup() { initMocks(this); @@ -55,7 +58,7 @@ public void shouldRegisterPythonUdfConfig() throws IOException { when(pythonUdfConfig.getPythonBundleSize()).thenReturn(100000); when(pythonUdfConfig.getPythonBundleTime()).thenReturn(1000L); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); verify(configuration, times(1)).setString("python.files", pathFile); @@ -74,7 +77,7 @@ public void shouldNotRegisterConfigIfNotSet() throws IOException { when(tableConfig.getConfiguration()).thenReturn(configuration); when(pythonUdfConfig.getPythonFiles()).thenReturn(pathFile); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); verify(configuration, times(1)).setString("python.files", pathFile); @@ -91,7 +94,7 @@ public void shouldRegisterPythonUdfFromPyFile() throws IOException { when(tableConfig.getConfiguration()).thenReturn(configuration); when(pythonUdfConfig.getPythonFiles()).thenReturn(pathFile); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager 
pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); verify(configuration, times(1)).setString("python.files", pathFile); @@ -110,7 +113,7 @@ public void shouldOnlyExecutePyFormatInsideZipFile() throws IOException { when(tableConfig.getConfiguration()).thenReturn(configuration); when(pythonUdfConfig.getPythonFiles()).thenReturn(pathFile); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); verify(configuration, times(1)).setString("python.files", pathFile); @@ -132,7 +135,7 @@ public void shouldRegisterPythonUdfFromPyAndZipFile() throws IOException { when(tableConfig.getConfiguration()).thenReturn(configuration); when(pythonUdfConfig.getPythonFiles()).thenReturn(zipPathFile + "," + pyPathFile); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); verify(configuration, times(1)).setString("python.files", zipPathFile + "," + pyPathFile); @@ -153,7 +156,7 @@ public void shouldThrowExceptionIfPythonFilesNotInZipOrPyFormat() throws IOExcep when(tableConfig.getConfiguration()).thenReturn(configuration); when(pythonUdfConfig.getPythonFiles()).thenReturn("test_file.txt"); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); } @@ -166,7 +169,7 @@ public void shouldThrowExceptionIfPythonFilesIsEmpty() throws IOException { when(tableConfig.getConfiguration()).thenReturn(configuration); when(pythonUdfConfig.getPythonFiles()).thenReturn(""); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); } @@ -178,7 +181,7 @@ public void shouldThrowExceptionIfPythonFilesIsNull() throws IOException { when(tableEnvironment.getConfig()).thenReturn(tableConfig); when(tableConfig.getConfiguration()).thenReturn(configuration); - PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig); + PythonUdfManager pythonUdfManager = new PythonUdfManager(tableEnvironment, pythonUdfConfig, daggerConfiguration); pythonUdfManager.registerPythonFunctions(); } diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSourceFactoryTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSourceFactoryTest.java new file mode 100644 index 000000000..864593b61 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/FileSourceFactoryTest.java @@ -0,0 +1,63 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.python.file.source.cos.CosFileSource; +import 
com.gotocompany.dagger.functions.udfs.python.file.source.gcs.GcsFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.local.LocalFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.oss.OssFileSource; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class FileSourceFactoryTest { + + @Mock + private Configuration configuration; + + @Before + public void setUp() { + initMocks(this); + when(configuration.getString(Constants.OSS_ENDPOINT, Constants.DEFAULT_OSS_ENDPOINT)).thenReturn("oss-ap-southeast-5.aliyuncs.com"); + } + + @Test + public void shouldGetLocalFileSource() { + String pythonFile = "/path/to/file/test_function.py"; + + FileSource fileSource = FileSourceFactory.getFileSource(pythonFile, configuration); + + Assert.assertTrue(fileSource instanceof LocalFileSource); + } + + @Test + public void shouldGetGcsFileSource() { + String pythonFile = "gs://bucket-name/path/to/file/test_function.py"; + + FileSource fileSource = FileSourceFactory.getFileSource(pythonFile, configuration); + + Assert.assertTrue(fileSource instanceof GcsFileSource); + } + + @Test + public void shouldGetOssFileSource() { + String pythonFile = "oss://bucket-name/path/to/file/test_function.py"; + + FileSource fileSource = FileSourceFactory.getFileSource(pythonFile, configuration); + + Assert.assertTrue(fileSource instanceof OssFileSource); + } + + @Test + public void shouldGetCosnFileSource() { + String pythonFile = "cosn://bucket-name/path/to/file/test_function.py"; + + FileSource fileSource = FileSourceFactory.getFileSource(pythonFile, configuration); + + Assert.assertTrue(fileSource instanceof CosFileSource); + } +} diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileClientTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileClientTest.java new file mode 100644 index 000000000..a10be7cd0 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileClientTest.java @@ -0,0 +1,58 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.cos; + +import com.gotocompany.dagger.functions.common.CosLibClient; +import com.qcloud.cos.COSClient; +import com.qcloud.cos.model.COSObject; +import com.qcloud.cos.model.COSObjectInputStream; +import org.apache.http.client.methods.HttpRequestBase; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Arrays; + +import static org.mockito.Mockito.*; +import static org.mockito.MockitoAnnotations.initMocks; + +public class CosFileClientTest { + @Mock + private CosLibClient cosLibClient; + + @Mock + private COSClient cosClient; + + @Mock + private COSObject cosObject; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldGetObjectFile() throws IOException { + HttpRequestBase mockRequest = Mockito.mock(HttpRequestBase.class); + + String pythonFile = "cosn://bucket_name/path/to/file/python_udf.zip"; + String bucketName = "bucket_name"; + String objectName = "path/to/file/python_udf.zip"; + String expectedValue = Arrays.toString("objectFile".getBytes()); + + CosLibClient.testOnlySetInstance(cosLibClient); + 
doReturn(cosClient).when(cosLibClient).get(false, "ap-jakarta"); + + when(cosClient.getObject(bucketName, objectName)).thenReturn(cosObject); + when(cosObject.getObjectContent()).thenReturn(new COSObjectInputStream(new ByteArrayInputStream("objectFile".getBytes()), mockRequest)); + + CosFileClient cosFileClient = new CosFileClient(false, "ap-jakarta"); + byte[] actualValue = cosFileClient.getFile(pythonFile); + + verify(this.cosClient, times(1)).getObject(bucketName, objectName); + verify(cosObject, times(1)).getObjectContent(); + Assert.assertEquals(expectedValue, Arrays.toString(actualValue)); + } +} diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileSourceTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileSourceTest.java new file mode 100644 index 000000000..8652000c2 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/cos/CosFileSourceTest.java @@ -0,0 +1,38 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.cos; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class CosFileSourceTest { + + @Mock + private CosFileClient cosFileClient; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldGetObjectFile() throws IOException { + ClassLoader classLoader = getClass().getClassLoader(); + String pythonFile = classLoader.getResource("python_udf.zip").getFile(); + byte[] expectedObject = Files.readAllBytes(Paths.get(pythonFile)); + + when(cosFileClient.getFile(pythonFile)).thenReturn(expectedObject); + CosFileSource cosFileSource = new CosFileSource(pythonFile, cosFileClient, false, "ap-jakarta"); + + byte[] actualObject = cosFileSource.getObjectFile(); + + Assert.assertEquals(expectedObject, actualObject); + } +} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsClientTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsClientTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsClientTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsClientTest.java index 5c0c7950e..6ad3ed6f3 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsClientTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsClientTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.python.file.source.gcs; +package com.gotocompany.dagger.functions.udfs.python.file.source.gcs; import com.google.cloud.storage.Blob; import com.google.cloud.storage.BlobId; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsFileSourceTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsFileSourceTest.java similarity index 93% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsFileSourceTest.java rename to 
dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsFileSourceTest.java index d72d6a8f2..3ad76ffbf 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/gcs/GcsFileSourceTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/gcs/GcsFileSourceTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.python.file.source.gcs; +package com.gotocompany.dagger.functions.udfs.python.file.source.gcs; import org.junit.Assert; import org.junit.Before; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/local/LocalFileSourceTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/local/LocalFileSourceTest.java similarity index 92% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/local/LocalFileSourceTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/local/LocalFileSourceTest.java index 4094e186a..bb659381b 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/local/LocalFileSourceTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/local/LocalFileSourceTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.python.file.source.local; +package com.gotocompany.dagger.functions.udfs.python.file.source.local; import org.junit.Assert; import org.junit.Test; diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssClientTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssClientTest.java new file mode 100644 index 000000000..f89b4fcc5 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssClientTest.java @@ -0,0 +1,48 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.oss; + +import com.aliyun.oss.OSS; +import com.aliyun.oss.model.OSSObject; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Arrays; + +import static org.mockito.Mockito.*; +import static org.mockito.MockitoAnnotations.initMocks; + +public class OssClientTest { + + @Mock + private OSS libOSSClient; + + @Mock + private OSSObject ossObject; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldGetObjectFile() throws IOException { + + String pythonFile = "oss://bucket_name/path/to/file/python_udf.zip"; + String bucketName = "bucket_name"; + String objectName = "path/to/file/python_udf.zip"; + String expectedValue = Arrays.toString("objectFile".getBytes()); + + when(libOSSClient.getObject(bucketName, objectName)).thenReturn(ossObject); + when(ossObject.getObjectContent()).thenReturn(new ByteArrayInputStream("objectFile".getBytes())); + + OssClient ossClient = new OssClient(libOSSClient); + byte[] actualValue = ossClient.getFile(pythonFile); + + verify(libOSSClient, times(1)).getObject(bucketName, objectName); + verify(ossObject, times(1)).getObjectContent(); + Assert.assertEquals(expectedValue, Arrays.toString(actualValue)); + } +} diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssFileSourceTest.java 
b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssFileSourceTest.java new file mode 100644 index 000000000..6dc56eb38 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/source/oss/OssFileSourceTest.java @@ -0,0 +1,38 @@ +package com.gotocompany.dagger.functions.udfs.python.file.source.oss; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class OssFileSourceTest { + + @Mock + private OssClient ossClient; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldGetObjectFile() throws IOException { + ClassLoader classLoader = getClass().getClassLoader(); + String pythonFile = classLoader.getResource("python_udf.zip").getFile(); + byte[] expectedObject = Files.readAllBytes(Paths.get(pythonFile)); + + when(ossClient.getFile(pythonFile)).thenReturn(expectedObject); + OssFileSource ossFileSource = new OssFileSource(pythonFile, ossClient, "some-oss-endpoint"); + + byte[] actualObject = ossFileSource.getObjectFile(); + + Assert.assertEquals(expectedObject, actualObject); + } +} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/FileTypeFactoryTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileTypeFactoryTest.java similarity index 56% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/FileTypeFactoryTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileTypeFactoryTest.java index 67f58cf63..24c2ac90d 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/FileTypeFactoryTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/FileTypeFactoryTest.java @@ -1,21 +1,37 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; -import io.odpf.dagger.functions.exceptions.PythonFilesFormatException; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.exceptions.PythonFilesFormatException; import org.junit.Assert; +import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import org.mockito.Mock; + +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; public class FileTypeFactoryTest { @Rule public ExpectedException expectedEx = ExpectedException.none(); + @Mock + private Configuration configuration; + + @Before + public void setUp() { + initMocks(this); + when(configuration.getString(Constants.OSS_ENDPOINT, Constants.DEFAULT_OSS_ENDPOINT)).thenReturn("oss-ap-southeast-5.aliyuncs.com"); + } + @Test public void shouldGetPythonFileType() { String pythonFile = "/path/to/file/test_udf.py"; - FileType fileType = FileTypeFactory.getFileType(pythonFile); + FileType fileType = FileTypeFactory.getFileType(pythonFile, configuration); Assert.assertTrue(fileType instanceof PythonFileType); } @@ -24,7 +40,7 @@ public void shouldGetPythonFileType() { public void shouldGetZipFileType() { String pythonFile = 
"/path/to/file/python_udf.zip"; - FileType fileType = FileTypeFactory.getFileType(pythonFile); + FileType fileType = FileTypeFactory.getFileType(pythonFile, configuration); Assert.assertTrue(fileType instanceof ZipFileType); } @@ -36,6 +52,6 @@ public void shouldThrowExceptionIfPythonFilesNotInZipOrPyFormat() { String pythonFile = "/path/to/file/test_file.txt"; - FileTypeFactory.getFileType(pythonFile); + FileTypeFactory.getFileType(pythonFile, configuration); } } diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/PythonFileTypeTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/PythonFileTypeTest.java similarity index 90% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/PythonFileTypeTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/PythonFileTypeTest.java index 7ff4e9c69..54086e54a 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/PythonFileTypeTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/PythonFileTypeTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; -import io.odpf.dagger.functions.exceptions.PythonFilesEmptyException; +import com.gotocompany.dagger.functions.exceptions.PythonFilesEmptyException; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/ZipFileTypeTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/ZipFileTypeTest.java similarity index 90% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/ZipFileTypeTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/ZipFileTypeTest.java index 99acd37ae..0d1dc1752 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/type/ZipFileTypeTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/python/file/type/ZipFileTypeTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.python.file.type; +package com.gotocompany.dagger.functions.udfs.python.file.type; -import io.odpf.dagger.functions.udfs.python.file.source.gcs.GcsFileSource; -import io.odpf.dagger.functions.udfs.python.file.source.local.LocalFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.gcs.GcsFileSource; +import com.gotocompany.dagger.functions.udfs.python.file.source.local.LocalFileSource; import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ArrayAggregateTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayAggregateTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ArrayAggregateTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayAggregateTest.java index 624c81130..544629789 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ArrayAggregateTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayAggregateTest.java @@ -1,5 +1,6 @@ -package 
io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; +import com.gotocompany.dagger.functions.exceptions.ArrayAggregationException; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.table.catalog.DataTypeFactory; @@ -9,7 +10,6 @@ import org.apache.flink.table.types.inference.ConstantArgumentCount; import org.apache.flink.table.types.inference.InputTypeStrategy; -import io.odpf.dagger.functions.exceptions.ArrayAggregationException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -113,7 +113,7 @@ public void shouldComputeBasicAggregatesForArray() throws Exception { @Test public void shouldThrowErrorIfFunctionIsUnsupported() throws Exception { thrown.expect(ArrayAggregationException.class); - thrown.expectMessage("io.odpf.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor.initJexl@1:18 unsolvable function/method 'coun'"); + thrown.expectMessage("ArrayProcessor.initJexl@1:18 unsolvable function/method 'coun'"); Object[] objects = new Object[5]; objects[0] = "a"; objects[1] = "a"; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ArrayOperateTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayOperateTest.java similarity index 87% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ArrayOperateTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayOperateTest.java index aec0871f8..857183e7d 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ArrayOperateTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ArrayOperateTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.functions.exceptions.ArrayOperateException; +import com.gotocompany.dagger.functions.exceptions.ArrayOperateException; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.table.catalog.DataTypeFactory; @@ -18,7 +18,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.collection.ArrayMatching.arrayContaining; import static org.hamcrest.collection.ArrayMatching.arrayContainingInAnyOrder; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; @@ -48,7 +48,7 @@ public void setup() { @Test public void shouldComputeBasicOPerationsForArray() throws Exception { - Object[] objects = new Object[] {"a", "a", "b", "v", "a"}; + Object[] objects = new Object[]{"a", "a", "b", "v", "a"}; ArrayOperate arrayOperate = new ArrayOperate(); arrayOperate.open(functionContext); Object[] objectList = arrayOperate.eval(objects, "distinct", "other"); @@ -57,7 +57,7 @@ public void shouldComputeBasicOPerationsForArray() throws Exception { @Test public void shouldComputeBasicArrayOperationsForIntArray() throws Exception { - Object[] objects = new Object[] {1, 2, 1, 2, 1}; + Object[] objects = new Object[]{1, 2, 1, 2, 1}; ArrayOperate arrayOperate = new ArrayOperate(); arrayOperate.open(functionContext); Object[] result = arrayOperate.eval(objects, "distinct.sorted", "int"); @@ -66,7 +66,7 @@ public void shouldComputeBasicArrayOperationsForIntArray() throws 
Exception { @Test public void shouldComputeBasicArrayOperationsForDoubleArray() throws Exception { - Object[] objects = new Object[] {1.3d, 2.1d, 1.3d, 0.1d, 1.3d}; + Object[] objects = new Object[]{1.3d, 2.1d, 1.3d, 0.1d, 1.3d}; ArrayOperate arrayOperate = new ArrayOperate(); arrayOperate.open(functionContext); Object[] result = arrayOperate.eval(objects, "distinct.sorted", "double"); @@ -76,8 +76,8 @@ public void shouldComputeBasicArrayOperationsForDoubleArray() throws Exception { @Test public void shouldThrowErrorIfFunctionIsUnsupported() throws Exception { thrown.expect(ArrayOperateException.class); - thrown.expectMessage("org.apache.commons.jexl3.JexlException$Method: io.odpf.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor.initJexl@1:18 unsolvable function/method 'sort'"); - Object[] objects = new Object[] {"a", "a", "b", "v", "a"}; + thrown.expectMessage("org.apache.commons.jexl3.JexlException$Method: com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors.ArrayProcessor.initJexl@1:18 unsolvable function/method 'sort'"); + Object[] objects = new Object[]{"a", "a", "b", "v", "a"}; ArrayOperate arrayOperate = new ArrayOperate(); arrayOperate.open(functionContext); arrayOperate.eval(objects, "distinct.sort", "other"); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ByteToStringTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ByteToStringTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ByteToStringTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ByteToStringTest.java index 090ce6f99..323774b4f 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ByteToStringTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ByteToStringTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import com.google.protobuf.ByteString; import org.apache.flink.table.api.DataTypes; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/CondEqTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/CondEqTest.java similarity index 88% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/CondEqTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/CondEqTest.java index 77d90dc1d..5d6983241 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/CondEqTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/CondEqTest.java @@ -1,17 +1,18 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.consumer.TestBookingLogMessage; import com.google.protobuf.DynamicMessage; import com.google.protobuf.Timestamp; import com.google.protobuf.Timestamp.Builder; -import io.odpf.dagger.functions.exceptions.LongbowException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.functions.exceptions.LongbowException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.util.function.Predicate; -import static org.junit.Assert.*; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; public class CondEqTest { 
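    // The cases below exercise CondEq, which (per the imports) builds a java.util.function.Predicate
    // over a protobuf message: comparisons may target nested fields, while an empty or unknown
    // field name is expected to surface as a LongbowException naming the message type
    // (now com.gotocompany.dagger.consumer.TestBookingLogMessage after the package move).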
@Rule @@ -60,7 +61,7 @@ public void shouldBeAbleToCompareNestedFieldName() { @Test public void shouldHandleFieldNameEmpty() { - thrown.expectMessage("Key : does not exist in Message io.odpf.dagger.consumer.TestBookingLogMessage"); + thrown.expectMessage("Key : does not exist in Message com.gotocompany.dagger.consumer.TestBookingLogMessage"); thrown.expect(LongbowException.class); TestBookingLogMessage testBookingLog = TestBookingLogMessage.newBuilder().build(); @@ -72,7 +73,7 @@ public void shouldHandleFieldNameEmpty() { @Test public void shouldHandleFieldNameDoesNotExist() { - thrown.expectMessage("Key : arbitrary does not exist in Message io.odpf.dagger.consumer.TestBookingLogMessage"); + thrown.expectMessage("Key : arbitrary does not exist in Message com.gotocompany.dagger.consumer.TestBookingLogMessage"); thrown.expect(LongbowException.class); TestBookingLogMessage testBookingLog = TestBookingLogMessage.newBuilder().build(); diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DartContainsTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DartContainsTest.java new file mode 100644 index 000000000..d8643b3e8 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DartContainsTest.java @@ -0,0 +1,186 @@ +package com.gotocompany.dagger.functions.udfs.scalar; + +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DartDataStore; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DefaultDartDataStore; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.SetCache; +import org.apache.flink.metrics.Gauge; +import org.apache.flink.metrics.MetricGroup; +import org.apache.flink.table.functions.FunctionContext; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; + +import static java.util.Collections.singleton; +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; +import static org.mockito.MockitoAnnotations.initMocks; +import static org.mockito.internal.verification.VerificationModeFactory.times; + +public class DartContainsTest { + private DartDataStore dataStore; + + @Mock + private MetricGroup metricGroup; + + @Mock + private FunctionContext functionContext; + + @Mock + private MeterStatsManager meterStatsManager; + + @Mock + private GaugeStatsManager gaugeStatsManager; + + // Subject + private DartContains dartContains; + + @Before + public void setUp() { + initMocks(this); + when(functionContext.getMetricGroup()).thenReturn(metricGroup); + when(metricGroup.addGroup("udf", "DartContains")).thenReturn(metricGroup); + when(metricGroup.addGroup("DartContains")).thenReturn(metricGroup); + this.dataStore = mock(DefaultDartDataStore.class); + + dartContains = new DartContains(dataStore); + + dartContains.setMeterStatsManager(meterStatsManager); + dartContains.setGaugeStatsManager(gaugeStatsManager); + } + + @Test + public void shouldReturnTrueWhenFieldContainsTheValue() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + assertTrue(dartContains.eval("someList", "someField", 0)); + } + + @Test + public void shouldReturnTrueWhenFieldContainsTheValueFromDifferentPaths() { + 
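+        // Two dart lists ("someList" and "otherList") are stubbed with different members;
+        // a membership lookup against each should succeed, i.e. DartContains resolves the
+        // SetCache belonging to whichever list name it is queried with.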
when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + when(dataStore.getSet("otherList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("otherField"))); + + assertTrue(dartContains.eval("someList", "someField", 0)); + assertTrue(dartContains.eval("otherList", "otherField", 0)); + } + + @Test + public void shouldReturnFalseWhenFieldDoesNotContainsTheValue() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + assertFalse(dartContains.eval("someList", "otherField", 0)); + } + + @Test(expected = TagDoesNotExistException.class) + public void shouldThrowErrorWhenFieldIsNotExist() { + when(dataStore.getSet("nonExistingList", meterStatsManager, gaugeStatsManager)).thenThrow(TagDoesNotExistException.class); + + dartContains.eval("nonExistingList", "someField", 0); + } + + @Test + public void shouldNotInvokeDataSourceWhenInvokedAgainWithinRefreshRate() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + dartContains.eval("someList", "someField", 1); + dartContains.eval("someList", "otherField", 1); + + verify(dataStore, times(1)).getSet("someList", meterStatsManager, gaugeStatsManager); + } + + @Test + public void shouldInvokeDataSourceWhenExceededRefreshRate() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + dartContains.eval("someList", "someField", -1); + dartContains.eval("someList", "otherField", -1); + + verify(dataStore, times(2)).getSet("someList", meterStatsManager, gaugeStatsManager); + } + + @Test + public void shouldReturnTrueWhenFieldContainsTheValueInMiddleWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("prefixsomeField"))); + + assertTrue(dartContains.eval("someList", "a sentence with prefixsomeField and an end", ".*%s.*")); + } + + @Test + public void shouldReturnFalseWhenTagContainsSpaceAndFieldDoesNotWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("prefixsomeField "))); + + assertFalse(dartContains.eval("someList", "a sentence with prefixsomeFieldsuffix and an end", ".*%s.*")); + } + + @Test + public void shouldReturnTrueWhenFieldContainsTheValueAtEndWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("prefixsomeField"))); + + assertTrue(dartContains.eval("someList", "a sentence that ends with prefixsomeField", ".*%s")); + } + + @Test + public void shouldReturnTrueWhenFieldContainsTheValueAtBeginningWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("prefixsomeField"))); + + assertTrue(dartContains.eval("someList", "prefixsomeField is the start of this sentence", "%s.*")); + } + + @Test + public void shouldReturnTrueWhenFieldContainsEntireValueWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("prefixsomeField"))); + + assertTrue(dartContains.eval("someList", "prefixsomeField", "%s")); + } + + @Test + public void shouldReturnFalseWhenFieldContainsValueNotInSameCaseWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new 
SetCache(singleton("prefixsomeField"))); + + assertFalse(dartContains.eval("someList", "preFixSomEfield", ".*%s.*")); + } + + @Test + public void shouldReturnFalseWhenFieldDoesNotContainsTheValueWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + assertFalse(dartContains.eval("someList", "other", ".*%s.*")); + } + + @Test(expected = TagDoesNotExistException.class) + public void shouldThrowErrorWhenFieldIsNotExistWithARegex() { + when(dataStore.getSet("nonExistingList", meterStatsManager, gaugeStatsManager)).thenThrow(TagDoesNotExistException.class); + + dartContains.eval("nonExistingList", "someField", ".*%s.*"); + } + + @Test + public void shouldNotInvokeDataSourceWhenInvokedAgainWithinRefreshRateWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + dartContains.eval("someList", "someField", ".*%s.*", 1); + dartContains.eval("someList", "otherField", ".*%s.*", 1); + + verify(dataStore, times(1)).getSet("someList", meterStatsManager, gaugeStatsManager); + } + + @Test + public void shouldInvokeDataSourceWhenExceededRefreshRateWithARegex() { + when(dataStore.getSet("someList", meterStatsManager, gaugeStatsManager)).thenReturn(new SetCache(singleton("someField"))); + + dartContains.eval("someList", "someField", ".*%s.*", -1); + dartContains.eval("someList", "otherField", ".*%s.*", -1); + + verify(dataStore, times(2)).getSet("someList", meterStatsManager, gaugeStatsManager); + } + + @Test + public void shouldRegisterGauge() throws Exception { + dartContains.open(functionContext); + verify(metricGroup, Mockito.times(1)).gauge(any(String.class), any(Gauge.class)); + } +} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DartGetTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DartGetTest.java similarity index 60% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DartGetTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DartGetTest.java index ee6072ba5..81b356a66 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DartGetTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DartGetTest.java @@ -1,10 +1,11 @@ -package io.odpf.dagger.functions.udfs.scalar; - -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.functions.exceptions.KeyDoesNotExistException; -import io.odpf.dagger.functions.exceptions.TagDoesNotExistException; -import io.odpf.dagger.functions.udfs.scalar.dart.store.gcs.GcsDataStore; -import io.odpf.dagger.functions.udfs.scalar.dart.types.MapCache; +package com.gotocompany.dagger.functions.udfs.scalar; + +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.functions.exceptions.KeyDoesNotExistException; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.DefaultDartDataStore; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.MapCache; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.table.functions.FunctionContext; @@ -18,7 +19,7 @@ import static 
org.mockito.MockitoAnnotations.initMocks; public class DartGetTest { - private GcsDataStore dataStore; + private DefaultDartDataStore dataStore; @Mock private MetricGroup metricGroup; @@ -29,23 +30,31 @@ public class DartGetTest { @Mock private MeterStatsManager meterStatsManager; + @Mock + private GaugeStatsManager gaugeStatsManager; + + // Subject + private DartGet dartGet; + @Before public void setUp() { initMocks(this); when(functionContext.getMetricGroup()).thenReturn(metricGroup); when(metricGroup.addGroup("udf", "DartGet")).thenReturn(metricGroup); when(metricGroup.addGroup("DartGet")).thenReturn(metricGroup); - this.dataStore = mock(GcsDataStore.class); - when(dataStore.getMeterStatsManager()).thenReturn(meterStatsManager); + this.dataStore = mock(DefaultDartDataStore.class); + + dartGet = new DartGet(dataStore); + + dartGet.setMeterStatsManager(meterStatsManager); + dartGet.setGaugeStatsManager(gaugeStatsManager); } @Test public void shouldReturnValueWhenMapAndKeyExist() { String key = "some-key"; String value = "expected-value"; - when(dataStore.getMap("someMap")).thenReturn(new MapCache(singletonMap(key, value))); - - DartGet dartGet = new DartGet(dataStore); + when(dataStore.getMap("someMap", meterStatsManager, gaugeStatsManager)).thenReturn(new MapCache(singletonMap(key, value))); assertEquals(value, dartGet.eval("someMap", "some-key", 1)); } @@ -56,10 +65,8 @@ public void shouldReturnDifferentValueWhenMapAndKeyExistForAllOfThem() { String key2 = "other-key"; String value = "expected-value"; String value2 = "other-expected-value"; - when(dataStore.getMap("someMap")).thenReturn(new MapCache(singletonMap(key, value))); - when(dataStore.getMap("otherMap")).thenReturn(new MapCache(singletonMap(key2, value2))); - - DartGet dartGet = new DartGet(dataStore); + when(dataStore.getMap("someMap", meterStatsManager, gaugeStatsManager)).thenReturn(new MapCache(singletonMap(key, value))); + when(dataStore.getMap("otherMap", meterStatsManager, gaugeStatsManager)).thenReturn(new MapCache(singletonMap(key2, value2))); assertEquals(value, dartGet.eval("someMap", "some-key", 1)); assertEquals(value2, dartGet.eval("otherMap", "other-key", 1)); @@ -67,9 +74,7 @@ public void shouldReturnDifferentValueWhenMapAndKeyExistForAllOfThem() { @Test(expected = TagDoesNotExistException.class) public void shouldThrowErrorWhenMapDoesNotExist() { - when(dataStore.getMap("nonExistingMap")).thenThrow(TagDoesNotExistException.class); - - DartGet dartGet = new DartGet(dataStore); + when(dataStore.getMap("nonExistingMap", meterStatsManager, gaugeStatsManager)).thenThrow(TagDoesNotExistException.class); dartGet.eval("nonExistingMap", "some-key", 1); } @@ -77,55 +82,48 @@ public void shouldThrowErrorWhenMapDoesNotExist() { @Test(expected = KeyDoesNotExistException.class) public void shouldThrowErrorWhenKeyDoesNotExistAndDefaultValueNotGiven() { MapCache mapCache = mock(MapCache.class); - when(dataStore.getMap("someMap")).thenReturn(mapCache); + when(dataStore.getMap("someMap", meterStatsManager, gaugeStatsManager)).thenReturn(mapCache); when(mapCache.get("nonExistingKey")).thenThrow(KeyDoesNotExistException.class); - DartGet dartGet = new DartGet(dataStore); - dartGet.eval("someMap", "nonExistingKey", 1); } @Test public void shouldReturnDefaultValueWhenKeyIsNotFoundAndDefaultValueGiven() { MapCache mapCache = mock(MapCache.class); - when(dataStore.getMap("someMap")).thenReturn(mapCache); + when(dataStore.getMap("someMap", meterStatsManager, gaugeStatsManager)).thenReturn(mapCache); 
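        // Unlike the previous case, a default value is supplied here: when the lookup for
        // "nonExistingKey" throws (stubbed below), eval is expected to fall back to the
        // default instead of propagating KeyDoesNotExistException.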
when(mapCache.get("nonExistingKey")).thenThrow(KeyDoesNotExistException.class); String defaultValue = "some value"; - DartGet dartGet = new DartGet(dataStore); - assertEquals(defaultValue, dartGet.eval("someMap", "nonExistingKey", 1, defaultValue)); } @Test public void shouldNotInvokeDataSourceWhenNotExceededRefreshRate() { MapCache mapCache = mock(MapCache.class); - when(dataStore.getMap("someMap")).thenReturn(mapCache); + when(dataStore.getMap("someMap", meterStatsManager, gaugeStatsManager)).thenReturn(mapCache); when(mapCache.hasExpired(1)).thenReturn(false); - DartGet dartGet = new DartGet(dataStore); dartGet.eval("someMap", "some-key", 1); dartGet.eval("someMap", "some-key", 1); - verify(dataStore, times(1)).getMap("someMap"); + verify(dataStore, times(1)).getMap("someMap", meterStatsManager, gaugeStatsManager); } @Test public void shouldInvokeDataSourceWhenExceededRefreshRate() { MapCache mapCache = mock(MapCache.class); - when(dataStore.getMap("someMap")).thenReturn(mapCache); + when(dataStore.getMap("someMap", meterStatsManager, gaugeStatsManager)).thenReturn(mapCache); when(mapCache.hasExpired(-1)).thenReturn(true); - DartGet dartGet = new DartGet(dataStore); dartGet.eval("someMap", "some-key", -1); - verify(dataStore, times(1)).getMap("someMap"); + verify(dataStore, times(1)).getMap("someMap", meterStatsManager, gaugeStatsManager); } @Test public void shouldRegisterGauge() throws Exception { - DartGet dartGet = new DartGet(dataStore); dartGet.open(functionContext); verify(metricGroup, times(1)).gauge(any(String.class), any(Gauge.class)); } diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DistanceTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DistanceTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DistanceTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DistanceTest.java index d19d51588..cc1be6a64 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DistanceTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/DistanceTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ElementAtTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ElementAtTest.java similarity index 94% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ElementAtTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ElementAtTest.java index 17159eb38..e9ac9acb0 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ElementAtTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ElementAtTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.stencil.client.StencilClient; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.stencil.client.StencilClient; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; import 
org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.table.api.DataTypes; @@ -57,10 +57,10 @@ public class ElementAtTest { public void setup() { initMocks(this); protos = new LinkedHashMap<>(); - protos.put("data_stream_0", "io.odpf.dagger.consumer.TestBookingLogMessage"); - protos.put("data_stream_1", "io.odpf.dagger.consumer.TestCustomerLogMessage"); + protos.put("data_stream_0", "com.gotocompany.dagger.consumer.TestBookingLogMessage"); + protos.put("data_stream_1", "com.gotocompany.dagger.consumer.TestCustomerLogMessage"); when(stencilClientOrchestrator.getStencilClient()).thenReturn(stencilClient); - when(stencilClient.get("io.odpf.dagger.consumer.TestBookingLogMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); + when(stencilClient.get("com.gotocompany.dagger.consumer.TestBookingLogMessage")).thenReturn(TestBookingLogMessage.getDescriptor()); when(functionContext.getMetricGroup()).thenReturn(metricGroup); when(metricGroup.addGroup("udf", "ElementAt")).thenReturn(metricGroup); } @@ -80,8 +80,8 @@ public void shouldReturnElementOfArrayAtGivenIndexAndPath() throws Exception { @Test public void shouldReturnElementOfArrayForGivenTableNameAtGivenIndexAndPath() throws Exception { protos = new LinkedHashMap<>(); - protos.put("data_stream_0", "io.odpf.dagger.consumer.TestCustomerLogMessage"); - protos.put("booking", "io.odpf.dagger.consumer.TestBookingLogMessage"); + protos.put("data_stream_0", "com.gotocompany.dagger.consumer.TestCustomerLogMessage"); + protos.put("booking", "com.gotocompany.dagger.consumer.TestBookingLogMessage"); ElementAt elementAt = new ElementAt(protos, stencilClientOrchestrator); Row routeRow = new Row(3); @@ -96,8 +96,8 @@ public void shouldReturnElementOfArrayForGivenTableNameAtGivenIndexAndPath() thr @Test public void shouldReturnEmptyValueAtGivenIndexAndPathWhenArrayIsNotPresentInFirstStreamAndTableNameIsNotGiven() throws Exception { protos = new LinkedHashMap<>(); - protos.put("data_stream_0", "io.odpf.dagger.consumer.TestCustomerLogMessage"); - protos.put("booking", "io.odpf.dagger.consumer.TestBookingLogMessage"); + protos.put("data_stream_0", "com.gotocompany.dagger.consumer.TestCustomerLogMessage"); + protos.put("booking", "com.gotocompany.dagger.consumer.TestBookingLogMessage"); ElementAt elementAt = new ElementAt(protos, stencilClientOrchestrator); Row routeRow = new Row(3); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/EndOfMonthTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfMonthTest.java similarity index 96% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/EndOfMonthTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfMonthTest.java index 461fb0e37..860738562 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/EndOfMonthTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfMonthTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/EndOfWeekTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfWeekTest.java similarity index 97% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/EndOfWeekTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfWeekTest.java index 542d9d5ae..738bea752 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/EndOfWeekTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/EndOfWeekTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ExponentialMovingAverageTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ExponentialMovingAverageTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ExponentialMovingAverageTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ExponentialMovingAverageTest.java index b056c7763..34317bc33 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ExponentialMovingAverageTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ExponentialMovingAverageTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/FiltersTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/FiltersTest.java similarity index 83% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/FiltersTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/FiltersTest.java index b11475d7f..a1e3729af 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/FiltersTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/FiltersTest.java @@ -1,11 +1,11 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.stencil.StencilClientFactory; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.stencil.StencilClientFactory; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -43,7 +43,7 @@ public void shouldReturnEverythingFromInputWhenPredicateIsTrue() ByteString[] inputBytes = new ByteString[]{inputBytes1, inputBytes2}; Filters filter = new Filters(StencilClientFactory.getClient()); - List result = filter.eval(inputBytes, "io.odpf.dagger.consumer.TestBookingLogMessage", alwaysTrue); + List result = filter.eval(inputBytes, "com.gotocompany.dagger.consumer.TestBookingLogMessage", alwaysTrue); assertEquals("order_1", result.get(0).getField(orderNumberFieldDesc)); assertEquals("order_2", result.get(1).getField(orderNumberFieldDesc)); @@ -57,7 +57,7 @@ public void shouldReturnNothingFromInputWhenPredicateIsFalse() ByteString[] inputBytes = new ByteString[]{inputBytes1, inputBytes2}; Filters filter = new Filters(StencilClientFactory.getClient()); - List result = 
filter.eval(inputBytes, "io.odpf.dagger.consumer.TestBookingLogMessage", alwaysFalse); + List result = filter.eval(inputBytes, "com.gotocompany.dagger.consumer.TestBookingLogMessage", alwaysFalse); assertEquals(0, result.size()); } @@ -72,7 +72,7 @@ public void shouldReturnOnlyWhenPredicateIsTrue() ByteString[] inputBytes = new ByteString[]{inputBytes1, inputBytes2}; Filters filter = new Filters(StencilClientFactory.getClient()); - List result = filter.eval(inputBytes, "io.odpf.dagger.consumer.TestBookingLogMessage", predicate); + List result = filter.eval(inputBytes, "com.gotocompany.dagger.consumer.TestBookingLogMessage", predicate); assertEquals(1, result.size()); assertEquals("order_1", result.get(0).getField(orderNumberFieldDesc)); @@ -87,7 +87,7 @@ public void shouldAcceptTwoPredicates() throws ClassNotFoundException, InvalidPr ByteString[] inputBytes = new ByteString[]{inputBytes1, inputBytes2}; Filters filter = new Filters(StencilClientFactory.getClient()); - List result = filter.eval(inputBytes, "io.odpf.dagger.consumer.TestBookingLogMessage", alwaysTrue, predicate); + List result = filter.eval(inputBytes, "com.gotocompany.dagger.consumer.TestBookingLogMessage", alwaysTrue, predicate); assertEquals(1, result.size()); assertEquals("order_1", result.get(0).getField(orderNumberFieldDesc)); @@ -102,7 +102,7 @@ public void shouldNotTestAgainstNextPredicateWhenFailEarly() throws ClassNotFoun ByteString[] inputBytes = new ByteString[]{inputBytes1, inputBytes2}; Filters filter = new Filters(StencilClientFactory.getClient()); - List result = filter.eval(inputBytes, "io.odpf.dagger.consumer.TestBookingLogMessage", alwaysTrue, predicate, alwaysTrue); + List result = filter.eval(inputBytes, "com.gotocompany.dagger.consumer.TestBookingLogMessage", alwaysTrue, predicate, alwaysTrue); assertEquals(1, result.size()); assertEquals("order_1", result.get(0).getField(orderNumberFieldDesc)); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/FormatTimeInZoneTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/FormatTimeInZoneTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/FormatTimeInZoneTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/FormatTimeInZoneTest.java index e8b936d38..6d258ea48 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/FormatTimeInZoneTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/FormatTimeInZoneTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/GeoHashTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/GeoHashTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/GeoHashTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/GeoHashTest.java index 836a29a68..f26b7f6c4 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/GeoHashTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/GeoHashTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package 
com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonDeleteTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonDeleteTest.java similarity index 99% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonDeleteTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonDeleteTest.java index 8528de8b1..42f95a675 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonDeleteTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonDeleteTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonQueryTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonQueryTest.java similarity index 98% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonQueryTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonQueryTest.java index da0f9f757..aec62695f 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonQueryTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonQueryTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonUpdateTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonUpdateTest.java similarity index 99% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonUpdateTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonUpdateTest.java index 38081de37..23cd48fdb 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/JsonUpdateTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/JsonUpdateTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/LinearTrendTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/LinearTrendTest.java similarity index 99% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/LinearTrendTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/LinearTrendTest.java index 7c30fab3e..85924551b 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/LinearTrendTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/LinearTrendTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import 
org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ListContainsTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ListContainsTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ListContainsTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ListContainsTest.java index a8ce97172..f724946c9 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/ListContainsTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/ListContainsTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/MapGetTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/MapGetTest.java similarity index 98% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/MapGetTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/MapGetTest.java index 823bc1dba..d0a3e3b50 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/MapGetTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/MapGetTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/S2AreaInKm2Test.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/S2AreaInKm2Test.java similarity index 96% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/S2AreaInKm2Test.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/S2AreaInKm2Test.java index 94e30da44..5f7bbbe85 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/S2AreaInKm2Test.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/S2AreaInKm2Test.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/S2IdTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/S2IdTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/S2IdTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/S2IdTest.java index 66bd88572..d88d54067 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/S2IdTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/S2IdTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SelectFieldsTest.java 
b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SelectFieldsTest.java similarity index 88% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SelectFieldsTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SelectFieldsTest.java index f09acd9e6..91fda83a2 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SelectFieldsTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SelectFieldsTest.java @@ -1,20 +1,19 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; +import com.google.protobuf.ByteString; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.Timestamp; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage; +import com.gotocompany.dagger.functions.exceptions.LongbowException; +import com.gotocompany.stencil.StencilClientFactory; +import com.gotocompany.stencil.client.StencilClient; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.functions.FunctionContext; import org.apache.flink.table.types.UnresolvedDataType; import org.apache.flink.table.types.inference.CallContext; import org.apache.flink.table.types.inference.TypeStrategy; - -import io.odpf.stencil.StencilClientFactory; -import io.odpf.stencil.client.StencilClient; -import com.google.protobuf.ByteString; -import com.google.protobuf.DynamicMessage; -import com.google.protobuf.Timestamp; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestEnrichedBookingLogMessage; -import io.odpf.dagger.functions.exceptions.LongbowException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -25,9 +24,7 @@ import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; public class SelectFieldsTest { @@ -63,7 +60,7 @@ public void shouldReturnSelectedValues() throws Exception { byteStrings[0] = testBookingLogByteString; selectFields.open(functionContext); - Object[] selectedFields = selectFields.eval(byteStrings, "io.odpf.dagger.consumer.TestBookingLogMessage", "order_number"); + Object[] selectedFields = selectFields.eval(byteStrings, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "order_number"); assertEquals(selectedFields[0], "test_order_number"); } @@ -82,7 +79,7 @@ public void shouldReturnSelectedValuesInListOfInputBytes() throws Exception { SelectFields selectFields = new SelectFields(stencilClient); selectFields.open(functionContext); - Object[] selectedFields = selectFields.eval(byteStrings, "io.odpf.dagger.consumer.TestBookingLogMessage", "order_number"); + Object[] selectedFields = selectFields.eval(byteStrings, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "order_number"); assertEquals(selectedFields[0], "test_order_number_1"); assertEquals(selectedFields[1], "test_order_number_2"); @@ -92,7 +89,7 @@ public void shouldReturnSelectedValuesInListOfInputBytes() throws Exception { @Test public void shouldThrowErrorWhenFieldNameDoesNotMatch() throws Exception { thrown.expect(LongbowException.class); - 
thrown.expectMessage("Key : order_number_ does not exist in Message io.odpf.dagger.consumer.TestBookingLogMessage"); + thrown.expectMessage("Key : order_number_ does not exist in Message com.gotocompany.dagger.consumer.TestBookingLogMessage"); String orderNumber = "test_order_number"; ByteString testBookingLogByteString = TestBookingLogMessage.newBuilder().setOrderNumber(orderNumber).build().toByteString(); @@ -102,7 +99,7 @@ public void shouldThrowErrorWhenFieldNameDoesNotMatch() throws Exception { SelectFields selectFields = new SelectFields(stencilClient); selectFields.open(functionContext); - selectFields.eval(byteStrings, "io.odpf.dagger.consumer.TestBookingLogMessage", "order_number_"); + selectFields.eval(byteStrings, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "order_number_"); } @Test @@ -148,7 +145,7 @@ public void shouldReturnSelectedValueForNestedFieldsInCaseOfSelectOnly() throws selectFields.open(functionContext); Object[] selectedFields = selectFields.eval(byteStrings, - "io.odpf.dagger.consumer.TestEnrichedBookingLogMessage", + "com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage", "booking_log.event_timestamp.seconds"); assertEquals(selectedFields[0], 100L); @@ -165,7 +162,7 @@ public void shouldThrowIfNoParentClassFound() throws Exception { SelectFields selectFields = new SelectFields(stencilClient); selectFields.open(functionContext); - selectFields.eval(byteStrings, "io.odpf.dagger.consumer.NotTestBookingLogMessage", "order_number"); + selectFields.eval(byteStrings, "com.gotocompany.dagger.consumer.NotTestBookingLogMessage", "order_number"); } @Test diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SingleFeatureWithTypeTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SingleFeatureWithTypeTest.java similarity index 84% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SingleFeatureWithTypeTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SingleFeatureWithTypeTest.java index 0b75bb759..6c4fe75c6 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SingleFeatureWithTypeTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SingleFeatureWithTypeTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import com.google.protobuf.ByteString; -import io.odpf.dagger.functions.exceptions.InvalidNumberOfArgumentsException; -import io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.udfs.aggregate.feast.handler.ValueEnum; +import com.gotocompany.dagger.functions.exceptions.InvalidNumberOfArgumentsException; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.types.Row; import org.junit.Before; @@ -14,7 +14,6 @@ import java.util.HashMap; import java.util.Map; -import static io.odpf.dagger.functions.udfs.aggregate.feast.handler.ValueEnum.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.MockitoAnnotations.initMocks; @@ -42,14 +41,14 @@ public void shouldAddOneEntry() { public void shouldHandleMultipleDataTypeOfAllKinds() { SingleFeatureWithType singleFeatureWithTypeUdf = new SingleFeatureWithType(); HashMap> data = new HashMap<>(); - data.put("FloatKey", new Tuple2<>(1.0F, FloatType)); - data.put("StringKey", new Tuple2<>("stringValue", 
StringType)); - data.put("DoubleKey", new Tuple2<>(1.00D, DoubleType)); - data.put("IntegerKey", new Tuple2<>(1, IntegerType)); - data.put("LongKey", new Tuple2<>(1L, LongType)); - data.put("ByteKey", new Tuple2<>(ByteString.copyFrom("value1".getBytes()), ByteType)); - data.put("BoolKey", new Tuple2<>(true, BooleanType)); - data.put("TimestampKey", new Tuple2<>(getTimestampAsRow(123141, 431231), TimestampType)); + data.put("FloatKey", new Tuple2<>(1.0F, ValueEnum.FloatType)); + data.put("StringKey", new Tuple2<>("stringValue", ValueEnum.StringType)); + data.put("DoubleKey", new Tuple2<>(1.00D, ValueEnum.DoubleType)); + data.put("IntegerKey", new Tuple2<>(1, ValueEnum.IntegerType)); + data.put("LongKey", new Tuple2<>(1L, ValueEnum.LongType)); + data.put("ByteKey", new Tuple2<>(ByteString.copyFrom("value1".getBytes()), ValueEnum.ByteType)); + data.put("BoolKey", new Tuple2<>(true, ValueEnum.BooleanType)); + data.put("TimestampKey", new Tuple2<>(getTimestampAsRow(123141, 431231), ValueEnum.TimestampType)); ArrayList arrayData = new ArrayList<>(); for (Map.Entry> entry : data.entrySet()) { diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SplitTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SplitTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SplitTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SplitTest.java index b55d5a277..d1dce22df 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/SplitTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/SplitTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/StartOfMonthTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfMonthTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/StartOfMonthTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfMonthTest.java index ef805e65a..859f41e55 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/StartOfMonthTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfMonthTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/StartOfWeekTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfWeekTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/StartOfWeekTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfWeekTest.java index 4af598e64..de2843ab2 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/StartOfWeekTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/StartOfWeekTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package 
com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/TimeInDateTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/TimeInDateTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/TimeInDateTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/TimeInDateTest.java index cbbe66094..8fce1183a 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/TimeInDateTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/TimeInDateTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/TimestampFromUnixTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/TimestampFromUnixTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/TimestampFromUnixTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/TimestampFromUnixTest.java index 0af6cc86c..9c0a8b16f 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/TimestampFromUnixTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/TimestampFromUnixTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar; +package com.gotocompany.dagger.functions.udfs.scalar; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.MetricGroup; diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClientProviderTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClientProviderTest.java new file mode 100644 index 000000000..507be94bd --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DartDataStoreClientProviderTest.java @@ -0,0 +1,77 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.gcs.GcsDartClient; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.oss.OssDartClient; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import static org.junit.Assert.assertTrue; + +public class DartDataStoreClientProviderTest { + private DartDataStoreClientProvider dartDataStoreClientProvider; + @Mock + private Configuration configuration; + + @Before + public void setUp() { + initMocks(this); + dartDataStoreClientProvider = null; + when(configuration.getString(Constants.OSS_ENDPOINT, Constants.DEFAULT_OSS_ENDPOINT)).thenReturn("oss-ap-southeast-5.aliyuncs.com"); + } + + @Test + public void shouldReturnGcsDartClientWhenUdfStoreProviderIsGcs() { + String udfStoreProvider = Constants.UDF_STORE_PROVIDER_GCS; + String projectID = "test-project"; + + 
dartDataStoreClientProvider = new DartDataStoreClientProvider(udfStoreProvider, projectID, configuration); + + DartDataStoreClient client = dartDataStoreClientProvider.getDartDataStoreClient(); + + assertTrue(client instanceof GcsDartClient); + } + + @Test + public void shouldReturnOssDartClientWhenUdfStoreProviderIsOss() { + String udfStoreProvider = Constants.UDF_STORE_PROVIDER_OSS; + + dartDataStoreClientProvider = new DartDataStoreClientProvider(udfStoreProvider, null, configuration); + DartDataStoreClient client = dartDataStoreClientProvider.getDartDataStoreClient(); + + assertTrue(client instanceof OssDartClient); + } + + @Test(expected = IllegalArgumentException.class) + public void shouldThrowIllegalArgumentExceptionForUnknownUdfStoreProvider() { + String udfStoreProvider = "UNKNOWN-PROVIDER"; + + dartDataStoreClientProvider = new DartDataStoreClientProvider(udfStoreProvider, null, configuration); + + try { + dartDataStoreClientProvider.getDartDataStoreClient(); + } catch (IllegalArgumentException e) { + Assert.assertEquals("Unknown UDF Store Provider: UNKNOWN-PROVIDER", e.getMessage()); + throw e; + } + } + + @Test + public void shouldReturnSameClientOnSubsequentCalls() { + String udfStoreProvider = Constants.UDF_STORE_PROVIDER_GCS; + String projectID = "test-project"; + + dartDataStoreClientProvider = new DartDataStoreClientProvider(udfStoreProvider, projectID, configuration); + + DartDataStoreClient firstClient = dartDataStoreClientProvider.getDartDataStoreClient(); + DartDataStoreClient secondClient = dartDataStoreClientProvider.getDartDataStoreClient(); + + Assert.assertEquals(firstClient, secondClient); + } +} diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DefaultDartDataStoreTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DefaultDartDataStoreTest.java new file mode 100644 index 000000000..ed13ed881 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/DefaultDartDataStoreTest.java @@ -0,0 +1,106 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.common.metrics.managers.MeterStatsManager; +import com.gotocompany.dagger.functions.common.Constants; +import com.gotocompany.dagger.functions.exceptions.BucketDoesNotExistException; +import com.gotocompany.dagger.functions.exceptions.TagDoesNotExistException; +import com.gotocompany.dagger.functions.udfs.scalar.dart.store.gcs.GcsDartClient; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.MapCache; +import com.gotocompany.dagger.functions.udfs.scalar.dart.types.SetCache; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import java.util.*; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DefaultDartDataStoreTest { + private final String defaultListName = "listName"; + + private final String defaultMapName = "mapName"; + @Rule + public ExpectedException thrown = ExpectedException.none(); + private DefaultDartDataStore defaultDartDataStore; + private List 
listContent; + private Map mapContent; + private GcsDartClient gcsDartClient; + private MeterStatsManager meterStatsManager; + private GaugeStatsManager gaugeStatsManager; + + @Mock + private Configuration configuration; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + when(configuration.getString(Constants.OSS_ENDPOINT, Constants.DEFAULT_OSS_ENDPOINT)).thenReturn("oss-ap-southeast-5.aliyuncs.com"); + // Subject + DartDataStoreClientProvider dartDataStoreClientProvider = mock(DartDataStoreClientProvider.class); + defaultDartDataStore = new DefaultDartDataStore(dartDataStoreClientProvider, "test-bucket", configuration); + + gcsDartClient = mock(GcsDartClient.class); + meterStatsManager = mock(MeterStatsManager.class); + gaugeStatsManager = mock(GaugeStatsManager.class); + when(dartDataStoreClientProvider.getDartDataStoreClient()).thenReturn(gcsDartClient); + listContent = Arrays.asList("listContent"); + mapContent = Collections.singletonMap("key", "value"); + } + + @Test + public void shouldGetExistingListGivenName() { + String jsonData = " { \"data\" : [ \"listContent\" ] } "; + + when(gcsDartClient.fetchJsonData(any(), any(), any(), anyString())).thenReturn(jsonData); + SetCache setCache = new SetCache(new HashSet<>(listContent)); + Assert.assertEquals(setCache, defaultDartDataStore.getSet(defaultListName, meterStatsManager, gaugeStatsManager)); + } + + @Test + public void shouldThrowTagDoesNotExistWhenListIsNotThere() { + thrown.expect(TagDoesNotExistException.class); + thrown.expectMessage("Could not find the content in gcs for invalidListName"); + + when(gcsDartClient.fetchJsonData(any(), any(), any(), anyString())).thenThrow(new TagDoesNotExistException("Could not find the content in gcs for invalidListName")); + + defaultDartDataStore.getSet("invalidListName", meterStatsManager, gaugeStatsManager); + } + + @Test + public void shouldThrowBucketDoesNotExistWhenBucketIsNotThere() { + thrown.expect(BucketDoesNotExistException.class); + thrown.expectMessage("Could not find the bucket in gcs for invalidListName"); + + when(gcsDartClient.fetchJsonData(any(), any(), any(), anyString())).thenThrow(new BucketDoesNotExistException("Could not find the bucket in gcs for invalidListName")); + + defaultDartDataStore.getSet("invalidListName", meterStatsManager, gaugeStatsManager); + } + + @Test + public void shouldGetExistingMapGivenName() { + String jsonData = " { \"key\" : \"value\" } "; + when(gcsDartClient.fetchJsonData(any(), any(), any(), anyString())).thenReturn(jsonData); + MapCache mapCache = new MapCache(new HashMap<>(mapContent)); + + Assert.assertEquals(mapCache, defaultDartDataStore.getMap(defaultMapName, meterStatsManager, gaugeStatsManager)); + } + + @Test + public void shouldThrowTagDoesNotExistWhenMapIsNotThere() { + thrown.expect(TagDoesNotExistException.class); + thrown.expectMessage("Could not find the content in gcs for invalidMapName"); + + when(gcsDartClient.fetchJsonData(any(), any(), any(), anyString())).thenThrow(new TagDoesNotExistException("Could not find the content in gcs for invalidMapName")); + + defaultDartDataStore.getSet("invalidMapName", meterStatsManager, gaugeStatsManager); + } +} diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/cos/CosDartClientTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/cos/CosDartClientTest.java new file mode 100644 index 000000000..0e450347e --- /dev/null +++ 
b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/cos/CosDartClientTest.java @@ -0,0 +1,60 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store.cos; + +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import com.gotocompany.dagger.functions.common.CosLibClient; +import com.qcloud.cos.COSClient; +import com.qcloud.cos.model.COSObject; +import com.qcloud.cos.model.COSObjectInputStream; +import org.apache.http.client.methods.HttpRequestBase; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.ByteArrayInputStream; + +import static org.mockito.Mockito.*; +import static org.mockito.MockitoAnnotations.initMocks; + +public class CosDartClientTest { + @Mock + private CosLibClient cosLibClient; + + @Mock + private COSClient cosClient; + + @Mock + private COSObject cosObject; + + @Mock + private GaugeStatsManager gaugeStatsManager; + + @Mock + private HttpRequestBase mockRequest; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldGetObjectFile() { + String bucketName = "bucket_name"; + String udfName = "DartGet"; + String dartName = "dart-get/path/to/data.json"; + String jsonFileContent = "{\"name\":\"house-stark-dev\"}"; + + CosLibClient.testOnlySetInstance(cosLibClient); + doReturn(cosClient).when(cosLibClient).get(false, "ap-jakarta"); + + when(cosClient.getObject(bucketName, dartName)).thenReturn(cosObject); + when(cosObject.getObjectContent()).thenReturn(new COSObjectInputStream(new ByteArrayInputStream(jsonFileContent.getBytes()), mockRequest)); + + CosDartClient cosDartClient = new CosDartClient(false, "ap-jakarta"); + String jsonData = cosDartClient.fetchJsonData(udfName, gaugeStatsManager, bucketName, dartName); + + verify(cosClient, times(1)).getObject(bucketName, dartName); + verify(cosObject, times(1)).getObjectContent(); + Assert.assertEquals(jsonFileContent, jsonData); + } +} diff --git a/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/oss/OssDartClientTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/oss/OssDartClientTest.java new file mode 100644 index 000000000..d839d6df6 --- /dev/null +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/store/oss/OssDartClientTest.java @@ -0,0 +1,48 @@ +package com.gotocompany.dagger.functions.udfs.scalar.dart.store.oss; + +import com.aliyun.oss.OSS; +import com.aliyun.oss.model.OSSObject; +import com.gotocompany.dagger.common.metrics.managers.GaugeStatsManager; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.ByteArrayInputStream; + +import static org.mockito.Mockito.*; +import static org.mockito.MockitoAnnotations.initMocks; + +public class OssDartClientTest { + @Mock + private OSS libOSSClient; + + @Mock + private OSSObject ossObject; + + @Mock + private GaugeStatsManager gaugeStatsManager; + + @Before + public void setup() { + initMocks(this); + } + + @Test + public void shouldGetObjectFile() { + String bucketName = "bucket_name"; + String udfName = "DartGet"; + String dartName = "dart-get/path/to/data.json"; + String jsonFileContent = "{\"name\":\"house-stark-dev\"}"; + + when(libOSSClient.getObject(bucketName, dartName)).thenReturn(ossObject); + when(ossObject.getObjectContent()).thenReturn(new ByteArrayInputStream(jsonFileContent.getBytes())); + + OssDartClient 
ossDartClient = new OssDartClient(libOSSClient); + String jsonData = ossDartClient.fetchJsonData(udfName, gaugeStatsManager, bucketName, dartName); + + verify(libOSSClient, times(1)).getObject(bucketName, dartName); + verify(ossObject, times(1)).getObjectContent(); + Assert.assertEquals(jsonFileContent, jsonData); + } +} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/CacheTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/CacheTest.java similarity index 90% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/CacheTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/CacheTest.java index 6b66f3cc3..dd30c51d9 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/CacheTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/CacheTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.types; +package com.gotocompany.dagger.functions.udfs.scalar.dart.types; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/MapCacheTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/MapCacheTest.java similarity index 91% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/MapCacheTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/MapCacheTest.java index 4b59fde4f..fa3210589 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/MapCacheTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/MapCacheTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.types; +package com.gotocompany.dagger.functions.udfs.scalar.dart.types; -import io.odpf.dagger.functions.exceptions.KeyDoesNotExistException; +import com.gotocompany.dagger.functions.exceptions.KeyDoesNotExistException; import org.apache.commons.collections.map.SingletonMap; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/SetCacheTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/SetCacheTest.java similarity index 95% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/SetCacheTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/SetCacheTest.java index a1bab25e2..b69eb72ac 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/types/SetCacheTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/dart/types/SetCacheTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.types; +package com.gotocompany.dagger.functions.udfs.scalar.dart.types; import org.junit.Before; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/MessageReaderTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/MessageReaderTest.java similarity index 82% rename from 
dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/MessageReaderTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/MessageReaderTest.java index 8610cc495..ffc67e9f5 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/MessageReaderTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/MessageReaderTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt; -import io.odpf.stencil.StencilClientFactory; -import io.odpf.stencil.client.StencilClient; +import com.gotocompany.stencil.StencilClientFactory; +import com.gotocompany.stencil.client.StencilClient; import org.apache.flink.types.Row; import org.junit.Test; @@ -17,7 +17,7 @@ public void shouldBeAbleToReadPathOfOneDepth() throws ClassNotFoundException { Row routeRow = new Row(3); routeRow.setField(2, 21.5F); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); assertEquals(21.5F, messageReader.read("distance_in_kms")); } @@ -30,7 +30,7 @@ public void shouldBeAbleToReadPathOfTwoDepth() throws ClassNotFoundException { locationRow.setField(3, CENTRAL_MONUMENT_JAKARTA_LONGITUDE); routeRow.setField(0, locationRow); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); assertEquals(CENTRAL_MONUMENT_JAKARTA_LATITUDE, messageReader.read("start.latitude")); assertEquals(CENTRAL_MONUMENT_JAKARTA_LONGITUDE, messageReader.read("start.longitude")); @@ -41,7 +41,7 @@ public void shouldHandleWhenNoValidProtobufForMessageRoute() throws ClassNotFoun Row routeRow = new Row(3); routeRow.setField(2, 21.5F); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "invalid", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "invalid", STENCIL_CLIENT); assertEquals("", messageReader.read("distance_in_kms")); } @@ -60,7 +60,7 @@ public void shouldHandleInvalidParentPathOfOneDepth() throws ClassNotFoundExcept Row routeRow = new Row(3); routeRow.setField(2, 21.5F); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); assertEquals("", messageReader.read("invalid")); } @@ -72,7 +72,7 @@ public void shouldHandleInvalidPathAtParentWhenReadingNestedField() throws Class locationRow.setField(2, CENTRAL_MONUMENT_JAKARTA_LATITUDE); routeRow.setField(0, locationRow); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); assertEquals("", messageReader.read("invalid.start")); } @@ -84,7 +84,7 @@ public 
void shouldHandleInvalidNestedPath() throws ClassNotFoundException { locationRow.setField(2, CENTRAL_MONUMENT_JAKARTA_LATITUDE); routeRow.setField(0, locationRow); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); assertEquals("", messageReader.read("start.invalid")); } @@ -96,7 +96,7 @@ public void shouldBeAbleToReadObject() throws ClassNotFoundException { locationRow.setField(2, CENTRAL_MONUMENT_JAKARTA_LATITUDE); routeRow.setField(0, locationRow); - MessageReader messageReader = new MessageReader(routeRow, "io.odpf.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); + MessageReader messageReader = new MessageReader(routeRow, "com.gotocompany.dagger.consumer.TestBookingLogMessage", "routes", STENCIL_CLIENT); assertEquals(locationRow, messageReader.read("start")); } diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptorTest.java similarity index 79% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptorTest.java index 5e22a018f..60e91c2c9 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/descriptor/CustomDescriptorTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor; import com.google.protobuf.Descriptors; import org.junit.Test; @@ -11,7 +11,7 @@ public class CustomDescriptorTest { @Test public void shouldGetTheFieldDescriptorForMessage() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); Descriptors.FieldDescriptor fieldDescriptor = descriptor.getFields().get(43); @@ -21,7 +21,7 @@ public void shouldGetTheFieldDescriptorForMessage() throws ClassNotFoundExceptio @Test public void shouldGetTheFieldDescriptorIfNotMessage() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); Descriptors.FieldDescriptor fieldDescriptor = descriptor.getFields().get(0); @@ -31,7 +31,7 @@ public void shouldGetTheFieldDescriptorIfNotMessage() throws ClassNotFoundExcept @Test public void shouldHandleForFieldDescriptorIfNotValidPath() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - 
Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); assertEquals(Optional.empty(), customDescriptor.getFieldDescriptor("invalid")); @@ -39,7 +39,7 @@ public void shouldHandleForFieldDescriptorIfNotValidPath() throws ClassNotFoundE @Test public void shouldGetTheDescriptorForMessage() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); Descriptors.FieldDescriptor fieldDescriptor = descriptor.getFields().get(43); @@ -49,7 +49,7 @@ public void shouldGetTheDescriptorForMessage() throws ClassNotFoundException, No @Test public void shouldGetTheEmptyDescriptorIfNotAMessage() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); assertEquals(Optional.empty(), customDescriptor.getDescriptor("service_type")); @@ -57,7 +57,7 @@ public void shouldGetTheEmptyDescriptorIfNotAMessage() throws ClassNotFoundExcep @Test public void shouldHandleForDescriptorIfNotValidPath() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); assertEquals(Optional.empty(), customDescriptor.getDescriptor("invalid")); @@ -65,7 +65,7 @@ public void shouldHandleForDescriptorIfNotValidPath() throws ClassNotFoundExcept @Test public void shouldGetTheCustomDescriptorForMessage() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); Descriptors.FieldDescriptor fieldDescriptor = descriptor.getFields().get(43); @@ -75,7 +75,7 @@ public void shouldGetTheCustomDescriptorForMessage() throws ClassNotFoundExcepti @Test public void shouldGetTheEmptyCustomDescriptorIfNotAMessage() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); assertEquals(Optional.empty(), customDescriptor.get("service_type")); @@ -83,7 +83,7 @@ public void shouldGetTheEmptyCustomDescriptorIfNotAMessage() 
throws ClassNotFoun @Test public void shouldHandleForCustomDescriptorIfNotValidPath() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Descriptors.Descriptor descriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor descriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); CustomDescriptor customDescriptor = new CustomDescriptor(descriptor); assertEquals(Optional.empty(), customDescriptor.get("invalid")); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ElementTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ElementTest.java similarity index 75% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ElementTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ElementTest.java index 81168c87b..abf3b8d69 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ElementTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ElementTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.row; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; import org.junit.Test; import java.lang.reflect.InvocationTargetException; @@ -12,21 +12,21 @@ public class ElementTest { @Test public void shouldInitializeValueElementWhenNotMessageType() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Optional element = Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "service_type"); + Optional element = Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "service_type"); assertEquals(ValueElement.class, element.get().getClass()); } @Test public void shouldInitializeRowElementWhenNotMessageType() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Optional element = Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "routes"); + Optional element = Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "routes"); assertEquals(RowElement.class, element.get().getClass()); } @Test public void shouldHandleInvalidType() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - Optional element = Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "invalid"); + Optional element = Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "invalid"); assertEquals(Optional.empty(), element); } diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/RowElementTest.java 
b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/RowElementTest.java similarity index 84% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/RowElementTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/RowElementTest.java index 34f37c63e..93a072e47 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/RowElementTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/RowElementTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.row; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; import org.apache.flink.types.Row; import org.junit.Test; import org.mockito.Mockito; @@ -14,7 +14,7 @@ public class RowElementTest { @Test public void shouldCreateNextAsRowElementForMessageType() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - RowElement element = (RowElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "routes").get(); + RowElement element = (RowElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "routes").get(); Optional next = element.createNext("start"); @@ -23,7 +23,7 @@ public void shouldCreateNextAsRowElementForMessageType() throws ClassNotFoundExc @Test public void shouldCreateNextAsValueElementForNonMessageType() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - RowElement element = (RowElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "routes").get(); + RowElement element = (RowElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "routes").get(); Optional next = element.createNext("distance_in_kms"); @@ -32,7 +32,7 @@ public void shouldCreateNextAsValueElementForNonMessageType() throws ClassNotFou @Test public void shouldHandleForInvalidPath() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - RowElement element = (RowElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "routes").get(); + RowElement element = (RowElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "routes").get(); Optional next = element.createNext("path"); @@ -45,7 +45,7 @@ public void shouldInvokeParentOfElementMockWhenFetching() throws ClassNotFoundEx Row routeRow = new Row(3); routeRow.setField(2, 21.5F); Mockito.when(rowElementMock.fetch()).thenReturn(routeRow); - Descriptors.Descriptor bookingDescriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor bookingDescriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); Descriptors.Descriptor routeDescriptor = bookingDescriptor.getFields().get(43).getMessageType(); Element 
childElement = Element.initialize(rowElementMock, null, new CustomDescriptor(routeDescriptor), "start").get(); @@ -60,7 +60,7 @@ public void shouldInvokeParentOfValueMockWhenFetching() throws ClassNotFoundExce Row routeRow = new Row(3); routeRow.setField(2, 21.5F); Mockito.when(rowElementMock.fetch()).thenReturn(routeRow); - Descriptors.Descriptor bookingDescriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor bookingDescriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); Descriptors.Descriptor routeDescriptor = bookingDescriptor.getFields().get(43).getMessageType(); Element childElement = Element.initialize(rowElementMock, null, new CustomDescriptor(routeDescriptor), "distance_in_kms").get(); @@ -73,7 +73,7 @@ public void shouldInvokeParentOfValueMockWhenFetching() throws ClassNotFoundExce public void shouldNotInvokeParentOfElementMockWhenFetchingForRoot() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { Row routeRow = new Row(3); routeRow.setField(2, 21.5F); - Descriptors.Descriptor bookingDescriptor = getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage"); + Descriptors.Descriptor bookingDescriptor = getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage"); Descriptors.Descriptor routeDescriptor = bookingDescriptor.getFields().get(43).getMessageType(); Element rootElement = Element.initialize(null, routeRow, new CustomDescriptor(routeDescriptor), "distance_in_kms").get(); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ValueElementTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ValueElementTest.java similarity index 80% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ValueElementTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ValueElementTest.java index c75bb6b90..446104e87 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/elementAt/row/ValueElementTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/elementAt/row/ValueElementTest.java @@ -1,7 +1,7 @@ -package io.odpf.dagger.functions.udfs.scalar.elementAt.row; +package com.gotocompany.dagger.functions.udfs.scalar.elementAt.row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; +import com.gotocompany.dagger.functions.udfs.scalar.elementAt.descriptor.CustomDescriptor; import org.junit.Test; import java.lang.reflect.InvocationTargetException; @@ -19,7 +19,7 @@ public void shouldCreateEmptyNextElementForInvalidValueObject() { @Test public void shouldCreateNextAsEmptyForValidValueObject() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException { - ValueElement element = (ValueElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("io.odpf.dagger.consumer.TestBookingLogMessage")), "service_type").get(); + ValueElement element = (ValueElement) Element.initialize(null, null, new CustomDescriptor(getDescriptor("com.gotocompany.dagger.consumer.TestBookingLogMessage")), "service_type").get(); Optional next = element.createNext("valid_path_not_possible"); diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/MessageParserTest.java 
b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/MessageParserTest.java similarity index 92% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/MessageParserTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/MessageParserTest.java index 969d7f0b8..bbe91c85b 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/MessageParserTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/MessageParserTest.java @@ -1,12 +1,12 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow; +package com.gotocompany.dagger.functions.udfs.scalar.longbow; -import io.odpf.dagger.consumer.TestBookingLogMessage; -import io.odpf.dagger.consumer.TestEnrichedBookingLogMessage; -import io.odpf.dagger.consumer.TestApiLogMessage; -import io.odpf.dagger.consumer.TestBookingStatus; import com.google.protobuf.DynamicMessage; import com.google.protobuf.Timestamp; -import io.odpf.dagger.functions.exceptions.LongbowException; +import com.gotocompany.dagger.consumer.TestApiLogMessage; +import com.gotocompany.dagger.consumer.TestBookingLogMessage; +import com.gotocompany.dagger.consumer.TestBookingStatus; +import com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage; +import com.gotocompany.dagger.functions.exceptions.LongbowException; import org.apache.flink.types.Row; import org.junit.Rule; import org.junit.Test; @@ -59,7 +59,7 @@ public void shouldReadFromNestedField() { @Test public void shouldThrowIfUnableToFindKey() { - thrown.expectMessage("Key : event_timestamp1 does not exist in Message io.odpf.dagger.consumer.TestBookingLogMessage"); + thrown.expectMessage("Key : event_timestamp1 does not exist in Message com.gotocompany.dagger.consumer.TestBookingLogMessage"); thrown.expect(LongbowException.class); long timeStampInSeconds = 100; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpressionTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpressionTest.java similarity index 92% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpressionTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpressionTest.java index 24e9767ea..9ec58feda 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpressionTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/AggregationExpressionTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.expression; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression; import org.junit.Assert; import org.junit.Before; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpressionTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpressionTest.java similarity index 92% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpressionTest.java rename to 
dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpressionTest.java index a2d8188fd..d12a920cc 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpressionTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/expression/OperationExpressionTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.expression; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression; import org.junit.Assert; import org.junit.Before; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessorTest.java similarity index 89% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessorTest.java index 26df79244..402d64e00 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayAggregateProcessorTest.java @@ -1,8 +1,8 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.processors; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors; -import io.odpf.dagger.functions.exceptions.ArrayAggregationException; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.AggregationExpression; +import com.gotocompany.dagger.functions.exceptions.ArrayAggregationException; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.AggregationExpression; import org.apache.commons.jexl3.JexlContext; import org.apache.commons.jexl3.JexlEngine; import org.apache.commons.jexl3.JexlScript; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessorTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessorTest.java similarity index 86% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessorTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessorTest.java index a85a6b273..7eecbf6d8 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessorTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/scalar/longbow/array/processors/ArrayOperateProcessorTest.java @@ -1,9 +1,9 @@ -package io.odpf.dagger.functions.udfs.scalar.longbow.array.processors; +package com.gotocompany.dagger.functions.udfs.scalar.longbow.array.processors; -import io.odpf.dagger.functions.exceptions.ArrayOperateException; -import io.odpf.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; -import 
io.odpf.dagger.functions.udfs.scalar.longbow.array.expression.OperationExpression; +import com.gotocompany.dagger.functions.exceptions.ArrayOperateException; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.LongbowArrayType; +import com.gotocompany.dagger.functions.udfs.scalar.longbow.array.expression.OperationExpression; import org.apache.commons.jexl3.JexlContext; import org.apache.commons.jexl3.JexlEngine; import org.apache.commons.jexl3.JexlScript; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/HistogramBucketTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/HistogramBucketTest.java similarity index 97% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/HistogramBucketTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/HistogramBucketTest.java index 8a7fbba08..ea5bb36ab 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/HistogramBucketTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/HistogramBucketTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.table; +package com.gotocompany.dagger.functions.udfs.table; import org.apache.flink.api.java.tuple.Tuple1; import org.apache.flink.metrics.Gauge; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/OutlierMadTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/OutlierMadTest.java similarity index 99% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/OutlierMadTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/OutlierMadTest.java index f0ada7b34..e2abe71e4 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/OutlierMadTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/OutlierMadTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.table; +package com.gotocompany.dagger.functions.udfs.table; import org.apache.flink.api.java.tuple.Tuple5; import org.apache.flink.metrics.Gauge; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/outlier/mad/MadTest.java b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/MadTest.java similarity index 99% rename from dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/outlier/mad/MadTest.java rename to dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/MadTest.java index 417d3d70c..3c3e84852 100644 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/table/outlier/mad/MadTest.java +++ b/dagger-functions/src/test/java/com/gotocompany/dagger/functions/udfs/table/outlier/mad/MadTest.java @@ -1,4 +1,4 @@ -package io.odpf.dagger.functions.udfs.table.outlier.mad; +package com.gotocompany.dagger.functions.udfs.table.outlier.mad; import org.junit.Test; diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/FileSourceFactoryTest.java b/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/FileSourceFactoryTest.java deleted file mode 100644 index 2cd5a472f..000000000 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/python/file/source/FileSourceFactoryTest.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.odpf.dagger.functions.udfs.python.file.source; - -import 
io.odpf.dagger.functions.udfs.python.file.source.gcs.GcsFileSource; -import io.odpf.dagger.functions.udfs.python.file.source.local.LocalFileSource; -import org.junit.Assert; -import org.junit.Test; - -public class FileSourceFactoryTest { - - @Test - public void shouldGetLocalFileSource() { - String pythonFile = "/path/to/file/test_function.py"; - - FileSource fileSource = FileSourceFactory.getFileSource(pythonFile); - - Assert.assertTrue(fileSource instanceof LocalFileSource); - } - - @Test - public void shouldGetGcsFileSource() { - String pythonFile = "gs://bucket-name/path/to/file/test_function.py"; - - FileSource fileSource = FileSourceFactory.getFileSource(pythonFile); - - Assert.assertTrue(fileSource instanceof GcsFileSource); - } -} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DartContainsTest.java b/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DartContainsTest.java deleted file mode 100644 index 3ea29af89..000000000 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/DartContainsTest.java +++ /dev/null @@ -1,204 +0,0 @@ -package io.odpf.dagger.functions.udfs.scalar; - - -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.functions.exceptions.TagDoesNotExistException; -import io.odpf.dagger.functions.udfs.scalar.dart.store.gcs.GcsDataStore; -import io.odpf.dagger.functions.udfs.scalar.dart.types.SetCache; -import org.apache.flink.metrics.Gauge; -import org.apache.flink.metrics.MetricGroup; -import org.apache.flink.table.functions.FunctionContext; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.Mockito; - -import static java.util.Collections.singleton; -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.*; -import static org.mockito.MockitoAnnotations.initMocks; -import static org.mockito.internal.verification.VerificationModeFactory.times; - -public class DartContainsTest { - private GcsDataStore dataStore; - - @Mock - private MetricGroup metricGroup; - - @Mock - private FunctionContext functionContext; - - @Mock - private MeterStatsManager meterStatsManager; - - @Before - public void setUp() { - initMocks(this); - when(functionContext.getMetricGroup()).thenReturn(metricGroup); - when(metricGroup.addGroup("udf", "DartContains")).thenReturn(metricGroup); - when(metricGroup.addGroup("DartContains")).thenReturn(metricGroup); - this.dataStore = mock(GcsDataStore.class); - when(dataStore.getMeterStatsManager()).thenReturn(meterStatsManager); - } - - @Test - public void shouldReturnTrueWhenFieldContainsTheValue() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(true, dartContains.eval("someList", "someField", 0)); - } - - @Test - public void shouldReturnTrueWhenFieldContainsTheValueFromDifferentPaths() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - when(dataStore.getSet("otherList")).thenReturn(new SetCache(singleton("otherField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(true, dartContains.eval("someList", "someField", 0)); - assertEquals(true, dartContains.eval("otherList", "otherField", 0)); - } - - @Test - public void shouldReturnFalseWhenFieldDoesNotContainsTheValue() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new 
DartContains(dataStore); - - assertEquals(false, dartContains.eval("someList", "otherField", 0)); - } - - @Test(expected = TagDoesNotExistException.class) - public void shouldThrowErrorWhenFieldIsNotExist() { - when(dataStore.getSet("nonExistingList")).thenThrow(TagDoesNotExistException.class); - - DartContains dartContains = new DartContains(dataStore); - - dartContains.eval("nonExistingList", "someField", 0); - } - - @Test - public void shouldNotInvokeDataSourceWhenInvokedAgainWithinRefreshRate() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new DartContains(dataStore); - dartContains.eval("someList", "someField", 1); - dartContains.eval("someList", "otherField", 1); - - verify(dataStore, times(1)).getSet("someList"); - } - - @Test - public void shouldInvokeDataSourceWhenExceededRefreshRate() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new DartContains(dataStore); - dartContains.eval("someList", "someField", -1); - dartContains.eval("someList", "otherField", -1); - - verify(dataStore, times(2)).getSet("someList"); - } - - @Test - public void shouldReturnTrueWhenFieldContainsTheValueInMiddleWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("prefixsomeField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(true, dartContains.eval("someList", "a sentence with prefixsomeField and an end", ".*%s.*")); - } - - @Test - public void shouldReturnFalseWhenTagContainsSpaceAndFieldDoesNotWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("prefixsomeField "))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(false, dartContains.eval("someList", "a sentence with prefixsomeFieldsuffix and an end", ".*%s.*")); - } - - @Test - public void shouldReturnTrueWhenFieldContainsTheValueAtEndWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("prefixsomeField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(true, dartContains.eval("someList", "a sentence that ends with prefixsomeField", ".*%s")); - } - - @Test - public void shouldReturnTrueWhenFieldContainsTheValueAtBeginningWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("prefixsomeField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(true, dartContains.eval("someList", "prefixsomeField is the start of this sentence", "%s.*")); - } - - @Test - public void shouldReturnTrueWhenFieldContainsEntireValueWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("prefixsomeField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(true, dartContains.eval("someList", "prefixsomeField", "%s")); - } - - @Test - public void shouldReturnFalseWhenFieldContainsValueNotInSameCaseWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("prefixsomeField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(false, dartContains.eval("someList", "preFixSomEfield", ".*%s.*")); - } - - @Test - public void shouldReturnFalseWhenFieldDoesNotContainsTheValueWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new DartContains(dataStore); - - assertEquals(false, 
dartContains.eval("someList", "other", ".*%s.*")); - } - - @Test(expected = TagDoesNotExistException.class) - public void shouldThrowErrorWhenFieldIsNotExistWithARegex() { - when(dataStore.getSet("nonExistingList")).thenThrow(TagDoesNotExistException.class); - - DartContains dartContains = new DartContains(dataStore); - - dartContains.eval("nonExistingList", "someField", ".*%s.*"); - } - - @Test - public void shouldNotInvokeDataSourceWhenInvokedAgainWithinRefreshRateWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new DartContains(dataStore); - dartContains.eval("someList", "someField", ".*%s.*", 1); - dartContains.eval("someList", "otherField", ".*%s.*", 1); - - verify(dataStore, times(1)).getSet("someList"); - } - - @Test - public void shouldInvokeDataSourceWhenExceededRefreshRateWithARegex() { - when(dataStore.getSet("someList")).thenReturn(new SetCache(singleton("someField"))); - - DartContains dartContains = new DartContains(dataStore); - dartContains.eval("someList", "someField", ".*%s.*", -1); - dartContains.eval("someList", "otherField", ".*%s.*", -1); - - verify(dataStore, times(2)).getSet("someList"); - } - - @Test - public void shouldRegisterGauge() throws Exception { - DartContains dartContains = new DartContains(dataStore); - dartContains.open(functionContext); - verify(metricGroup, Mockito.times(1)).gauge(any(String.class), any(Gauge.class)); - } -} diff --git a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsDataStoreTest.java b/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsDataStoreTest.java deleted file mode 100644 index 95a4c5f5e..000000000 --- a/dagger-functions/src/test/java/io/odpf/dagger/functions/udfs/scalar/dart/store/gcs/GcsDataStoreTest.java +++ /dev/null @@ -1,109 +0,0 @@ -package io.odpf.dagger.functions.udfs.scalar.dart.store.gcs; - - -import io.odpf.dagger.common.metrics.managers.MeterStatsManager; -import io.odpf.dagger.functions.exceptions.BucketDoesNotExistException; -import io.odpf.dagger.functions.exceptions.TagDoesNotExistException; -import io.odpf.dagger.functions.udfs.scalar.dart.types.MapCache; -import io.odpf.dagger.functions.udfs.scalar.dart.types.SetCache; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class GcsDataStoreTest { - - - private final String defaultListName = "listName"; - - private final String defaultMapName = "mapName"; - @Rule - public ExpectedException thrown = ExpectedException.none(); - private GcsDataStore gcsDataStore; - private List listContent; - private Map mapContent; - private GcsClient gcsClient; - private MeterStatsManager meterStatsManager; - - @Before - public void setUp() { - gcsDataStore = mock(GcsDataStore.class); - gcsClient = mock(GcsClient.class); - meterStatsManager = mock(MeterStatsManager.class); - when(gcsDataStore.getSet(anyString())).thenCallRealMethod(); - when(gcsDataStore.getMap(anyString())).thenCallRealMethod(); - when(gcsDataStore.getGcsClient()).thenReturn(gcsClient); - 
when(gcsDataStore.getMeterStatsManager()).thenReturn(meterStatsManager); - listContent = Arrays.asList("listContent"); - mapContent = Collections.singletonMap("key", "value"); - - } - - - @Test - public void shouldGetExistingListGivenName() { - String jsonData = " { \"data\" : [ \"listContent\" ] } "; - - when(gcsClient.fetchJsonData(any(), any(), any(), anyString())).thenReturn(jsonData); - SetCache setCache = new SetCache(new HashSet<>(listContent)); - assertEquals(setCache, gcsDataStore.getSet(defaultListName)); - } - - @Test - public void shouldThrowTagDoesNotExistWhenListIsNotThere() { - thrown.expect(TagDoesNotExistException.class); - thrown.expectMessage("Could not find the content in gcs for invalidListName"); - - - when(gcsClient.fetchJsonData(any(), any(), any(), anyString())).thenThrow(new TagDoesNotExistException("Could not find the content in gcs for invalidListName")); - - gcsDataStore.getSet("invalidListName"); - } - - @Test - public void shouldThrowBucketDoesNotExistWhenBucketIsNotThere() { - thrown.expect(BucketDoesNotExistException.class); - thrown.expectMessage("Could not find the bucket in gcs for invalidListName"); - - - when(gcsClient.fetchJsonData(any(), any(), any(), anyString())).thenThrow(new BucketDoesNotExistException("Could not find the bucket in gcs for invalidListName")); - - gcsDataStore.getSet("invalidListName"); - } - - - @Test - public void shouldGetExistingMapGivenName() { - - String jsonData = " { \"key\" : \"value\" } "; - when(gcsClient.fetchJsonData(any(), any(), any(), anyString())).thenReturn(jsonData); - MapCache mapCache = new MapCache(new HashMap<>(mapContent)); - - assertEquals(mapCache, gcsDataStore.getMap(defaultMapName)); - } - - @Test - public void shouldThrowTagDoesNotExistWhenMapIsNotThere() { - thrown.expect(TagDoesNotExistException.class); - thrown.expectMessage("Could not find the content in gcs for invalidMapName"); - - when(gcsClient.fetchJsonData(any(), any(), any(), anyString())).thenThrow(new TagDoesNotExistException("Could not find the content in gcs for invalidMapName")); - - gcsDataStore.getSet("invalidMapName"); - } - - -} diff --git a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/EsExternalPostProcessorIntegrationTest.java b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/EsExternalPostProcessorIntegrationTest.java similarity index 89% rename from dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/EsExternalPostProcessorIntegrationTest.java rename to dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/EsExternalPostProcessorIntegrationTest.java index b8d3db9f8..088a77a7b 100644 --- a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/EsExternalPostProcessorIntegrationTest.java +++ b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/EsExternalPostProcessorIntegrationTest.java @@ -1,6 +1,13 @@ -package io.odpf.dagger.integrationtest; - -import io.odpf.dagger.common.core.DaggerContextTestBase; +package com.gotocompany.dagger.integrationtest; + +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorFactory; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import 
com.gotocompany.dagger.core.processors.types.PostProcessor; +import org.apache.commons.lang3.StringUtils; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration; import org.apache.flink.streaming.api.datastream.DataStream; @@ -8,14 +15,6 @@ import org.apache.flink.streaming.api.functions.sink.SinkFunction; import org.apache.flink.test.util.MiniClusterWithClientResource; import org.apache.flink.types.Row; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorFactory; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; -import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpHost; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -33,9 +32,9 @@ import java.util.HashMap; import java.util.List; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -60,7 +59,7 @@ public class EsExternalPostProcessorIntegrationTest extends DaggerContextTestBas @Before public void setUp() { - String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"io.odpf.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; + String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"com.gotocompany.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; this.configurationMap = new HashMap<>(); configurationMap.put(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, "true"); configurationMap.put(INPUT_STREAMS, streams); @@ -130,7 +129,7 @@ public void shouldPopulateFieldFromESOnSuccessResponse() throws Exception { + " \"socket_timeout\": \"5000\",\n" + " \"retry_timeout\": \"5000\",\n" + " \"capacity\": \"30\",\n" - + " \"type\": \"io.odpf.dagger.consumer.TestEnrichedBookingLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage\", \n" + " \"output_mapping\": {\n" + " \"customer_profile\": {\n" + " \"path\": \"$._source\"\n" @@ -181,7 +180,7 @@ public void 
shouldPopulateFieldFromESOnSuccessResponseWithExternalAndInternalSou + " \"socket_timeout\": \"5000\",\n" + " \"retry_timeout\": \"5000\",\n" + " \"capacity\": \"30\",\n" - + " \"type\": \"io.odpf.dagger.consumer.TestEnrichedBookingLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage\", \n" + " \"output_mapping\": {\n" + " \"customer_profile\": {\n" + " \"path\": \"$._source\"\n" @@ -248,7 +247,7 @@ public void shouldPopulateFieldFromESOnSuccessResponseWithAllThreeSourcesIncludi + " \"socket_timeout\": \"5000\",\n" + " \"retry_timeout\": \"5000\",\n" + " \"capacity\": \"30\",\n" - + " \"type\": \"io.odpf.dagger.consumer.TestEnrichedBookingLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestEnrichedBookingLogMessage\", \n" + " \"output_mapping\": {\n" + " \"customer_profile\": {\n" + " \"path\": \"$._source\"\n" @@ -272,7 +271,7 @@ public void shouldPopulateFieldFromESOnSuccessResponseWithAllThreeSourcesIncludi + ",\n" + " \"transformers\": [" + "{\n" - + " \"transformation_class\": \"io.odpf.dagger.functions.transformers.ClearColumnTransformer\",\n" + + " \"transformation_class\": \"com.gotocompany.dagger.functions.transformers.ClearColumnTransformer\",\n" + " \"transformation_arguments\": {\n" + " \"targetColumnName\": \"driver_id\"\n" + " }\n" diff --git a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/GrpcExternalPostProcessorIntegrationTest.java b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/GrpcExternalPostProcessorIntegrationTest.java similarity index 86% rename from dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/GrpcExternalPostProcessorIntegrationTest.java rename to dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/GrpcExternalPostProcessorIntegrationTest.java index 52dbb09c2..57f4a74fe 100644 --- a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/GrpcExternalPostProcessorIntegrationTest.java +++ b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/GrpcExternalPostProcessorIntegrationTest.java @@ -1,6 +1,6 @@ -package io.odpf.dagger.integrationtest; +package com.gotocompany.dagger.integrationtest; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration; import org.apache.flink.streaming.api.datastream.DataStream; @@ -9,15 +9,15 @@ import org.apache.flink.test.util.MiniClusterWithClientResource; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.consumer.TestGrpcRequest; -import io.odpf.dagger.consumer.TestGrpcResponse; -import io.odpf.dagger.consumer.TestServerGrpc; -import io.odpf.dagger.core.processors.PostProcessorFactory; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.consumer.TestGrpcRequest; +import com.gotocompany.dagger.consumer.TestGrpcResponse; +import 
com.gotocompany.dagger.consumer.TestServerGrpc; +import com.gotocompany.dagger.core.processors.PostProcessorFactory; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import org.grpcmock.GrpcMock; import org.junit.Before; import org.junit.ClassRule; @@ -28,10 +28,10 @@ import java.util.HashMap; import java.util.List; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_ENABLE_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.common.core.Constants.SCHEMA_REGISTRY_STENCIL_ENABLE_KEY; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; import static org.grpcmock.GrpcMock.stubFor; import static org.grpcmock.GrpcMock.unaryMethod; import static org.junit.Assert.assertEquals; @@ -56,7 +56,7 @@ public class GrpcExternalPostProcessorIntegrationTest extends DaggerContextTestB @Before public void setUp() { - String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"io.odpf.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; + String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"com.gotocompany.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; this.configurationMap = new HashMap<>(); configurationMap.put(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, "true"); configurationMap.put(INPUT_STREAMS, streams); @@ -82,9 +82,9 @@ public void shouldPopulateFieldFromGrpcOnSuccess() throws Exception { + " \"fail_on_errors\": false,\n" + " \"retain_response_type\": true,\n" + " \"grpc_stencil_url\": \"http://localhost:8000/messages.desc\",\n" - + " \"grpc_request_proto_schema\": \"io.odpf.dagger.consumer.TestGrpcRequest\",\n" - + " \"grpc_response_proto_schema\": \"io.odpf.dagger.consumer.TestGrpcResponse\",\n" - + " \"grpc_method_url\": \"io.odpf.dagger.consumer.TestServer/TestRpcMethod\",\n" + + " \"grpc_request_proto_schema\": \"com.gotocompany.dagger.consumer.TestGrpcRequest\",\n" + + " \"grpc_response_proto_schema\": \"com.gotocompany.dagger.consumer.TestGrpcResponse\",\n" + + " \"grpc_method_url\": \"com.gotocompany.dagger.consumer.TestServer/TestRpcMethod\",\n" + " \"capacity\": \"30\",\n" + " \"headers\": {\n" + " \"content-type\": \"application/json\" \n" @@ -150,9 +150,9 @@ public void 
shouldPopulateFieldFromGrpcOnSuccessWithExternalAndInternalSource() + " \"fail_on_errors\": false,\n" + " \"retain_response_type\": true,\n" + " \"grpc_stencil_url\": \"http://localhost:8000/messages.desc\",\n" - + " \"grpc_request_proto_schema\": \"io.odpf.dagger.consumer.TestGrpcRequest\",\n" - + " \"grpc_response_proto_schema\": \"io.odpf.dagger.consumer.TestGrpcResponse\",\n" - + " \"grpc_method_url\": \"io.odpf.dagger.consumer.TestServer/TestRpcMethod\",\n" + + " \"grpc_request_proto_schema\": \"com.gotocompany.dagger.consumer.TestGrpcRequest\",\n" + + " \"grpc_response_proto_schema\": \"com.gotocompany.dagger.consumer.TestGrpcResponse\",\n" + + " \"grpc_method_url\": \"com.gotocompany.dagger.consumer.TestServer/TestRpcMethod\",\n" + " \"capacity\": \"30\",\n" + " \"output_mapping\": {\n" + " \"field3\": {\n" @@ -234,9 +234,9 @@ public void shouldPopulateFieldFromGrpcOnSuccessWithAllThreeSourcesIncludingTran + " \"fail_on_errors\": false,\n" + " \"retain_response_type\": true,\n" + " \"grpc_stencil_url\": \"http://localhost:8000/messages.desc\",\n" - + " \"grpc_request_proto_schema\": \"io.odpf.dagger.consumer.TestGrpcRequest\",\n" - + " \"grpc_response_proto_schema\": \"io.odpf.dagger.consumer.TestGrpcResponse\",\n" - + " \"grpc_method_url\": \"io.odpf.dagger.consumer.TestServer/TestRpcMethod\",\n" + + " \"grpc_request_proto_schema\": \"com.gotocompany.dagger.consumer.TestGrpcRequest\",\n" + + " \"grpc_response_proto_schema\": \"com.gotocompany.dagger.consumer.TestGrpcResponse\",\n" + + " \"grpc_method_url\": \"com.gotocompany.dagger.consumer.TestServer/TestRpcMethod\",\n" + " \"capacity\": \"30\",\n" + " \"output_mapping\": {\n" + " \"field3\": {\n" @@ -265,7 +265,7 @@ public void shouldPopulateFieldFromGrpcOnSuccessWithAllThreeSourcesIncludingTran + " ], \n" + " \"transformers\": [" + " {\n" - + " \"transformation_class\": \"io.odpf.dagger.functions.transformers.ClearColumnTransformer\",\n" + + " \"transformation_class\": \"com.gotocompany.dagger.functions.transformers.ClearColumnTransformer\",\n" + " \"transformation_arguments\": {\n" + " \"targetColumnName\": \"customer_id\"\n" + " }\n" diff --git a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/HttpExternalPostProcessorIntegrationTest.java b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/HttpExternalPostProcessorIntegrationTest.java similarity index 90% rename from dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/HttpExternalPostProcessorIntegrationTest.java rename to dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/HttpExternalPostProcessorIntegrationTest.java index 429978ad0..95d1d090e 100644 --- a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/HttpExternalPostProcessorIntegrationTest.java +++ b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/HttpExternalPostProcessorIntegrationTest.java @@ -1,6 +1,13 @@ -package io.odpf.dagger.integrationtest; +package com.gotocompany.dagger.integrationtest; -import io.odpf.dagger.common.core.DaggerContextTestBase; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.DaggerContextTestBase; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorFactory; +import 
com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration; import org.apache.flink.streaming.api.datastream.DataStream; @@ -8,19 +15,7 @@ import org.apache.flink.streaming.api.functions.sink.SinkFunction; import org.apache.flink.test.util.MiniClusterWithClientResource; import org.apache.flink.types.Row; - -import com.github.tomakehurst.wiremock.junit.WireMockRule; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorFactory; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; +import org.junit.*; import java.sql.Timestamp; import java.util.ArrayList; @@ -28,9 +23,9 @@ import java.util.List; import static com.github.tomakehurst.wiremock.client.WireMock.*; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -54,7 +49,7 @@ public class HttpExternalPostProcessorIntegrationTest extends DaggerContextTestB @Before public void setUp() { - String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"io.odpf.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; + String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"com.gotocompany.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; this.configMap = new HashMap<>(); configMap.put(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, "true"); configMap.put(INPUT_STREAMS, streams); @@ -85,7 +80,7 @@ public void shouldPopulateFieldFromHTTPGetApiOnSuccessResponseWithCorrespondingD + " \"headers\": {\n" + " \"content-type\": \"application/json\"\n" + " },\n" - + " \"type\": \"io.odpf.dagger.consumer.TestSurgeFactorLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\", \n" + " \"output_mapping\": 
{\n" + " \"surge_factor\": {\n" + " \"path\": \"$.data\"\n" @@ -203,7 +198,7 @@ public void shouldPopulateFieldFromHTTPGetApiOnSuccessResponseWithExternalAndInt + " \"headers\": {\n" + " \"content-type\": \"application/json\"\n" + " },\n" - + " \"type\": \"io.odpf.dagger.consumer.TestSurgeFactorLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\", \n" + " \"output_mapping\": {\n" + " \"surge_factor\": {\n" + " \"path\": \"$.data\"\n" @@ -295,7 +290,7 @@ public void shouldPopulateFieldFromHTTPGetApiOnSuccessResponseWithAllThreeSource + ",\n" + " \"transformers\": [" + " {\n" - + " \"transformation_class\": \"io.odpf.dagger.functions.transformers.ClearColumnTransformer\",\n" + + " \"transformation_class\": \"com.gotocompany.dagger.functions.transformers.ClearColumnTransformer\",\n" + " \"transformation_arguments\": {\n" + " \"targetColumnName\": \"customer_id\"\n" + " }\n" @@ -337,7 +332,7 @@ public void shouldPopulateFieldFromHTTPGetApiOnSuccessResponseWithAllThreeSource @Test public void shouldPopulateFieldsFromHttpPostApiWithProperJsonBodyForComplexDataTypes() throws Exception { - String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"io.odpf.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; + String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"com.gotocompany.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; String postProcessorConfigString = "{\n" + " \"external_source\": {\n" @@ -354,7 +349,7 @@ public void shouldPopulateFieldsFromHttpPostApiWithProperJsonBodyForComplexDataT + " \"headers\": {\n" + " \"content-type\": \"application/json\"\n" + " },\n" - + " \"type\": \"io.odpf.dagger.consumer.TestSurgeFactorLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\", \n" + " \"output_mapping\": {\n" + " \"surge_factor\": {\n" + " \"path\": \"$.data\"\n" diff --git a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/PostGresExternalPostProcessorIntegrationTest.java b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/PostGresExternalPostProcessorIntegrationTest.java similarity index 90% rename from dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/PostGresExternalPostProcessorIntegrationTest.java rename to dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/PostGresExternalPostProcessorIntegrationTest.java index 6c259c808..a174e9426 100644 --- a/dagger-tests/src/integrationtest/java/io/odpf/dagger/integrationtest/PostGresExternalPostProcessorIntegrationTest.java +++ b/dagger-tests/src/integrationtest/java/com/gotocompany/dagger/integrationtest/PostGresExternalPostProcessorIntegrationTest.java @@ -1,6 +1,6 @@ 
-package io.odpf.dagger.integrationtest; +package com.gotocompany.dagger.integrationtest; -import io.odpf.dagger.common.core.DaggerContext; +import com.gotocompany.dagger.common.core.DaggerContext; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration; import org.apache.flink.streaming.api.datastream.DataStream; @@ -10,12 +10,12 @@ import org.apache.flink.test.util.MiniClusterWithClientResource; import org.apache.flink.types.Row; -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.processors.PostProcessorFactory; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.processors.types.PostProcessor; +import com.gotocompany.dagger.common.configuration.Configuration; +import com.gotocompany.dagger.common.core.StencilClientOrchestrator; +import com.gotocompany.dagger.common.core.StreamInfo; +import com.gotocompany.dagger.core.processors.PostProcessorFactory; +import com.gotocompany.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import com.gotocompany.dagger.core.processors.types.PostProcessor; import io.vertx.pgclient.PgConnectOptions; import io.vertx.pgclient.PgPool; import io.vertx.sqlclient.PoolOptions; @@ -32,9 +32,9 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static io.odpf.dagger.common.core.Constants.INPUT_STREAMS; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; -import static io.odpf.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; +import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY; +import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY; import static io.vertx.pgclient.PgPool.pool; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -73,7 +73,7 @@ public static void setUp() { host = "localhost"; } - String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"io.odpf.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; + String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\",\"INPUT_SCHEMA_TABLE\":\"testbooking\",\"INPUT_SCHEMA_PROTO_CLASS\":\"com.gotocompany.dagger.consumer.TestBookingLogMessage\",\"INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX\":\"41\",\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE\":\"\",\"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET\":\"latest\",\"SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID\":\"test-consumer\",\"SOURCE_KAFKA_NAME\":\"localkafka\"}]"; configurationMap.put(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, "true"); configurationMap.put(INPUT_STREAMS, streams); configuration = new Configuration(ParameterTool.fromMap(configurationMap)); @@ -181,7 +181,7 @@ public void 
shouldPopulateFieldFromPostgresWithCorrespondingDataType() throws Ex + " \"idle_timeout\": \"5000\",\n" + " \"fail_on_errors\": \"false\", \n" + " \"capacity\": \"30\",\n" - + " \"type\": \"io.odpf.dagger.consumer.TestSurgeFactorLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\", \n" + " \"output_mapping\": {\n" + " \"surge_factor\": \"surge_factor\" \n" + " }\n" @@ -232,7 +232,7 @@ public void shouldPopulateFieldFromPostgresWithSuccessResponseWithExternalAndInt + " \"idle_timeout\": \"5000\",\n" + " \"fail_on_errors\": \"false\", \n" + " \"capacity\": \"30\",\n" - + " \"type\": \"io.odpf.dagger.consumer.TestSurgeFactorLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\", \n" + " \"output_mapping\": {\n" + " \"surge_factor\": \"surge_factor\" \n" + " }\n" @@ -293,7 +293,7 @@ public void shouldPopulateFieldFromPostgresOnSuccessResponseWithAllThreeSourcesI + " \"idle_timeout\": \"5000\",\n" + " \"fail_on_errors\": \"false\", \n" + " \"capacity\": \"30\",\n" - + " \"type\": \"io.odpf.dagger.consumer.TestSurgeFactorLogMessage\", \n" + + " \"type\": \"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\", \n" + " \"output_mapping\": {\n" + " \"surge_factor\": \"surge_factor\" \n" + " }\n" @@ -315,7 +315,7 @@ public void shouldPopulateFieldFromPostgresOnSuccessResponseWithAllThreeSourcesI + ",\n" + " \"transformers\": [" + "{\n" - + " \"transformation_class\": \"io.odpf.dagger.functions.transformers.ClearColumnTransformer\",\n" + + " \"transformation_class\": \"com.gotocompany.dagger.functions.transformers.ClearColumnTransformer\",\n" + " \"transformation_arguments\": {\n" + " \"targetColumnName\": \"customer_id\"\n" + " }\n" diff --git a/docs/blog/2021-08-20-dagger-launch.md b/docs/blog/2021-08-20-dagger-launch.md deleted file mode 100644 index 480a2f844..000000000 --- a/docs/blog/2021-08-20-dagger-launch.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -slug: introducing-dagger -title: Introducing Dagger -authors: - name: Ravi Suhag - title: Maintainer - url: https://github.com/ravisuhag -tags: [odpf, dagger] ---- - -We are live! diff --git a/docs/blog/2023-05-23-intro-dagger-docs.md b/docs/blog/2023-05-23-intro-dagger-docs.md new file mode 100644 index 000000000..de0ebf63c --- /dev/null +++ b/docs/blog/2023-05-23-intro-dagger-docs.md @@ -0,0 +1,9 @@ +--- +slug: introducing-dagger +title: Introducing Dagger Documentation page +authors: + name: Hari krishna +tags: [dagger] +--- + +Our docs website is live. diff --git a/docs/blog/authors.yaml b/docs/blog/authors.yaml new file mode 100644 index 000000000..6f24bcd3e --- /dev/null +++ b/docs/blog/authors.yaml @@ -0,0 +1,5 @@ +harikrishna: + name: Hari krishna + title: Maintainer + url: https://github.com/harikrishnakanchi + image_url: https://github.com/harikrishnakanchi.png diff --git a/docs/blog/authors.yml b/docs/blog/authors.yml deleted file mode 100644 index 3ff04a18e..000000000 --- a/docs/blog/authors.yml +++ /dev/null @@ -1,5 +0,0 @@ -ravisuhag: - name: Ravi Suhag - title: Maintainer - url: https://github.com/ravisuhag - image_url: https://github.com/ravisuhag.png diff --git a/docs/docs/advance/overview.md b/docs/docs/advance/overview.md index f22723a00..a367f9b61 100644 --- a/docs/docs/advance/overview.md +++ b/docs/docs/advance/overview.md @@ -24,4 +24,4 @@ DARTS allows you to join streaming data from a reference data store. 
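The renamed integration tests above (Elasticsearch, gRPC, HTTP and Postgres) all repeat the same setup pattern: a STREAMS JSON describing the Kafka source, a post-processor JSON, and both wrapped into Dagger's `Configuration` via Flink's `ParameterTool`. The Java sketch below condenses that pattern using only classes and constants that appear in the hunks above; the trimmed JSON values are illustrative placeholders, not a complete working post-processor configuration.

```
import com.gotocompany.dagger.common.configuration.Configuration;
import org.apache.flink.api.java.utils.ParameterTool;

import java.util.HashMap;
import java.util.Map;

import static com.gotocompany.dagger.common.core.Constants.INPUT_STREAMS;
import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_CONFIG_KEY;
import static com.gotocompany.dagger.core.utils.Constants.PROCESSOR_POSTPROCESSOR_ENABLE_KEY;

// Condensed form of the setUp() blocks in the integration tests above: build the
// STREAMS and post-processor JSON strings, then wrap them in Dagger's Configuration.
public class PostProcessorConfigSketch {
    public static void main(String[] args) {
        // Trimmed STREAMS entry; the real tests also set offset reset, group id, etc.
        String streams = "[{\"SOURCE_KAFKA_TOPIC_NAMES\":\"dummy-topic\","
                + "\"INPUT_SCHEMA_TABLE\":\"testbooking\","
                + "\"INPUT_SCHEMA_PROTO_CLASS\":\"com.gotocompany.dagger.consumer.TestBookingLogMessage\","
                + "\"SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS\":\"localhost:6668\","
                + "\"SOURCE_KAFKA_NAME\":\"localkafka\"}]";

        // Trimmed external post-processor block, in the shape used by the HTTP test above.
        String postProcessorConfig = "{\"external_source\":{\"http\":[{"
                + "\"endpoint\":\"http://localhost:8089\",\"verb\":\"get\","
                + "\"type\":\"com.gotocompany.dagger.consumer.TestSurgeFactorLogMessage\","
                + "\"output_mapping\":{\"surge_factor\":{\"path\":\"$.data\"}}}]}}";

        Map<String, String> configMap = new HashMap<>();
        configMap.put(INPUT_STREAMS, streams);
        configMap.put(PROCESSOR_POSTPROCESSOR_ENABLE_KEY, "true");
        configMap.put(PROCESSOR_POSTPROCESSOR_CONFIG_KEY, postProcessorConfig);

        // Same construction the tests use before handing the configuration to PostProcessorFactory.
        Configuration configuration = new Configuration(ParameterTool.fromMap(configMap));
        System.out.println(configuration);
    }
}
```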
It supports ### [Security](./security.md) -Enable secure data access from ACL enabled kafka source using SASL (Simple Authentication Security Layer) authentication. \ No newline at end of file +Enable secure data access from ACL enabled kafka source using SASL (Simple Authentication Security Layer) authentication. Also enable data access from SSL/TLS enabled kafka source. \ No newline at end of file diff --git a/docs/docs/advance/post_processor.md b/docs/docs/advance/post_processor.md index 647dee97f..a5634cfb9 100644 --- a/docs/docs/advance/post_processor.md +++ b/docs/docs/advance/post_processor.md @@ -291,6 +291,13 @@ A flag for deciding whether the job should fail on encountering errors(timeout a - Type: `optional` - Default value: `false` +##### `exclude_fail_on_errors_code_range` + +Defines the exclusion range of HTTP status codes for which job should not fail if `fail_on_errors` is true. + +- Example value: `400,404-499` +- Type: `optional` + ##### `capacity` This parameter(Async I/O capacity) defines how many max asynchronous requests may be in progress at the same time. @@ -659,6 +666,24 @@ The timeout value for gRPC client in ms. - Example value: `5000` - Type: `required` +##### `grpc_arg_keepalive_time_ms` + +The keepalive ping is a way to check if a channel is currently working by sending HTTP2 pings over the transport. It is sent periodically, and if the ping is not acknowledged by the peer within a certain timeout period, the transport is disconnected. Other keepalive configurations are described [here](https://github.com/grpc/grpc/blob/master/doc/keepalive.md). + +This channel argument controls the period (in milliseconds) after which a keepalive ping is sent on the transport. If smaller than 10000, 10000 will be used instead. + +- Example value: `60000` +- Type: `optional` +- Default value: `infinite` + +##### `grpc_arg_keepalive_timeout_ms` + +This channel argument controls the amount of time (in milliseconds) the sender of the keepalive ping waits for an acknowledgement. If it does not receive an acknowledgment within this time, it will close the connection. + +- Example value: `5000` +- Type: `optional` +- Default value: `20000` + ##### `fail_on_errors` A flag for deciding whether the job should fail on encountering errors or not. If set false the job won’t fail and enrich with empty fields otherwise the job will fail. @@ -793,7 +818,7 @@ The type of internal post processor. This could be ‘SQL’, ‘constant’ or The configuration argument needed to specify inputs for certain function type internal post processors. As of now, this is only required for `JSON_PAYLOAD` internal post processor. -- Example value: `{"schema_proto_class": "io.odpf.dagger.consumer.TestBookingLogMessage"}` +- Example value: `{"schema_proto_class": "com.gotocompany.dagger.consumer.TestBookingLogMessage"}` - Type: `optional` ### Sample Query @@ -884,7 +909,7 @@ PROCESSOR_POSTPROCESSOR_CONFIG = { "type": "function", "value": "JSON_PAYLOAD", "internal_processor_config": { - "schema_proto_class": "io.odpf.dagger.consumer.TestBookingLogMessage" + "schema_proto_class": "com.gotocompany.dagger.consumer.TestBookingLogMessage" } } ] diff --git a/docs/docs/advance/pre_processor.md b/docs/docs/advance/pre_processor.md index f6d8a8a58..253cee2c6 100644 --- a/docs/docs/advance/pre_processor.md +++ b/docs/docs/advance/pre_processor.md @@ -27,7 +27,7 @@ Following variables need to be configured as part of [PROCESSOR_PREPROCESSOR_CON A list of transformer configs. 
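Circling back to the gRPC keepalive channel arguments documented in the post_processor.md hunk above (`grpc_arg_keepalive_time_ms` and `grpc_arg_keepalive_timeout_ms`): on a plain grpc-java channel these two values correspond to the keepalive ping interval and the acknowledgement timeout. The sketch below shows only the standard grpc-java builder calls they appear to map to; it is not Dagger's actual client wiring, and the endpoint is a placeholder.

```
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

import java.util.concurrent.TimeUnit;

// Standard grpc-java keepalive settings, shown only to illustrate what the two
// millisecond values control; Dagger's own gRPC client code is not reproduced here.
public class GrpcKeepaliveSketch {
    public static void main(String[] args) {
        ManagedChannel channel = ManagedChannelBuilder.forAddress("localhost", 8000)
                .usePlaintext()
                .keepAliveTime(60000, TimeUnit.MILLISECONDS)   // ~ grpc_arg_keepalive_time_ms
                .keepAliveTimeout(5000, TimeUnit.MILLISECONDS) // ~ grpc_arg_keepalive_timeout_ms
                .build();
        channel.shutdownNow();
    }
}
```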
-- Example value: `[{"table_name": "testtable","transformers": [{"transformation_class": "io.odpf.dagger.functions.transformers.InvalidRecordFilterTransformer"}]}]` +- Example value: `[{"table_name": "testtable","transformers": [{"transformation_class": "InvalidRecordFilterTransformer"}]}]` - Type: `required` ## `table_name` @@ -41,14 +41,14 @@ Table name for the transformer. List of transformers per table. -- Example value: `[{"transformation_class": "io.odpf.dagger.functions.transformers.InvalidRecordFilterTransformer"}]` +- Example value: `[{"transformation_class": "InvalidRecordFilterTransformer"}]` - Type: `required` ## `transformation_class` Fully qualified name of the class to be used for transformation. -- Example value: `"io.odpf.dagger.functions.transformers.InvalidRecordFilterTransformer"` +- Example value: `"InvalidRecordFilterTransformer"` - Type: `required` ## `transformation_arguments` @@ -67,7 +67,7 @@ PROCESSOR_PREPROCESSOR_CONFIG = { "table_transformers": [{ "table_name": "data_stream", "transformers": [{ - "transformation_class": "io.odpf.dagger.functions.transformers.InvalidRecordFilterTransformer" + "transformation_class": "InvalidRecordFilterTransformer" }] }] } diff --git a/docs/docs/advance/security.md b/docs/docs/advance/security.md index e5d2b60fb..20d195735 100644 --- a/docs/docs/advance/security.md +++ b/docs/docs/advance/security.md @@ -4,7 +4,7 @@ The primary goals of the Dagger security needs are to enable secure data access # Supported secure data access sources -We currently support only secure data access from kafka source using simple authentication security layer([SASL](https://kafka.apache.org/documentation/#security_sasl)) +We currently support secure data access from kafka source using simple authentication security layer([SASL](https://kafka.apache.org/documentation/#security_sasl)) and SSL encryption for data-in-transit between the source kafka and the dagger client. - [KAFKA_SOURCE](../guides/choose_source.md) - [KAFKA_CONSUMER](../guides/choose_source.md) @@ -146,4 +146,168 @@ STREAMS = [ ] } ] -``` \ No newline at end of file +``` +## Encryption and Authentication using SSL + +SSL is used for encryption of traffic as well as authentication. By default, SSL is disabled in dagger kafka source but can be turned on if needed. + +Dagger currently support SSL based encryption and authentication with `KAFKA_SOURCE` and `KAFKA_CONSUMER`. + +**Note:** You must configure your Kafka cluster to enable encryption and authentication using SSL. See the [Kafka documentation](https://kafka.apache.org/34/documentation.html#security_ssl) for your Kafka version to learn how to configure SSL encryption of traffic as well as authentication. + +### Configurations + +To consume data from SSL/TLS enabled kafka, following variables need to be configured as part of [STREAMS](../reference/configuration.md) JSON + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL` + +Defines the security protocol used to communicate with SSL enabled kafka. Other than SASL supported values, as mentioned above, Dagger supports, +* `SSL`: to enable SSL/TLS for encryption and authentication + +* Example value: `SSL` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL` + +Defines the security protocol used to communicate with SSL enabled kafka. 
Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.protocol) +Dagger supported values are: TLSv1.2, TLSv1.3, TLS, TLSv1.1, SSL, SSLv2 and SSLv3 + +* Example value 1: `SSL` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +* Example value 2: `TLS` +* Type: `optional` required only for TLS enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD` + +Defines the SSL Key Password for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.key.password) + +* Example value: `myKeyPass` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION` + +Defines the SSL KeyStore location or path for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.keystore.location) + +* Example value: `/tmp/myKeyStore.jks` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD` + +Defines the SSL KeyStore password for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.keystore.password) + +* Example value: `myKeyStorePass` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE` + +Defines the SSL KeyStore Type like JKS, PKCS12 etc for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.keystore.type) +Dagger supported values are: JKS, PKCS12, PEM + +* Example value: `JKS` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION` + +Defines the SSL TrustStore location or path for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.truststore.location) + +* Example value: `/tmp/myTrustStore.jks` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD` + +Defines the SSL TrustStore password for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.truststore.password) + +* Example value: `myTrustStorePass` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE` + +Defines the SSL TrustStore Type like JKS, PKCS12 for Kafka source. 
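Taken together, the `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_*` keys listed above look like prefixed versions of the standard Kafka client SSL properties (`security.protocol`, `ssl.keystore.location`, and so on). The Java sketch below sets those plain Kafka consumer properties directly; the prefix-to-property mapping is an assumption made for illustration rather than Dagger's verified wiring, and the paths and passwords are placeholders.

```
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

import java.util.Collections;
import java.util.Properties;

// Plain Kafka consumer configured for SSL, mirroring the SSL settings documented above.
public class SslConsumerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "dummy-consumer-group");
        props.put("security.protocol", "SSL");                        // ..._SECURITY_PROTOCOL
        props.put("ssl.protocol", "TLS");                             // ..._SSL_PROTOCOL
        props.put("ssl.keystore.location", "/tmp/myKeyStore.jks");    // ..._SSL_KEYSTORE_LOCATION
        props.put("ssl.keystore.password", "myKeyStorePass");         // ..._SSL_KEYSTORE_PASSWORD
        props.put("ssl.keystore.type", "JKS");                        // ..._SSL_KEYSTORE_TYPE
        props.put("ssl.truststore.location", "/tmp/myTrustStore.jks");
        props.put("ssl.truststore.password", "myTrustStorePass");
        props.put("ssl.truststore.type", "JKS");
        props.put("key.deserializer", ByteArrayDeserializer.class.getName());
        props.put("value.deserializer", ByteArrayDeserializer.class.getName());

        try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("test-topic"));
        }
    }
}
```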
Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.truststore.type) +Dagger supported values are: JKS, PKCS12, PEM + +* Example value: `JKS` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +### Example + +STREAMS configurations to consume data from the SSL/TLS enabled kafka - + +``` +STREAMS = [ + { + "SOURCE_KAFKA_TOPIC_NAMES": "test-topic", + "INPUT_SCHEMA_TABLE": "data_stream", + "INPUT_SCHEMA_PROTO_CLASS": "com.tests.TestMessage", + "INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX": "41", + "SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS": "localhost:9092", + "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE": "false", + "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET": "latest", + "SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID": "dummy-consumer-group", + "SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL": "SSL", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL": "TLS", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION": "my-keystore.jks", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD": "test-keystore-pass", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE": "JKS", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION": "my-truststore.jks", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD": "test-truststore-pass", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE": "JKS", + "SOURCE_KAFKA_NAME": "local-kafka-stream", + "SOURCE_DETAILS": [ + { + "SOURCE_TYPE": "UNBOUNDED", + "SOURCE_NAME": "KAFKA_CONSUMER" + } + ] + } +] +``` + +STREAMS configurations to consume data from multiple kafka sources - + +``` +STREAMS = [ + { + "SOURCE_KAFKA_TOPIC_NAMES": "test-topic", + "INPUT_SCHEMA_TABLE": "data_stream", + "INPUT_SCHEMA_PROTO_CLASS": "com.tests.TestMessage", + "INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX": "41", + "SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS": "localhost:9092", + "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE": "false", + "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET": "latest", + "SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID": "dummy-consumer-group", + "SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL": "SSL", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL": "TLS", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION": "my-keystore.jks", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD": "test-keystore-pass", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE": "JKS", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION": "my-truststore.jks", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD": "test-truststore-pass", + "SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE": "JKS", + "SOURCE_KAFKA_NAME": "local-kafka-stream", + "SOURCE_DETAILS": [ + { + "SOURCE_TYPE": "UNBOUNDED", + "SOURCE_NAME": "KAFKA_CONSUMER" + } + ] + }, + { + "SOURCE_KAFKA_TOPIC_NAMES": "test-topic-2", + "INPUT_SCHEMA_TABLE": "data_stream-2", + "INPUT_SCHEMA_PROTO_CLASS": "com.tests.TestMessage", + "INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX": "41", + "SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS": "localhost:9091", + "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE": "false", + "SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET": "latest", + "SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID": "dummy-consumer-group", + "SOURCE_KAFKA_NAME": "local-kafka-stream-2", + "SOURCE_DETAILS": [ + { + "SOURCE_TYPE": "UNBOUNDED", + "SOURCE_NAME": "KAFKA_CONSUMER" + } + ] + } +] +``` diff --git a/docs/docs/concepts/architecture.md b/docs/docs/concepts/architecture.md index a9e88413e..73ca685e4 100644 --- 
a/docs/docs/concepts/architecture.md +++ b/docs/docs/concepts/architecture.md @@ -20,7 +20,7 @@ files as provided are consumed in a single stream. _**Dagger Core**_ -- The core part of the dagger(StreamManager) has the following responsibilities. It works sort of as a controller for other components in the dagger. +- The core part of the dagger(DaggerSqlJobBuilder) has the following responsibilities. It works sort of as a controller for other components in the dagger. - Configuration management. - Table registration. - Configuring Deserialization and Serialization of data. @@ -69,7 +69,7 @@ _**Sink and Serializer**_ - Dagger supports Kafka, BigQuery and InfluxDB as supported sinks where the unbounded results are pushed at the end of the lifecycle. - In the case of Kafka Sink the final result is protobuf encoded. So the result goes through a serialization stage on some defined output schema. The serializer module of the proto-handler does this. Results in Kafka can be used via any Kafka consumer. - Influx Sink helps in real-time analytics and dashboarding. In the case of Influx Sink dagger, converts results in Flink Row to InfluxDB points and add `tag`/`labels` as specified in the SQL. -- BigQuery is a data warehouse capable of running SQL queries over large datasets. Bigquery Sink is created using the [ODPF Depot](https://github.com/odpf/depot/tree/main/docs) library. Depot is a sink connector, which acts as a bridge between data processing systems and real sink. In BigQuery Sink, each Flink Row is converted into one BigQuery table row. The schema, table and partitioning details of the table are fetched from user supplied configuration. +- BigQuery is a data warehouse capable of running SQL queries over large datasets. Bigquery Sink is created using the [GOTO Depot](https://github.com/goto/depot/tree/main/docs) library. Depot is a sink connector, which acts as a bridge between data processing systems and real sink. In BigQuery Sink, each Flink Row is converted into one BigQuery table row. The schema, table and partitioning details of the table are fetched from user supplied configuration. ### Schema Handling @@ -78,7 +78,7 @@ _**Sink and Serializer**_ Each stream, irrespective of the data source, should produce data according to a fixed, configured protobuf schema. - Dagger deserializes the data consumed from the topics using the Protobuf descriptors generated out of the artifacts. The schema handling i:e, finding the mapped schema for the topic, downloading the descriptors, and dynamically being -notified of/updating with the latest schema is abstracted through a homegrown library called [stencil](https://github.com/odpf/stencil). +notified of/updating with the latest schema is abstracted through a homegrown library called [stencil](https://github.com/goto/stencil). - Stencil is a proprietary library that provides an abstraction layer, for schema handling. - Schema Caching, dynamic schema updates are features of the stencil client library. diff --git a/docs/docs/contribute/add_transformer.md b/docs/docs/contribute/add_transformer.md index 8c7463d6e..c2f92efd7 100644 --- a/docs/docs/contribute/add_transformer.md +++ b/docs/docs/contribute/add_transformer.md @@ -8,13 +8,13 @@ For adding custom Transformers follow these steps - Ensure none of the [built-in Transformers](../reference/transformers.md) suits your requirement. 
-- Transformers take [StreamInfo](https://github.com/odpf/dagger/blob/main/dagger-common/src/main/java/io/odpf/dagger/common/core/StreamInfo.java) which is a wrapper around Flink DataStream as input and transform them to some other StreamInfo/DataStream. +- Transformers take [StreamInfo](https://github.com/goto/dagger/blob/main/dagger-common/src/main/java/com/gotocompany/dagger/common/core/StreamInfo.java) which is a wrapper around Flink DataStream as input and transform them to some other StreamInfo/DataStream. -- To define a new Transformer implement Transformer interface. The contract of Transformers is defined [here](https://github.com/odpf/dagger/blob/main/dagger-common/src/main/java/io/odpf/dagger/common/core/Transformer.java). +- To define a new Transformer implement Transformer interface. The contract of Transformers is defined [here](https://github.com/goto/dagger/blob/main/dagger-common/src/main/java/com/gotocompany/dagger/common/core/Transformer.java). - Since an input DataStream is available in Transformer, all the Flink supported operators which transform `DataStream -> DataStream` can be applied/used by default for the transformations. Operators are how Flink exposes classic Map-reduce type functionalities. Read more about Flink Operators [here](https://ci.apache.org/projects/flink/flink-docs-release-1.14/dev/stream/operators/). -- In the case of single Operator Transformation you can extend the desired Operator in the Transformer class itself. For example, follow this code of [HashTransformer](https://github.com/odpf/dagger/blob/main/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers/HashTransformer.java). You can also define multiple chaining operators to Transform Data. +- In the case of single Operator Transformation you can extend the desired Operator in the Transformer class itself. For example, follow this code of [HashTransformer](https://github.com/goto/dagger/blob/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers/HashTransformer.java). You can also define multiple chaining operators to Transform Data. - A configuration `transformation_arguments` inject the required parameters as a Constructor argument to the Transformer class. From the config point of view, these are simple Map of String and Object. So you need to cast them to your desired data types. Find a more detailed overview of the transformer example [here](../guides/use_transformer.md). diff --git a/docs/docs/contribute/add_udf.md b/docs/docs/contribute/add_udf.md index 04f30da37..a9fcce431 100644 --- a/docs/docs/contribute/add_udf.md +++ b/docs/docs/contribute/add_udf.md @@ -14,16 +14,16 @@ For adding custom UDFs follow these steps: - For adding UDF with Java/Scala: - Follow [this](https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/table/functions/udfs/) for more insights on writing your UDF. - - UDF needs to be added in the `function-type` folder inside [this](https://github.com/odpf/dagger/tree/main/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs) on `dagger-functions` subproject. 
- - Extend either of [ScalarUdf](https://github.com/odpf/dagger/blob/main/dagger-common/src/main/java/io/odpf/dagger/common/udfs/ScalarUdf.java), [TableUdf](https://github.com/odpf/dagger/blob/main/dagger-common/src/main/java/io/odpf/dagger/common/udfs/TableUdf.java) or [AggregateUdf](https://github.com/odpf/dagger/blob/main/dagger-common/src/main/java/io/odpf/dagger/common/udfs/AggregateUdf.java) from `dagger-common`. They are boilerplate contracts extending Flink UDF classes. These classes do some more preprocessing(like exposing some metrics) in the `open` method behind the scene. - - Register the UDF in [this](https://github.com/odpf/dagger/blob/main/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs/factories/FunctionFactory.java) class. This is required to let Flink know about your function. - - If you have some business-specific use-cases and you don't want to add UDFs to the open-sourced repo, you can have a separate local codebase for those UDFs. Those UDFs need to be registered in a similar class like the [`UDFFactory`](https://github.com/odpf/dagger/blob/main/dagger-common/src/main/java/io/odpf/dagger/common/udfs/UdfFactory.java). Keep both the UDF classes and the factory class in the classpath of Dagger. Configure the fully qualified Factory class in the `FUNCTION_FACTORY_CLASSES` parameter and you will be able to use the desired UDF in your query. + - UDF needs to be added in the `function-type` folder inside [this](https://github.com/goto/dagger/tree/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs) on `dagger-functions` subproject. + - Extend either of [ScalarUdf](https://github.com/goto/dagger/blob/main/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/ScalarUdf.java), [TableUdf](https://github.com/goto/dagger/blob/main/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/TableUdf.java) or [AggregateUdf](https://github.com/goto/dagger/blob/main/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/AggregateUdf.java) from `dagger-common`. They are boilerplate contracts extending Flink UDF classes. These classes do some more preprocessing(like exposing some metrics) in the `open` method behind the scene. + - Register the UDF in [this](https://github.com/goto/dagger/blob/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs/factories/FunctionFactory.java) class. This is required to let Flink know about your function. + - If you have some business-specific use-cases and you don't want to add UDFs to the open-sourced repo, you can have a separate local codebase for those UDFs. Those UDFs need to be registered in a similar class like the [`UDFFactory`](https://github.com/goto/dagger/blob/main/dagger-common/src/main/java/com/gotocompany/dagger/common/udfs/UdfFactory.java). Keep both the UDF classes and the factory class in the classpath of Dagger. Configure the fully qualified Factory class in the `FUNCTION_FACTORY_CLASSES` parameter and you will be able to use the desired UDF in your query. - For adding UDF with Python: - Follow [this](https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/python/table/udfs/overview/) for more insights on writing your UDF. - - UDF need to be added inside [this](https://github.com/odpf/dagger/tree/main/dagger-py-functions/udfs) on `dagger-py-functions` directory. 
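As a concrete illustration of the Java/Scala UDF steps just described, the sketch below extends `ScalarUdf` from `dagger-common` with a single `eval` method, following Flink's usual scalar-function convention. The class name `Reverse` is hypothetical, and the sketch assumes `ScalarUdf` adds no abstract methods beyond Flink's `ScalarFunction`; it would still need to be registered in `FunctionFactory` (or a custom factory configured via `FUNCTION_FACTORY_CLASSES`) before it could be used in a query.

```
import com.gotocompany.dagger.common.udfs.ScalarUdf;

// Hypothetical scalar UDF that reverses a string column; shown only to illustrate
// the "extend ScalarUdf and define a public eval method" pattern described above.
public class Reverse extends ScalarUdf {
    public String eval(String input) {
        if (input == null) {
            return null;
        }
        return new StringBuilder(input).reverse().toString();
    }
}
```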
+ - UDF need to be added inside [this](https://github.com/goto/dagger/tree/main/dagger-py-functions/udfs) on `dagger-py-functions` directory. - Ensure that the filename and method name on the python functions is the same. This name will be registered by dagger as a function name which later can be used on the query. - - Ensure to add dependency needed for the python function on the [requirements.txt](https://github.com/odpf/dagger/tree/main/dagger-py-functions/requirements.txt) file. + - Ensure to add dependency needed for the python function on the [requirements.txt](https://github.com/goto/dagger/tree/main/dagger-py-functions/requirements.txt) file. - Add python unit test and the make sure the test is passed. - If you have some business-specific use-cases and you don't want to add UDFs to the open-sourced repo, you can have a separate local codebase for those UDFs and specify that file on the python configuration. diff --git a/docs/docs/contribute/contribution.md b/docs/docs/contribute/contribution.md index 732cbfa08..cff4711e5 100644 --- a/docs/docs/contribute/contribution.md +++ b/docs/docs/contribute/contribution.md @@ -4,7 +4,7 @@ The following is a set of guidelines for contributing to Dagger. These are mostl - [Concepts](../concepts/overview) section will explain to you about Dagger architecture. - Our [roadmap](../roadmap.md) is the 10000-foot view of where we're heading in near future. -- Github [issues](https://github.com/odpf/dagger/issues) track the ongoing and reported issues. +- Github [issues](https://github.com/goto/dagger/issues) track the ongoing and reported issues. ## How can I contribute? @@ -34,7 +34,7 @@ Some rules need to be followed to contribute. - If you are introducing a completely new feature or making any major changes to an existing one, we recommend starting with an RFC and get consensus on the basic design first. - Make sure your local build is running with all the tests and check style passing. - If your change is related to user-facing protocols/configurations, you need to make the corresponding change in the documentation as well. -- Docs live in the code repo under [docs](https://github.com/odpf/dagger/tree/main/docs) so that changes to that can be done in the same PR as changes to the code. +- Docs live in the code repo under [docs](https://github.com/goto/dagger/tree/main/docs) so that changes to that can be done in the same PR as changes to the code. ## Making a pull request diff --git a/docs/docs/contribute/development.md b/docs/docs/contribute/development.md index 7357312c7..d1c313648 100644 --- a/docs/docs/contribute/development.md +++ b/docs/docs/contribute/development.md @@ -41,7 +41,7 @@ Dagger follows a multi-project build structure with multiple sub-projects. This - **`dagger-core`**: The core module in Dagger. Accommodates All the core functionalities like SerDE, SQL execution, Processors and many more. Also efficiently interacts with other subprojects. - **`dagger-common`**: This module contains all the code/contracts that are shared between other submodules. This allows reducing code duplicate and an efficient sharing of code between other submodules. For example, MetricsManagers are part of this submodule since metrics need to be recorded both in `dagger-common` and `dagger-functions`. 
-- **`dagger-functions`**: Submodule that defines all the plugin components in Dagger like [UDFs](https://github.com/odpf/dagger/tree/main/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs) and [Transformers](https://github.com/odpf/dagger/tree/main/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers). +- **`dagger-functions`**: Submodule that defines all the plugin components in Dagger like [UDFs](https://github.com/goto/dagger/tree/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs) and [Transformers](https://github.com/goto/dagger/tree/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers). - **`dagger-tests`**: Integration test framework to test some central end to end flows in Dagger. ## Dependencies Configuration diff --git a/docs/docs/examples/aggregation_tumble_window.md b/docs/docs/examples/aggregation_tumble_window.md index 96be9b397..72b16ac6b 100644 --- a/docs/docs/examples/aggregation_tumble_window.md +++ b/docs/docs/examples/aggregation_tumble_window.md @@ -11,7 +11,7 @@ In this example, we will count the number of booking orders,(as Kafka records) i 2. Clone Dagger repository into your local ```shell - git clone https://github.com/odpf/dagger.git + git clone https://github.com/goto/dagger.git ``` ## Steps diff --git a/docs/docs/examples/deduplication_transformer.md b/docs/docs/examples/deduplication_transformer.md index 05e93a63f..45d51eb42 100644 --- a/docs/docs/examples/deduplication_transformer.md +++ b/docs/docs/examples/deduplication_transformer.md @@ -11,7 +11,7 @@ In this example, we will use the DeDuplication Transformer in Dagger to remove t 2. Clone Dagger repository into your local ```shell - git clone https://github.com/odpf/dagger.git + git clone https://github.com/goto/dagger.git ``` ## Steps diff --git a/docs/docs/examples/distance_java_udf.md b/docs/docs/examples/distance_java_udf.md index f2793e9c5..9a13ad69f 100644 --- a/docs/docs/examples/distance_java_udf.md +++ b/docs/docs/examples/distance_java_udf.md @@ -11,7 +11,7 @@ In this example, we will use a User-Defined Function in Dagger to compute the di 2. Clone Dagger repository into your local ```shell - git clone https://github.com/odpf/dagger.git + git clone https://github.com/goto/dagger.git ``` ## Steps diff --git a/docs/docs/examples/elasticsearch_enrichment.md b/docs/docs/examples/elasticsearch_enrichment.md index 83cfbf67e..101cb3a46 100644 --- a/docs/docs/examples/elasticsearch_enrichment.md +++ b/docs/docs/examples/elasticsearch_enrichment.md @@ -10,7 +10,7 @@ In this example, we will use Dagger Post-processors to enrich the payment transa 2. Clone Dagger repository into your local ```shell - git clone https://github.com/odpf/dagger.git + git clone https://github.com/goto/dagger.git ``` ## Steps diff --git a/docs/docs/examples/kafka_inner_join.md b/docs/docs/examples/kafka_inner_join.md index fdc348371..b43444f08 100644 --- a/docs/docs/examples/kafka_inner_join.md +++ b/docs/docs/examples/kafka_inner_join.md @@ -11,7 +11,7 @@ In this example, we will use the Inner joins in Dagger to join the data streams 2. 
Clone Dagger repository into your local ```shell - git clone https://github.com/odpf/dagger.git + git clone https://github.com/goto/dagger.git ``` ## Steps diff --git a/docs/docs/guides/create_dagger.md b/docs/docs/guides/create_dagger.md index 7e3dce57a..428f50c14 100644 --- a/docs/docs/guides/create_dagger.md +++ b/docs/docs/guides/create_dagger.md @@ -112,7 +112,7 @@ $ java -jar dagger-core/build/libs/dagger-core--fat.jar ConfigFi libraries. You can follow this [tutorial](https://www.conduktor.io/how-to-produce-and-consume-protobuf-records-in-apache-kafka/). - For all kinds of sources, you need to define the [java compiled protobuf schema](https://developers.google.com/protocol-buffers/docs/javatutorial) in the classpath or - use our in-house schema registry tool like [Stencil](https://github.com/odpf/stencil) to let dagger know about the data + use our in-house schema registry tool like [Stencil](https://github.com/goto/stencil) to let dagger know about the data schema. Stencil is an event schema registry that provides an abstraction layer for schema handling, schema caching, and dynamic schema updates. [These configurations](../reference/configuration.md#schema-registry) needs to be set if you are using stencil for proto schema handling. @@ -134,13 +134,13 @@ $ java -jar dagger-core/build/libs/dagger-core--fat.jar ConfigFi ##### `BigQuery Sink` : - BigQuery is a fully managed enterprise data warehouse that helps you manage and analyze your data with built-in features like machine learning, geospatial analysis, and business intelligence.BigQuery's serverless architecture lets you use SQL queries to answer your organization's biggest questions with zero infrastructure management. BigQuery's scalable, distributed analysis engine lets you query terabytes in seconds and petabytes in minutes. - - Bigquery Sink is created using the ODPF Depot library. - - Depot is a sink connector, which acts as a bridge between data processing systems and real sink. You can check out the Depot Github repository [here](https://github.com/odpf/depot/tree/main/docs). + - Bigquery Sink is created using the GOTO Depot library. + - Depot is a sink connector, which acts as a bridge between data processing systems and real sink. You can check out the Depot Github repository [here](https://github.com/goto/depot/tree/main/docs). ## Common Configurations -- These configurations are mandatory for dagger creation and are sink independent. Here you need to set configurations such as the source details, the protobuf schema class, the SQL query to be applied on the streaming data, etc. In local execution, they would be set inside [`local.properties`](https://github.com/odpf/dagger/blob/main/dagger-core/env/local.properties) file. In the clustered environment they can be passed as job parameters to the Flink exposed job creation API. +- These configurations are mandatory for dagger creation and are sink independent. Here you need to set configurations such as the source details, the protobuf schema class, the SQL query to be applied on the streaming data, etc. In local execution, they would be set inside [`local.properties`](https://github.com/goto/dagger/blob/main/dagger-core/env/local.properties) file. In the clustered environment they can be passed as job parameters to the Flink exposed job creation API. - Configuration for a given schema involving a single source is consolidated as a Stream. 
In daggers, you can configure one or more streams for a single job. To know how to configure a stream based on a source, check [here](../reference/configuration.md#streams) - The `FLINK_JOB_ID` defines the name of the flink job. `ROWTIME_ATTRIBUTE_NAME` is the key name of [row time attribute](https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/dev/table/concepts/time_attributes/) required for stream processing. - In clustered mode, you can set up the `parallelism` configuration for distributed processing. @@ -192,7 +192,7 @@ SINK=log - Log sink is mostly used for testing and debugging purpose since it just a print statement for the processed data. This is a sample message produced in the log sink after the simple query above. ``` -INFO io.odpf.dagger.core.sink.log.LogSink - {sample_field=81179979,sample_field_2=81179979, rowtime=2021-05-21 11:55:33.0, event_timestamp=1621598133,0} +INFO LogSink - {sample_field=81179979,sample_field_2=81179979, rowtime=2021-05-21 11:55:33.0, event_timestamp=1621598133,0} ``` ## Influx Sink @@ -241,22 +241,22 @@ OUTPUT_KAFKA_TOPIC=test-kafka-output - BigQuery is a data warehouse capable of quickly running SQL queries over large datasets. -- Bigquery Sink is created using the ODPF Depot library. Depot is a sink connector, which acts as a bridge between data processing systems and real sink. -- You can check out the BigQuery Sink Connector in the Depot Github repository [here](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md). +- Bigquery Sink is created using the GOTO Depot library. Depot is a sink connector, which acts as a bridge between data processing systems and real sink. +- You can check out the BigQuery Sink Connector in the Depot Github repository [here](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md). 
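As a rough sketch, switching a dagger to the BigQuery sink is again only a matter of properties. The sink selector and batch size keys below follow the configuration reference later in this document (`SINK_TYPE`, `SINK_BIGQUERY_BATCH_SIZE`); the remaining Depot BigQuery values are illustrative placeholders, so check the Depot docs linked above for the authoritative key names:

```properties
# Select the BigQuery sink (key name as shown in the configuration reference)
SINK_TYPE=bigquery
# Number of records buffered per BigQuery insert request
SINK_BIGQUERY_BATCH_SIZE=500
# Illustrative Depot BigQuery settings; see the Depot configuration docs for the full list
SINK_BIGQUERY_GOOGLE_CLOUD_PROJECT_ID=my-gcp-project
SINK_BIGQUERY_DATASET_NAME=dagger_output
SINK_BIGQUERY_TABLE_NAME=booking_counts
```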
### BigQuery Sink Features: -- [Datatype Protobuf](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#datatype-protobuf) -- [Datatype JSON](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#datatype-json) +- [Datatype Protobuf](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#datatype-protobuf) +- [Datatype JSON](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#datatype-json) -- [Bigquery Table Schema Update](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#bigquery-table-schema-update) -- [Protobuf - Bigquery Table Type Mapping](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#protobuf---bigquery-table-type-mapping) +- [Bigquery Table Schema Update](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#bigquery-table-schema-update) +- [Protobuf - Bigquery Table Type Mapping](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#protobuf---bigquery-table-type-mapping) -- [Partitioning](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#partitioning) -- [Metadata](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#metadata) -- [Default columns for json data type](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#default-columns-for-json-data-type) -- [Errors Handling](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#errors-handling) -- [Google Cloud Bigquery IAM Permission](https://github.com/odpf/depot/blob/main/docs/sinks/bigquery.md#google-cloud-bigquery-iam-permission) +- [Partitioning](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#partitioning) +- [Metadata](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#metadata) +- [Default columns for json data type](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#default-columns-for-json-data-type) +- [Errors Handling](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#errors-handling) +- [Google Cloud Bigquery IAM Permission](https://github.com/goto/depot/blob/main/docs/sinks/bigquery.md#google-cloud-bigquery-iam-permission) ## Advanced Data Processing diff --git a/docs/docs/guides/deployment.md b/docs/docs/guides/deployment.md index ccd9072d8..d960e87ca 100644 --- a/docs/docs/guides/deployment.md +++ b/docs/docs/guides/deployment.md @@ -15,7 +15,7 @@ To know more about Flink's cluster mode deployment follow [this](https://ci.apac ./gradlew clean build #creating a fat jar -./gradlew :dagger-core:shadowJar +./gradlew :dagger-core:fatJar ``` diff --git a/docs/docs/guides/kafka.md b/docs/docs/guides/kafka.md new file mode 100644 index 000000000..39efaee6b --- /dev/null +++ b/docs/docs/guides/kafka.md @@ -0,0 +1,41 @@ +# Kafka + +Kafka topics are used as the source and output of daggers. Both of source and output kafka configurations are defined through the properties file. + +## Source Kafka Configuration + +There can be multiple source kafka configurations in the properties file. Source configurations are defined through `STREAMS` property. 
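For illustration, a single-stream `STREAMS` value might look like the following (this mirrors the quickstart `local.properties`, trimmed for brevity; in the properties file the whole JSON array sits on one line):

```properties
STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]
```

Each element of the array describes one source; adding more elements makes the dagger consume from multiple streams.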
+Here are the predefined properties for source kafka configuration: + +- SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE +- SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID +- SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS +- SOURCE_KAFKA_CONSUMER_CONFIG_SECURITY_PROTOCOL +- SOURCE_KAFKA_CONSUMER_CONFIG_SASL_MECHANISM +- SOURCE_KAFKA_CONSUMER_CONFIG_SASL_JAAS_CONFIG +- SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE +- SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL + +Additional kafka configuration can be passed through the `SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS` property. This property should be a json key-value map. +For example : +- SOURCE_KAFKA_CONSUMER_ADDITIONAL_CONFIGURATIONS={"SOURCE_KAFKA_CONSUMER_CONFIG_KEY_DESERIALIZER":"org.apache.kafka.common.serialization.StringDeserializer","SOURCE_KAFKA_CONSUMER_CONFIG_VALUE_DESERIALIZER":"org.apache.kafka.common.serialization.StringDeserializer"} + + +## Sink Kafka Configuration + +There is only one sink kafka configuration in the properties file. Sink configuration is defined by properties having `SINK_KAFKA_PRODUCER_CONFIG` prefix. +Here are the predefined properties for sink kafka configuration: +- SINK_KAFKA_LINGER_MS_KEY +- SINK_KAFKA_BROKERS_KEY +- SINK_KAFKA_TOPIC_KEY + +Additional kafka configurations can be passed through by introducing new properties with the `SINK_KAFKA_PRODUCER_CONFIG` prefix. +For example : `SINK_KAFKA_PRODUCER_CONFIG_SASL_LOGIN_CALLBACK_HANDLER_CLASS="io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler"` +Example above will add `sasl.login.callback.handler.class` to the sink kafka configuration. \ No newline at end of file diff --git a/docs/docs/guides/monitoring.md b/docs/docs/guides/monitoring.md index b422d3915..cd3882cfd 100644 --- a/docs/docs/guides/monitoring.md +++ b/docs/docs/guides/monitoring.md @@ -9,12 +9,12 @@ This section will give a brief overview of Dagger's monitoring stack and explain We use Flink's inbuilt metrics reporter to publish application metrics to one of the supported sinks. Other metrics like JMX can be enabled from Flink. Find more details on Flink's metrics reporting and supported sinks [here](https://ci.apache.org/projects/flink/flink-docs-release-1.9/monitoring/metrics.html#reporter). To register new application metrics from the dagger codebase follow [this](https://ci.apache.org/projects/flink/flink-docs-release-1.9/monitoring/metrics.html#registering-metrics/). -We have also included a [custom grafana dashboard](https://github.com/odpf/dagger/blob/main/docs/static/assets/dagger-grafana-dashboard.json) for dagger related [metrics](../reference/metrics.md). +We have also included a [custom grafana dashboard](https://github.com/goto/dagger/blob/main/docs/static/assets/dagger-grafana-dashboard.json) for dagger related [metrics](../reference/metrics.md). Follow [this](https://grafana.com/docs/grafana/latest/dashboards/export-import/) to import this dashboard. ## Dagger Dashboard -This section gives an overview of the important panels/titles in the dagger [dashboard](https://github.com/odpf/dagger/blob/main/docs/static/assets/dagger-grafana-dashboard.json). 
+This section gives an overview of the important panels/titles in the dagger [dashboard](https://github.com/goto/dagger/blob/main/docs/static/assets/dagger-grafana-dashboard.json). Find more about all the panels [here](../reference/metrics.md). #### Overview diff --git a/docs/docs/guides/overview.md b/docs/docs/guides/overview.md index 7c0fbc878..710890e09 100644 --- a/docs/docs/guides/overview.md +++ b/docs/docs/guides/overview.md @@ -22,7 +22,7 @@ Dagger runs inside a Flink cluster which can be set up in some distributed resou ### [Monitoring Dagger with exposed metrics](./monitoring.md) -Dagger support first-class monitoring support. You can get a lot of insights about a running dagger from the pre-exported [monitoring dashboards](https://github.com/odpf/dagger/blob/main/docs/static/assets/dagger-grafana-dashboard.json). This section contains guides, best practices and pieces of advice related to managing Dagger in production. +Dagger support first-class monitoring support. You can get a lot of insights about a running dagger from the pre-exported [monitoring dashboards](https://github.com/goto/dagger/blob/main/docs/static/assets/dagger-grafana-dashboard.json). This section contains guides, best practices and pieces of advice related to managing Dagger in production. ### [Query Examples](./query_examples.md) diff --git a/docs/docs/guides/quickstart.md b/docs/docs/guides/quickstart.md index 6aab77cbc..b69838f24 100644 --- a/docs/docs/guides/quickstart.md +++ b/docs/docs/guides/quickstart.md @@ -14,7 +14,7 @@ Following are the steps for setting up dagger in docker compose - 1. Clone Dagger repository into your local ```shell - git clone https://github.com/odpf/dagger.git + git clone https://github.com/goto/dagger.git ``` 2. cd into the docker-compose directory: ```shell @@ -66,7 +66,7 @@ The dagger environment variables are present in the `local.properties` file insi 1. Clone Dagger repository into your local ```shell -git clone https://github.com/odpf/dagger.git +git clone https://github.com/goto/dagger.git ``` 2. Next, we will generate our proto descriptor set. Ensure you are at the top level directory(`dagger`) and then fire this command @@ -85,11 +85,11 @@ python -m SimpleHTTPServer 8000 This will spin up a mock HTTP server and serve the descriptor set we just generated in the previous step at port 8000. The Stencil client being used in Dagger will fetch it by calling this URL. This has been already configured in `local.properties`, as we have set `SCHEMA_REGISTRY_STENCIL_ENABLE` to true and pointed `SCHEMA_REGISTRY_STENCIL_URLS` to `http://127.0.0.1:8000/dagger-descriptors.bin`. -4. Next, we will generate and send some messages to a sample kafka topic as per some proto schema. Note that, in `local.properties` we have set `INPUT_SCHEMA_PROTO_CLASS` under `STREAMS` to use `io.odpf.dagger.consumer.TestPrimitiveMessage` proto. Hence, we will push messages which conform to this schema into the topic. For doing this, please follow these steps: +4. Next, we will generate and send some messages to a sample kafka topic as per some proto schema. Note that, in `local.properties` we have set `INPUT_SCHEMA_PROTO_CLASS` under `STREAMS` to use `com.gotocompany.dagger.consumer.TestPrimitiveMessage` proto. Hence, we will push messages which conform to this schema into the topic. For doing this, please follow these steps: 1. `cd` into the directory `dagger-common/src/test/proto`. 
You should see a text file `sample_message.txt` which contains just one message. We will encode it into a binary in protobuf format. 2. Fire this command: ```protobuf - protoc --proto_path=./ --encode=io.odpf.dagger.consumer.TestPrimitiveMessage ./TestLogMessage.proto < ./sample_message.txt > out.bin + protoc --proto_path=./ --encode=com.gotocompany.dagger.consumer.TestPrimitiveMessage ./TestLogMessage.proto < ./sample_message.txt > out.bin ``` This will generate a binary file called `out.bin`. It contains the binary encoded message of `sample_message.txt`. diff --git a/docs/docs/guides/troubleshooting.md b/docs/docs/guides/troubleshooting.md index a28b7a528..bc8830641 100644 --- a/docs/docs/guides/troubleshooting.md +++ b/docs/docs/guides/troubleshooting.md @@ -80,7 +80,7 @@ access the bucket. ./bin/kafka-topics.sh --list --zookeeper localhost:2181 ``` -- `Data verification`: You can use any standard Kafka consumer that supports protobuf encoding in data to verify this. We have a custom consumer called [firehose](https://github.com/odpf/firehose). You can run a simple log sink firehose for this. +- `Data verification`: You can use any standard Kafka consumer that supports protobuf encoding in data to verify this. We have a custom consumer called [firehose](https://github.com/goto/firehose). You can run a simple log sink firehose for this. ### Why can I not find the Topic created by Daggers? diff --git a/docs/docs/guides/use_transformer.md b/docs/docs/guides/use_transformer.md index 3deb05cfb..541ed7dd3 100644 --- a/docs/docs/guides/use_transformer.md +++ b/docs/docs/guides/use_transformer.md @@ -17,7 +17,7 @@ In this section, we will know more about transformers, how to use them and how y - There are some transformers to solve some generic use cases pre-built in the dagger. -- All the pre-supported transformers present in the `dagger-functions` sub-module in [this](https://github.com/odpf/dagger/tree/main/dagger-functions/src/main/java/io/odpf/dagger/functions/transformers) directory. Find more details about each of the existing transformers and some sample examples [here](../reference/transformers.md). +- All the pre-supported transformers present in the `dagger-functions` sub-module in [this](https://github.com/goto/dagger/tree/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/transformers) directory. Find more details about each of the existing transformers and some sample examples [here](../reference/transformers.md). - In case any of the predefined transformers do not meet your requirement, you can create your custom Transformers by extending some contract. Follow this [contribution guidelines](../contribute/add_transformer.md) on how to add a transformer in dagger. @@ -38,7 +38,7 @@ In this section, we will know more about transformers, how to use them and how y ], "transformers": [ { - "transformation_class": "io.odpf.dagger.functions.transformers.HashTransformer", + "transformation_class": "HashTransformer", "transformation_arguments": { "maskColumns": [ "data_2", diff --git a/docs/docs/guides/use_udf.md b/docs/docs/guides/use_udf.md index a8e049513..3f9953e48 100644 --- a/docs/docs/guides/use_udf.md +++ b/docs/docs/guides/use_udf.md @@ -26,9 +26,9 @@ Some of the use-cases can not be solved using Flink SQL & the Apache Calcite fun Maps zero or more values to multiple rows and each row may have multiple columns. 
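Custom UDFs that live outside this repository are wired in through the `FUNCTION_FACTORY_CLASSES` property (see the contribution guide above); a minimal sketch, where the second factory class is a hypothetical external one:

```properties
# Built-in factory (the default) plus a hypothetical custom factory, comma-separated
FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory,com.example.dagger.udfs.MyUdfFactory
```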
-All the supported java udfs present in the `dagger-functions` subproject in [this](https://github.com/odpf/dagger/tree/main/dagger-functions/src/main/java/io/odpf/dagger/functions/udfs) directory. +All the supported java udfs present in the `dagger-functions` subproject in [this](https://github.com/goto/dagger/tree/main/dagger-functions/src/main/java/com/gotocompany/dagger/functions/udfs) directory. -All the supported python udfs present in the [dagger-py-functions](https://github.com/odpf/dagger/tree/main/dagger-py-functions/udfs/) directory. +All the supported python udfs present in the [dagger-py-functions](https://github.com/goto/dagger/tree/main/dagger-py-functions/udfs/) directory. Follow [this](../reference/udfs.md) to find more details about the already supported UDFs in the dagger. diff --git a/docs/docs/intro.md b/docs/docs/intro.md index 758d0ad42..552641af8 100644 --- a/docs/docs/intro.md +++ b/docs/docs/intro.md @@ -29,7 +29,7 @@ Discover why to use Dagger - [Data Deduplication with Transformer](./reference/transformers.md#DeDuplicationTransformer) - [Realtime long window processing with Longbow](./advance/longbow.md) -To know more, follow the detailed [documentation](https://odpf.gitbook.io/dagger). +To know more, follow the detailed [documentation](https://goto.gitbook.io/dagger). ## Where to go from here diff --git a/docs/docs/reference/configuration.md b/docs/docs/reference/configuration.md index 4426419a0..8c632c2c9 100644 --- a/docs/docs/reference/configuration.md +++ b/docs/docs/reference/configuration.md @@ -99,6 +99,68 @@ Defines the SASL Java Authentication and Authorization Service (JAAS) Config use * Example value: `org.apache.kafka.common.security.scram.ScramLoginModule required username="admin" password="admin";` * Type: `optional` required only for ACL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` if static JAAS configuration system property `java.security.auth.login.config` is not configured in flink cluster. +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_PROTOCOL` + +Defines the security protocol used to communicate with SSL enabled kafka. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.protocol) +Dagger supported values are: TLSv1.2, TLSv1.3, TLS, TLSv1.1, SSL, SSLv2 and SSLv3 + +* Example value 1: `SSL` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +* Example value 2: `TLS` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEY_PASSWORD` + +Defines the SSL Key Password for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.key.password) + +* Example value: `myKeyPass` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_LOCATION` + +Defines the SSL KeyStore location or path for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.keystore.location) + +* Example value: `myKeyStore.jks` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_PASSWORD` + +Defines the SSL KeyStore password for Kafka source. 
Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.keystore.password) + +* Example value: `myKeyStorePass` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_KEYSTORE_TYPE` + +Defines the SSL KeyStore Type like JKS, PKCS12 etc for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.keystore.type) +Dagger supported values are: JKS, PKCS12, PEM + +* Example value: `JKS` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_LOCATION` + +Defines the SSL TrustStore location or path for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.truststore.location) + +* Example value: `myTrustStore.jks` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_PASSWORD` + +Defines the SSL TrustStore password for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.truststore.password) + +* Example value: `myTrustStorePass` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + +##### `SOURCE_KAFKA_CONSUMER_CONFIG_SSL_TRUSTSTORE_TYPE` + +Defines the SSL TrustStore Type like JKS, PKCS12 for Kafka source. Find more details on this config [here](https://kafka.apache.org/documentation/#brokerconfigs_ssl.truststore.type) +Dagger supported values are: JKS, PKCS12, PEM + +* Example value: `JKS` +* Type: `optional` required only for SSL enabled `KAFKA_CONSUMER` or `KAFKA_SOURCE` + ##### `SOURCE_KAFKA_CONFIG_AUTO_COMMIT_ENABLE` Enable/Disable Kafka consumer auto-commit. Find more details on this config [here](https://kafka.apache.org/documentation/#consumerconfigs_enable.auto.commit). @@ -250,9 +312,9 @@ Defines the time attribute field name on the data stream. Find more details on t Defines the factory class of the UDF. Multiple factory classes could be given in a comma-separated format. -* Example value: `io.odpf.dagger.functions.udfs.factories.FunctionFactory` +* Example value: `FunctionFactory` * Type: `Optional` -* Default value: `io.odpf.dagger.functions.udfs.factories.FunctionFactory` +* Default value: `FunctionFactory` #### `SQL_QUERY` @@ -379,10 +441,18 @@ Enable/Disable to produce large messages to Kafka. by default, it's configuratio * Type: `optional` * Default value: `false` +#### `SINK_KAFKA_LINGER_MS` + +Defines the max interval in milliseconds, the producer will wait for the sink/producer buffer to fill. + +* Example value: `1000` +* Type: `optional` +* Default value: `0` + ### BigQuery Sink A BigQuery sink Dagger (`SINK_TYPE=bigquery`) requires following env variables to be set along with the Generic Dagger env variables, as well as the -[Generic](https://github.com/odpf/depot/blob/main/docs/reference/configuration/generic.md) and [BigQuery ](https://github.com/odpf/depot/blob/main/docs/reference/configuration/bigquery-sink.md)env variables in the ODPF Depot repository, since Dagger uses the BigQuery sink connector implementation available in [Depot](https://github.com/odpf/depot) repository. 
+[Generic](https://github.com/goto/depot/blob/main/docs/reference/configuration/generic.md) and [BigQuery ](https://github.com/goto/depot/blob/main/docs/reference/configuration/bigquery-sink.md)env variables in the GOTO Depot repository, since Dagger uses the BigQuery sink connector implementation available in [Depot](https://github.com/goto/depot) repository. #### `SINK_BIGQUERY_BATCH_SIZE` @@ -399,7 +469,7 @@ Contains the error types for which the dagger should throw an exception if such ### Schema Registry -Stencil is dynamic schema registry for protobuf. Find more details about Stencil [here](https://github.com/odpf/stencil#stencil). +Stencil is dynamic schema registry for protobuf. Find more details about Stencil [here](https://github.com/goto/stencil#stencil). #### `SCHEMA_REGISTRY_STENCIL_ENABLE` @@ -416,21 +486,53 @@ Defines the stencil URL. Multiple URLs could be given in a comma-separated forma * Example value: `http://localhost:8000/testproto.desc` * Type: `required` -#### `SCHEMA_REGISTRY_STENCIL_REFRESH_CACHE` +#### `SCHEMA_REGISTRY_STENCIL_FETCH_TIMEOUT_MS` -Enable/Disable the stencil refresh cache. +Defines the timeout in milliseconds while fetching the descriptor set from the Stencil server. -* Example value: `false` +* Example value: `607800` +* Type: `optional` +* Default value: `60000` + +#### `SCHEMA_REGISTRY_STENCIL_CACHE_AUTO_REFRESH` + +Defines whether to enable/disable the auto schema refresh. Please note that auto schema refresh will only work for additions in Enum types in the Proto. It will not fail for other scenarios but it will just ignore any new field additions at the root or nested level, unless the job is restarted. + +* Example value: `true` * Type: `optional` * Default value: `false` -#### `SCHEMA_REGISTRY_STENCIL_TIMEOUT_MS` +#### `SCHEMA_REGISTRY_STENCIL_REFRESH_STRATEGY` + +Defines the schema refresh strategy i.e. `VERSION_BASED_REFRESH` or `LONG_POLLING` when auto schema refresh is enabled. Please note that if the schema refresh strategy is set to `VERSION_BASED_REFRESH` then the `SCHEMA_REGISTRY_STENCIL_URLS` should not be a versioned URL, i.e. it should not have `/versions/xx` at the end. Also note that `VERSION_BASED_REFRESH` strategy will only work if you are using a Stencil server as the schema registry. -Defines the stencil timeout in milliseconds. +* Example value: `VERSION_BASED_REFRESH` +* Type: `optional` +* Default value: `LONG_POLLING` + +#### `SCHEMA_REGISTRY_STENCIL_CACHE_TTL_MS` + +Defines the ttl in milliseconds of the Stencil schema cache after which it will fetch the new descriptors. * Example value: `60000` * Type: `optional` -* Default value: `60000` +* Default value: `7200000` + +#### `SCHEMA_REGISTRY_STENCIL_FETCH_BACKOFF_MIN_MS` + +Defines the time interval in milliseconds for after which the stencil client will retry to fetch the descriptors after the first failed attempt. + +* Example value: `7000` +* Type: `optional` +* Default value: `5000` + +#### `SCHEMA_REGISTRY_STENCIL_FETCH_RETRIES` + +Defines the maximum no. of retries to fetch the descriptors from the Stencil server. + +* Example value: `7` +* Type: `optional` +* Default value: `4` ### Flink @@ -677,7 +779,7 @@ Example: You should set the path name to `data/sample.txt` on the udf to be able open the files. 
-* Example how to use this, can be found in this [udf](https://github.com/odpf/dagger/tree/main/dagger-py-functions/udfs/scalar/sample.py) +* Example how to use this, can be found in this [udf](https://github.com/goto/dagger/tree/main/dagger-py-functions/udfs/scalar/sample.py) * Type: `optional` * Default value: `(none)` diff --git a/docs/docs/reference/transformers.md b/docs/docs/reference/transformers.md index 869b7f551..55ffbef84 100644 --- a/docs/docs/reference/transformers.md +++ b/docs/docs/reference/transformers.md @@ -14,7 +14,7 @@ This page contains references for all the custom transformers available on Dagge ### ClearColumnTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.ClearColumnTransformer` + * `ClearColumnTransformer` * Contract: * After Selecting columns by SQL, you need to reselect the desired columns with the help of an internal source. Following transformation arguments can be passed: * `targetColumnName`: The field that needs to be cleared. @@ -43,7 +43,7 @@ This page contains references for all the custom transformers available on Dagge ], "transformers": [ { - "transformation_class": "io.odpf.dagger.functions.transformers.ClearColumnTransformer", + "transformation_class": "ClearColumnTransformer", "transformation_arguments": { "targetColumnName": "data1" } @@ -54,7 +54,7 @@ This page contains references for all the custom transformers available on Dagge ### DeDuplicationTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.DeDuplicationTransformer` + * `DeDuplicationTransformer` * Contract: * After Selecting columns by SQL, you need to reselect the desired columns with the help of an internal source. Following transformation arguments can be passed: * `key_column`: This value will be used as the deduplication key (other events with the same key will be stopped). @@ -92,7 +92,7 @@ This page contains references for all the custom transformers available on Dagge "key_column": "data1", "ttl_in_seconds": "3600" }, - "transformation_class": "io.odpf.dagger.functions.transformers.DeDuplicationTransformer" + "transformation_class": "DeDuplicationTransformer" } ] } @@ -100,7 +100,7 @@ This page contains references for all the custom transformers available on Dagge ### FeatureTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.FeatureTransformer` + * `FeatureTransformer` * Contract: * After Selecting columns by SQL, you need to reselect the desired columns with the help of an internal source. Following transformation arguments can be passed: * `keyColumnName`: This value will be used to form the key of the feature. @@ -133,7 +133,7 @@ This page contains references for all the custom transformers available on Dagge "keyColumnName": "data1", "valueColumnName": "features" }, - "transformation_class": "io.odpf.dagger.functions.transformers.FeatureTransformer" + "transformation_class": "FeatureTransformer" } ] } @@ -141,7 +141,7 @@ This page contains references for all the custom transformers available on Dagge ### FeatureWithTypeTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.FeatureWithTypeTransformer` + * `FeatureWithTypeTransformer` * Contract: * After Selecting columns by SQL, you need to reselect the desired columns with the help of an internal source. 
Following transformation arguments can be passed: * `outputColumnName`: The column where the final feature will be written and `FeatureRow` are synonyms with `FeaturesWithType` UDF and a single feature is represented by an element in an array. @@ -179,7 +179,7 @@ This page contains references for all the custom transformers available on Dagge ], "transformers": [ { - "transformation_class": "io.odpf.dagger.functions.transformers.FeatureTransformer", + "transformation_class": "FeatureTransformer", "transformation_arguments": { "outputColumnName": "features", "data": [ @@ -197,7 +197,7 @@ This page contains references for all the custom transformers available on Dagge ### HashTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.HashTransformer` + * `HashTransformer` * Contract: * After Selecting columns by SQL, you need to reselect the desired columns with the help of an internal source. Following transformation arguments can be passed: * `maskColumns`: A list of fields that need to be encrypted/masked. @@ -228,7 +228,7 @@ This page contains references for all the custom transformers available on Dagge ], "transformers": [ { - "transformation_class": "io.odpf.dagger.functions.transformers.HashTransformer", + "transformation_class": "HashTransformer", "transformation_arguments": { "maskColumns": [ "test_data.data1" @@ -241,7 +241,7 @@ This page contains references for all the custom transformers available on Dagge ### InvalidRecordFilterTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.InvalidRecordFilterTransformer` + * `InvalidRecordFilterTransformer` * Contract: * Following transformation arguments can be passed: * `transformation_arguments`: A key-value map required for parameters required for the custom transformation class. @@ -266,7 +266,7 @@ This page contains references for all the custom transformers available on Dagge "table_name": "testtable", "transformers": [ { - "transformation_class": "io.odpf.dagger.functions.transformers.InvalidRecordFilterTransformer", + "transformation_class": "InvalidRecordFilterTransformer", "transformation_arguments": "testtable" } ] @@ -277,7 +277,7 @@ This page contains references for all the custom transformers available on Dagge ### SQLTransformer * Transformation Class: - * `io.odpf.dagger.functions.transformers.SQLTransformer` + * `SQLTransformer` * Contract: * After Selecting columns by SQL, you need to reselect the desired columns with the help of an internal source. Following transformation arguments can be passed: * `sqlQuery`: The SQL query for transformation @@ -318,7 +318,7 @@ This page contains references for all the custom transformers available on Dagge ], "transformers": [ { - "transformation_class": "io.odpf.dagger.functions.transformers.SQLTransformer", + "transformation_class": "SQLTransformer", "transformation_arguments": { "sqlQuery": "SELECT count(distinct data1) AS `count`, data2, TUMBLE_END(rowtime, INTERVAL '60' SECOND) AS event_timestamp FROM data_stream group by TUMBLE (rowtime, INTERVAL '60' SECOND), data2" } diff --git a/docs/docs/roadmap.md b/docs/docs/roadmap.md index 36099a505..f538700bd 100644 --- a/docs/docs/roadmap.md +++ b/docs/docs/roadmap.md @@ -4,9 +4,9 @@ sidebar_position: 2 # Roadmap -In the following section you can learn about what features we're working on, what stage they're in, and when we expect to bring them to you. Have any questions or comments about items on the roadmap? 
Join the [discussions](https://github.com/odpf/dagger/discussions) on Dagger Github forum. +In the following section you can learn about what features we're working on, what stage they're in, and when we expect to bring them to you. Have any questions or comments about items on the roadmap? Join the [discussions](https://github.com/goto/dagger/discussions) on Dagger Github forum. -We’re planning to iterate on the format of the roadmap itself, and we see potential to engage more in discussions about the future of Dagger features. If you have feedback about this roadmap section itself, such as how the issues are presented, let us know through [discussions](https://github.com/odpf/dagger/discussions). +We’re planning to iterate on the format of the roadmap itself, and we see potential to engage more in discussions about the future of Dagger features. If you have feedback about this roadmap section itself, such as how the issues are presented, let us know through [discussions](https://github.com/goto/dagger/discussions). ### Dagger 0.2 * Flink upgrade from 1.9 to 1.13 diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index af4af1cce..38749ce2d 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -6,12 +6,12 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); (module.exports = { title: 'Dagger', tagline: 'Stream processing framework', - url: 'https://odpf.github.io/', + url: 'https://goto.github.io/', baseUrl: '/dagger/', onBrokenLinks: 'throw', onBrokenMarkdownLinks: 'warn', favicon: 'img/favicon.ico', - organizationName: 'ODPF', + organizationName: 'GOTO', projectName: 'dagger', presets: [ @@ -20,12 +20,12 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); ({ docs: { sidebarPath: require.resolve('./sidebars.js'), - editUrl: 'https://github.com/odpf/dagger/edit/master/docs/', + editUrl: 'https://github.com/goto/dagger/edit/master/docs/', }, blog: { showReadingTime: true, editUrl: - 'https://github.com/odpf/optimus/edit/master/docs/blog/', + 'https://github.com/goto/optimus/edit/master/docs/blog/', }, theme: { customCss: [ @@ -65,7 +65,7 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); className: 'header-slack-link', }, { - href: 'https://github.com/odpf/dagger', + href: 'https://github.com/goto/dagger', className: 'navbar-item-github', position: 'right', }, @@ -77,10 +77,10 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); { title: 'Products', items: [ - { label: 'Optimus', href: 'https://github.com/odpf/optimus' }, - { label: 'Firehose', href: 'https://github.com/odpf/firehose' }, - { label: 'Raccoon', href: 'https://github.com/odpf/raccoon' }, - { label: 'Stencil', href: 'https://github.com/odpf/stencil' }, + { label: 'Optimus', href: 'https://github.com/goto/optimus' }, + { label: 'Firehose', href: 'https://github.com/goto/firehose' }, + { label: 'Raccoon', href: 'https://github.com/goto/raccoon' }, + { label: 'Stencil', href: 'https://github.com/goto/stencil' }, ], }, { @@ -95,7 +95,7 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); title: 'Community', items: [ { label: 'Slack', href: 'https://bit.ly/2RzPbtn' }, - { label: 'GitHub', href: 'https://github.com/odpf/dagger' } + { 
label: 'GitHub', href: 'https://github.com/goto/dagger' } ], }, ], @@ -109,7 +109,7 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula'); }, announcementBar: { id: 'star-repo', - content: '⭐️ If you like Dagger, give it a star on GitHub! ⭐', + content: '⭐️ If you like Dagger, give it a star on GitHub! ⭐', backgroundColor: '#222', textColor: '#eee', isCloseable: true, diff --git a/docs/src/pages/help.js b/docs/src/pages/help.js index 7e77a47ed..e90891a5d 100644 --- a/docs/src/pages/help.js +++ b/docs/src/pages/help.js @@ -19,21 +19,12 @@ export default function Home() { - The Dagger team has an open source slack workspace to discuss development and support. - Most of the Dagger discussions happen in #dagger channel. -
Join us on Slack - ) - }, { title: 'GitHub Issues', content: (
Have a general issue or bug that you've found? We'd love to hear about it in our GitHub issues. This can be feature requests too! -
Go to issues +
Go to issues
) }, @@ -42,7 +33,7 @@ export default function Home() { content: (
For help and questions about best practices, join our GitHub discussions. Browse and ask questions. -
Go to discussions +
Go to discussions
) } diff --git a/docs/static/assets/dagger-grafana-dashboard.json b/docs/static/assets/dagger-grafana-dashboard.json index 26d066fde..d9e18598d 100644 --- a/docs/static/assets/dagger-grafana-dashboard.json +++ b/docs/static/assets/dagger-grafana-dashboard.json @@ -13767,7 +13767,7 @@ ] }, "timezone": "browser", - "title": "ODPF Dagger Timeboard open source", + "title": "GOTO Dagger Timeboard open source", "uid": "Y3jyYmm7k", "version": 4 } diff --git a/quickstart/docker-compose/compose.yaml b/quickstart/docker-compose/compose.yaml index ddf3e62e7..91c610d09 100644 --- a/quickstart/docker-compose/compose.yaml +++ b/quickstart/docker-compose/compose.yaml @@ -127,7 +127,7 @@ services: yes Y | apt install gradle echo -e 'Finished installing all dependencies' /var/lib/dpkg/info/ca-certificates-java.postinst configure - git clone https://github.com/odpf/dagger + git clone https://github.com/goto/dagger cp /resources/local.properties /dagger/dagger-core/env/ cd dagger echo -e 'Building and starting Dagger Job' diff --git a/quickstart/docker-compose/resources/TestLogMessage.proto b/quickstart/docker-compose/resources/TestLogMessage.proto index 16f0cb893..a5e45c77b 100644 --- a/quickstart/docker-compose/resources/TestLogMessage.proto +++ b/quickstart/docker-compose/resources/TestLogMessage.proto @@ -1,9 +1,9 @@ syntax = "proto3"; -package io.odpf.dagger.consumer; +package com.gotocompany.dagger.consumer; option java_multiple_files = true; -option java_package = "io.odpf.dagger.consumer"; +option java_package = "com.gotocompany.dagger.consumer"; option java_outer_classname = "TestLogMessageProto"; import "google/protobuf/struct.proto"; diff --git a/quickstart/docker-compose/resources/kafkafeeder.sh b/quickstart/docker-compose/resources/kafkafeeder.sh index 772afdc9a..50133a443 100755 --- a/quickstart/docker-compose/resources/kafkafeeder.sh +++ b/quickstart/docker-compose/resources/kafkafeeder.sh @@ -5,4 +5,4 @@ random_enum_index=$(($RANDOM %3)) declare -a myArray=("FLIGHT" "BUS" "TRAIN") cat sample_message.txt | \ sed "s/replace_timestamp_here/$timestamp_now/g; s/replace_service_type_here/${myArray[$random_enum_index]}/g; s/replace_customer_suffix_here/$random_3char_suffix/g" | \ -protoc --proto_path=./ --encode=io.odpf.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin +protoc --proto_path=./ --encode=com.gotocompany.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin diff --git a/quickstart/docker-compose/resources/local.properties b/quickstart/docker-compose/resources/local.properties index 07a729ee5..4bc3950c3 100644 --- a/quickstart/docker-compose/resources/local.properties +++ b/quickstart/docker-compose/resources/local.properties @@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT count(1) as booking_count, TUMBLE_END(rowtime, INTERVAL ' FLINK_WATERMARK_INTERVAL_MS=10000 FLINK_WATERMARK_DELAY_MS=1000 # == Input Stream == -STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}] 
+STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]

 # == Preprocessor ==
 PROCESSOR_PREPROCESSOR_ENABLE=false
@@ -25,7 +25,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000
 METRIC_TELEMETRY_ENABLE=true

 # == Others ==
-FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory
+FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory
 FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime

 # == Python Udf ==
diff --git a/quickstart/examples/aggregation/tumble_window/README.md b/quickstart/examples/aggregation/tumble_window/README.md
index 060f7d98c..835149dbb 100644
--- a/quickstart/examples/aggregation/tumble_window/README.md
+++ b/quickstart/examples/aggregation/tumble_window/README.md
@@ -11,7 +11,7 @@ In this example, we will count the number of booking orders,(as Kafka records) i
 2. Clone Dagger repository into your local

 ```shell
- git clone https://github.com/odpf/dagger.git
+ git clone https://github.com/goto/dagger.git
 ```

 ## Steps
diff --git a/quickstart/examples/aggregation/tumble_window/compose.yaml b/quickstart/examples/aggregation/tumble_window/compose.yaml
index 057e4d715..c89047fb4 100644
--- a/quickstart/examples/aggregation/tumble_window/compose.yaml
+++ b/quickstart/examples/aggregation/tumble_window/compose.yaml
@@ -119,7 +119,7 @@ services:
 yes Y | apt install openjdk-8-jdk
 yes Y | apt install gradle
 /var/lib/dpkg/info/ca-certificates-java.postinst configure
- git clone https://github.com/odpf/dagger
+ git clone https://github.com/goto/dagger
 cp /resources/local.properties /dagger/dagger-core/env/
 cd dagger
 ./gradlew runFlink
diff --git a/quickstart/examples/aggregation/tumble_window/resources/TestLogMessage.proto b/quickstart/examples/aggregation/tumble_window/resources/TestLogMessage.proto
index 16f0cb893..a5e45c77b 100644
--- a/quickstart/examples/aggregation/tumble_window/resources/TestLogMessage.proto
+++ b/quickstart/examples/aggregation/tumble_window/resources/TestLogMessage.proto
@@ -1,9 +1,9 @@
 syntax = "proto3";

-package io.odpf.dagger.consumer;
+package com.gotocompany.dagger.consumer;

 option java_multiple_files = true;
-option java_package = "io.odpf.dagger.consumer";
+option java_package = "com.gotocompany.dagger.consumer";
 option java_outer_classname = "TestLogMessageProto";

 import "google/protobuf/struct.proto";
diff --git a/quickstart/examples/aggregation/tumble_window/resources/kafkafeeder.sh b/quickstart/examples/aggregation/tumble_window/resources/kafkafeeder.sh
index 772afdc9a..50133a443 100755
--- a/quickstart/examples/aggregation/tumble_window/resources/kafkafeeder.sh
+++ b/quickstart/examples/aggregation/tumble_window/resources/kafkafeeder.sh
@@ -5,4 +5,4 @@ random_enum_index=$(($RANDOM %3))
 declare -a myArray=("FLIGHT" "BUS" "TRAIN")
 cat sample_message.txt | \
 sed "s/replace_timestamp_here/$timestamp_now/g; s/replace_service_type_here/${myArray[$random_enum_index]}/g; s/replace_customer_suffix_here/$random_3char_suffix/g" | \
-protoc --proto_path=./ --encode=io.odpf.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
+protoc --proto_path=./ --encode=com.gotocompany.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
diff --git a/quickstart/examples/aggregation/tumble_window/resources/local.properties b/quickstart/examples/aggregation/tumble_window/resources/local.properties
index 07a729ee5..4bc3950c3 100644
--- a/quickstart/examples/aggregation/tumble_window/resources/local.properties
+++ b/quickstart/examples/aggregation/tumble_window/resources/local.properties
@@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT count(1) as booking_count, TUMBLE_END(rowtime, INTERVAL '
 FLINK_WATERMARK_INTERVAL_MS=10000
 FLINK_WATERMARK_DELAY_MS=1000
 # == Input Stream ==
-STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]
+STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]

 # == Preprocessor ==
 PROCESSOR_PREPROCESSOR_ENABLE=false
@@ -25,7 +25,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000
 METRIC_TELEMETRY_ENABLE=true

 # == Others ==
-FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory
+FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory
 FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime

 # == Python Udf ==
diff --git a/quickstart/examples/enrichment/elasticsearch_enrichment/README.md b/quickstart/examples/enrichment/elasticsearch_enrichment/README.md
index 83cfbf67e..101cb3a46 100644
--- a/quickstart/examples/enrichment/elasticsearch_enrichment/README.md
+++ b/quickstart/examples/enrichment/elasticsearch_enrichment/README.md
@@ -10,7 +10,7 @@ In this example, we will use Dagger Post-processors to enrich the payment transa
 2. Clone Dagger repository into your local

 ```shell
- git clone https://github.com/odpf/dagger.git
+ git clone https://github.com/goto/dagger.git
 ```

 ## Steps
diff --git a/quickstart/examples/enrichment/elasticsearch_enrichment/compose.yaml b/quickstart/examples/enrichment/elasticsearch_enrichment/compose.yaml
index eaa132e8c..cbec7ac41 100644
--- a/quickstart/examples/enrichment/elasticsearch_enrichment/compose.yaml
+++ b/quickstart/examples/enrichment/elasticsearch_enrichment/compose.yaml
@@ -67,7 +67,7 @@ services:
 yes Y | apt-get install python3
 cd resources
 echo wwww
- protoc --descriptor_set_out=file.desc --include_imports io/odpf/dagger/consumer/*.proto
+ protoc --descriptor_set_out=file.desc --include_imports com/gotocompany/dagger/consumer/*.proto
 yes Y | apt-get install python3-pip
 python3 -m pip install simple_http_server
 python3 -m http.server 2917
@@ -148,7 +148,7 @@ services:
 yes Y | apt install openjdk-8-jdk
 yes Y | apt install gradle
 /var/lib/dpkg/info/ca-certificates-java.postinst configure
- git clone https://github.com/odpf/dagger
+ git clone https://github.com/goto/dagger
 cp /resources/local.properties /dagger/dagger-core/env/
 cd dagger
 ./gradlew runFlink
diff --git a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/io/odpf/dagger/consumer/EnrichedBookingLogMessage.proto b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/com/gotocompany/dagger/consumer/EnrichedBookingLogMessage.proto
similarity index 85%
rename from quickstart/examples/enrichment/elasticsearch_enrichment/resources/io/odpf/dagger/consumer/EnrichedBookingLogMessage.proto
rename to quickstart/examples/enrichment/elasticsearch_enrichment/resources/com/gotocompany/dagger/consumer/EnrichedBookingLogMessage.proto
index fd0ae69ba..4710b1947 100644
--- a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/io/odpf/dagger/consumer/EnrichedBookingLogMessage.proto
+++ b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/com/gotocompany/dagger/consumer/EnrichedBookingLogMessage.proto
@@ -1,16 +1,16 @@
 syntax = "proto3";

-package io.odpf.dagger.consumer;
+package com.gotocompany.dagger.consumer;

 option java_multiple_files = true;
-option java_package = "io.odpf.dagger.consumer";
+option java_package = "com.gotocompany.dagger.consumer";
 option java_outer_classname = "EnrichedBookingLogMessageProto";

 import "google/protobuf/timestamp.proto";
-import "io/odpf/dagger/consumer/TestLogMessage.proto";
+import "com/gotocompany/dagger/consumer/TestLogMessage.proto";

 message EnrichedBookingLogMessage {
- io.odpf.dagger.consumer.TestBookingLogMessage booking_log = 1;
+ com.gotocompany.dagger.consumer.TestBookingLogMessage booking_log = 1;
 CustomerLogMessage customer_profile = 2;
 google.protobuf.Timestamp event_timestamp = 3;
 }
diff --git a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/io/odpf/dagger/consumer/TestLogMessage.proto b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/com/gotocompany/dagger/consumer/TestLogMessage.proto
similarity index 95%
rename from quickstart/examples/enrichment/elasticsearch_enrichment/resources/io/odpf/dagger/consumer/TestLogMessage.proto
rename to quickstart/examples/enrichment/elasticsearch_enrichment/resources/com/gotocompany/dagger/consumer/TestLogMessage.proto
index 16f0cb893..a5e45c77b 100644
--- a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/io/odpf/dagger/consumer/TestLogMessage.proto
+++ b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/com/gotocompany/dagger/consumer/TestLogMessage.proto
@@ -1,9 +1,9 @@
 syntax = "proto3";

-package io.odpf.dagger.consumer;
+package com.gotocompany.dagger.consumer;

 option java_multiple_files = true;
-option java_package = "io.odpf.dagger.consumer";
+option java_package = "com.gotocompany.dagger.consumer";
 option java_outer_classname = "TestLogMessageProto";

 import "google/protobuf/struct.proto";
diff --git a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/kafkafeeder.sh b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/kafkafeeder.sh
index ac4f535eb..655694c32 100755
--- a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/kafkafeeder.sh
+++ b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/kafkafeeder.sh
@@ -5,4 +5,4 @@ random_customer_id=$(($RANDOM %12))
 declare -a myArray=("FLIGHT" "BUS" "TRAIN")
 cat sample_message.txt | \
 sed "s/replace_timestamp_here/$timestamp_now/g; s/replace_service_type_here/${myArray[$random_enum_index]}/g; s/replace_customer_suffix_here/$random_customer_id/g" | \
-protoc --proto_path=io/odpf/dagger/consumer/ --encode=io.odpf.dagger.consumer.TestBookingLogMessage io/odpf/dagger/consumer/TestLogMessage.proto > message.bin
+protoc --proto_path=com/gotocompany/dagger/consumer/ --encode=com.gotocompany.dagger.consumer.TestBookingLogMessage com/gotocompany/dagger/consumer/TestLogMessage.proto > message.bin
diff --git a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/local.properties b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/local.properties
index a4976d7b4..204dd6cd2 100644
--- a/quickstart/examples/enrichment/elasticsearch_enrichment/resources/local.properties
+++ b/quickstart/examples/enrichment/elasticsearch_enrichment/resources/local.properties
@@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT * FROM `data_stream_0`
 FLINK_WATERMARK_INTERVAL_MS=10
 FLINK_WATERMARK_DELAY_MS=0
 # == Input Stream ==
-STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]
+STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]

 # == Preprocessor ==
 PROCESSOR_PREPROCESSOR_ENABLE=false
@@ -11,7 +11,7 @@ PROCESSOR_PREPROCESSOR_CONFIG={}

 # == Postprocessor ==
 PROCESSOR_POSTPROCESSOR_ENABLE=true
-PROCESSOR_POSTPROCESSOR_CONFIG={ "external_source": { "es": [ { "capacity": "10", "connect_timeout": "5000", "endpoint_pattern": "/customers/_doc/%s", "endpoint_variables": "customer_id", "host": "elasticsearch", "output_mapping": { "customer_profile": { "path": "$._source" } }, "port": "9200", "retry_timeout": "5000", "socket_timeout": "6000", "stream_timeout": "5000", "type": "io.odpf.dagger.consumer.EnrichedBookingLogMessage" } ] }, "internal_source": [ { "output_field": "booking_log", "type": "sql", "value": "*" }, { "output_field": "event_timestamp", "type": "function", "value": "CURRENT_TIMESTAMP" } ] }
+PROCESSOR_POSTPROCESSOR_CONFIG={ "external_source": { "es": [ { "capacity": "10", "connect_timeout": "5000", "endpoint_pattern": "/customers/_doc/%s", "endpoint_variables": "customer_id", "host": "elasticsearch", "output_mapping": { "customer_profile": { "path": "$._source" } }, "port": "9200", "retry_timeout": "5000", "socket_timeout": "6000", "stream_timeout": "5000", "type": "com.gotocompany.dagger.consumer.EnrichedBookingLogMessage" } ] }, "internal_source": [ { "output_field": "booking_log", "type": "sql", "value": "*" }, { "output_field": "event_timestamp", "type": "function", "value": "CURRENT_TIMESTAMP" } ] }

 # == Sink ==
@@ -26,7 +26,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000
 METRIC_TELEMETRY_ENABLE=false

 # == Others ==
-FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory
+FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory
 FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime

 # == Python Udf ==
diff --git a/quickstart/examples/joins/inner_join/README.md b/quickstart/examples/joins/inner_join/README.md
index fdc348371..b43444f08 100644
--- a/quickstart/examples/joins/inner_join/README.md
+++ b/quickstart/examples/joins/inner_join/README.md
@@ -11,7 +11,7 @@ In this example, we will use the Inner joins in Dagger to join the data streams
 2. Clone Dagger repository into your local

 ```shell
- git clone https://github.com/odpf/dagger.git
+ git clone https://github.com/goto/dagger.git
 ```

 ## Steps
diff --git a/quickstart/examples/joins/inner_join/compose.yaml b/quickstart/examples/joins/inner_join/compose.yaml
index 26e236085..1350f9075 100644
--- a/quickstart/examples/joins/inner_join/compose.yaml
+++ b/quickstart/examples/joins/inner_join/compose.yaml
@@ -124,7 +124,7 @@ services:
 yes Y | apt install openjdk-8-jdk
 yes Y | apt install gradle
 /var/lib/dpkg/info/ca-certificates-java.postinst configure
- git clone https://github.com/odpf/dagger
+ git clone https://github.com/goto/dagger
 cp /resources/local.properties /dagger/dagger-core/env/
 cd dagger
 ./gradlew runFlink
diff --git a/quickstart/examples/joins/inner_join/resources/TestLogMessage.proto b/quickstart/examples/joins/inner_join/resources/TestLogMessage.proto
index 16f0cb893..a5e45c77b 100644
--- a/quickstart/examples/joins/inner_join/resources/TestLogMessage.proto
+++ b/quickstart/examples/joins/inner_join/resources/TestLogMessage.proto
@@ -1,9 +1,9 @@
 syntax = "proto3";

-package io.odpf.dagger.consumer;
+package com.gotocompany.dagger.consumer;

 option java_multiple_files = true;
-option java_package = "io.odpf.dagger.consumer";
+option java_package = "com.gotocompany.dagger.consumer";
 option java_outer_classname = "TestLogMessageProto";

 import "google/protobuf/struct.proto";
diff --git a/quickstart/examples/joins/inner_join/resources/kafkafeeder.sh b/quickstart/examples/joins/inner_join/resources/kafkafeeder.sh
index 772afdc9a..50133a443 100755
--- a/quickstart/examples/joins/inner_join/resources/kafkafeeder.sh
+++ b/quickstart/examples/joins/inner_join/resources/kafkafeeder.sh
@@ -5,4 +5,4 @@ random_enum_index=$(($RANDOM %3))
 declare -a myArray=("FLIGHT" "BUS" "TRAIN")
 cat sample_message.txt | \
 sed "s/replace_timestamp_here/$timestamp_now/g; s/replace_service_type_here/${myArray[$random_enum_index]}/g; s/replace_customer_suffix_here/$random_3char_suffix/g" | \
-protoc --proto_path=./ --encode=io.odpf.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
+protoc --proto_path=./ --encode=com.gotocompany.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
diff --git a/quickstart/examples/joins/inner_join/resources/local.properties b/quickstart/examples/joins/inner_join/resources/local.properties
index 1f7646e22..aeceb51b5 100644
--- a/quickstart/examples/joins/inner_join/resources/local.properties
+++ b/quickstart/examples/joins/inner_join/resources/local.properties
@@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT data_stream_0.service_type as tag_service_type, count(dat
 FLINK_WATERMARK_INTERVAL_MS=10000
 FLINK_WATERMARK_DELAY_MS=1000
 # == Input Stream ==
-STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]},{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v2","INPUT_SCHEMA_TABLE":"data_stream_1","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]
+STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]},{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v2","INPUT_SCHEMA_TABLE":"data_stream_1","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]

 # == Preprocessor ==
 PROCESSOR_PREPROCESSOR_ENABLE=false
@@ -25,7 +25,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000
 METRIC_TELEMETRY_ENABLE=true

 # == Others ==
-FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory
+FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory
 FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime

 # == Python Udf ==
diff --git a/quickstart/examples/transformer/deduplication_transformer/README.md b/quickstart/examples/transformer/deduplication_transformer/README.md
index 05e93a63f..45d51eb42 100644
--- a/quickstart/examples/transformer/deduplication_transformer/README.md
+++ b/quickstart/examples/transformer/deduplication_transformer/README.md
@@ -11,7 +11,7 @@ In this example, we will use the DeDuplication Transformer in Dagger to remove t
 2. Clone Dagger repository into your local

 ```shell
- git clone https://github.com/odpf/dagger.git
+ git clone https://github.com/goto/dagger.git
 ```

 ## Steps
diff --git a/quickstart/examples/transformer/deduplication_transformer/compose.yaml b/quickstart/examples/transformer/deduplication_transformer/compose.yaml
index 057e4d715..c89047fb4 100644
--- a/quickstart/examples/transformer/deduplication_transformer/compose.yaml
+++ b/quickstart/examples/transformer/deduplication_transformer/compose.yaml
@@ -119,7 +119,7 @@ services:
 yes Y | apt install openjdk-8-jdk
 yes Y | apt install gradle
 /var/lib/dpkg/info/ca-certificates-java.postinst configure
- git clone https://github.com/odpf/dagger
+ git clone https://github.com/goto/dagger
 cp /resources/local.properties /dagger/dagger-core/env/
 cd dagger
 ./gradlew runFlink
diff --git a/quickstart/examples/transformer/deduplication_transformer/resources/TestLogMessage.proto b/quickstart/examples/transformer/deduplication_transformer/resources/TestLogMessage.proto
index 16f0cb893..a5e45c77b 100644
--- a/quickstart/examples/transformer/deduplication_transformer/resources/TestLogMessage.proto
+++ b/quickstart/examples/transformer/deduplication_transformer/resources/TestLogMessage.proto
@@ -1,9 +1,9 @@
 syntax = "proto3";

-package io.odpf.dagger.consumer;
+package com.gotocompany.dagger.consumer;

 option java_multiple_files = true;
-option java_package = "io.odpf.dagger.consumer";
+option java_package = "com.gotocompany.dagger.consumer";
 option java_outer_classname = "TestLogMessageProto";

 import "google/protobuf/struct.proto";
diff --git a/quickstart/examples/transformer/deduplication_transformer/resources/kafkafeeder.sh b/quickstart/examples/transformer/deduplication_transformer/resources/kafkafeeder.sh
index 772afdc9a..50133a443 100755
--- a/quickstart/examples/transformer/deduplication_transformer/resources/kafkafeeder.sh
+++ b/quickstart/examples/transformer/deduplication_transformer/resources/kafkafeeder.sh
@@ -5,4 +5,4 @@ random_enum_index=$(($RANDOM %3))
 declare -a myArray=("FLIGHT" "BUS" "TRAIN")
 cat sample_message.txt | \
 sed "s/replace_timestamp_here/$timestamp_now/g; s/replace_service_type_here/${myArray[$random_enum_index]}/g; s/replace_customer_suffix_here/$random_3char_suffix/g" | \
-protoc --proto_path=./ --encode=io.odpf.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
+protoc --proto_path=./ --encode=com.gotocompany.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
diff --git a/quickstart/examples/transformer/deduplication_transformer/resources/local.properties b/quickstart/examples/transformer/deduplication_transformer/resources/local.properties
index 55ddd2f2b..db9619778 100644
--- a/quickstart/examples/transformer/deduplication_transformer/resources/local.properties
+++ b/quickstart/examples/transformer/deduplication_transformer/resources/local.properties
@@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT order_number, customer_id FROM `data_stream_0`
 FLINK_WATERMARK_INTERVAL_MS=10000
 FLINK_WATERMARK_DELAY_MS=1000
 # == Input Stream ==
-STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]
+STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]

 # == Preprocessor ==
 PROCESSOR_PREPROCESSOR_ENABLE=false
@@ -11,7 +11,7 @@ PROCESSOR_PREPROCESSOR_CONFIG={}

 # == Postprocessor ==
 PROCESSOR_POSTPROCESSOR_ENABLE=true
-PROCESSOR_POSTPROCESSOR_CONFIG={"internal_source": [{"output_field":"order_number","value":"order_number","type": "sql" }, { "output_field": "customer_id", "value": "customer_id","type":"sql" }], "transformers": [ {"transformation_arguments": {"key_column": "order_number","ttl_in_seconds":"3600"},"transformation_class": "io.odpf.dagger.functions.transformers.DeDuplicationTransformer" } ]}
+PROCESSOR_POSTPROCESSOR_CONFIG={"internal_source": [{"output_field":"order_number","value":"order_number","type": "sql" }, { "output_field": "customer_id", "value": "customer_id","type":"sql" }], "transformers": [ {"transformation_arguments": {"key_column": "order_number","ttl_in_seconds":"3600"},"transformation_class": "DeDuplicationTransformer" } ]}

 # == Sink ==
@@ -26,7 +26,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000
 METRIC_TELEMETRY_ENABLE=true

 # == Others ==
-FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory
+FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory
 FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime

 # == Python Udf ==
diff --git a/quickstart/examples/udfs/distance_udf/README.md b/quickstart/examples/udfs/distance_udf/README.md
index f2793e9c5..9a13ad69f 100644
--- a/quickstart/examples/udfs/distance_udf/README.md
+++ b/quickstart/examples/udfs/distance_udf/README.md
@@ -11,7 +11,7 @@ In this example, we will use a User-Defined Function in Dagger to compute the di
 2. Clone Dagger repository into your local

 ```shell
- git clone https://github.com/odpf/dagger.git
+ git clone https://github.com/goto/dagger.git
 ```

 ## Steps
diff --git a/quickstart/examples/udfs/distance_udf/compose.yaml b/quickstart/examples/udfs/distance_udf/compose.yaml
index 057e4d715..c89047fb4 100644
--- a/quickstart/examples/udfs/distance_udf/compose.yaml
+++ b/quickstart/examples/udfs/distance_udf/compose.yaml
@@ -119,7 +119,7 @@ services:
 yes Y | apt install openjdk-8-jdk
 yes Y | apt install gradle
 /var/lib/dpkg/info/ca-certificates-java.postinst configure
- git clone https://github.com/odpf/dagger
+ git clone https://github.com/goto/dagger
 cp /resources/local.properties /dagger/dagger-core/env/
 cd dagger
 ./gradlew runFlink
diff --git a/quickstart/examples/udfs/distance_udf/resources/TestLogMessage.proto b/quickstart/examples/udfs/distance_udf/resources/TestLogMessage.proto
index 16f0cb893..a5e45c77b 100644
--- a/quickstart/examples/udfs/distance_udf/resources/TestLogMessage.proto
+++ b/quickstart/examples/udfs/distance_udf/resources/TestLogMessage.proto
@@ -1,9 +1,9 @@
 syntax = "proto3";

-package io.odpf.dagger.consumer;
+package com.gotocompany.dagger.consumer;

 option java_multiple_files = true;
-option java_package = "io.odpf.dagger.consumer";
+option java_package = "com.gotocompany.dagger.consumer";
 option java_outer_classname = "TestLogMessageProto";

 import "google/protobuf/struct.proto";
diff --git a/quickstart/examples/udfs/distance_udf/resources/kafkafeeder.sh b/quickstart/examples/udfs/distance_udf/resources/kafkafeeder.sh
index 772afdc9a..50133a443 100755
--- a/quickstart/examples/udfs/distance_udf/resources/kafkafeeder.sh
+++ b/quickstart/examples/udfs/distance_udf/resources/kafkafeeder.sh
@@ -5,4 +5,4 @@ random_enum_index=$(($RANDOM %3))
 declare -a myArray=("FLIGHT" "BUS" "TRAIN")
 cat sample_message.txt | \
 sed "s/replace_timestamp_here/$timestamp_now/g; s/replace_service_type_here/${myArray[$random_enum_index]}/g; s/replace_customer_suffix_here/$random_3char_suffix/g" | \
-protoc --proto_path=./ --encode=io.odpf.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
+protoc --proto_path=./ --encode=com.gotocompany.dagger.consumer.TestBookingLogMessage ./TestLogMessage.proto > message.bin
diff --git a/quickstart/examples/udfs/distance_udf/resources/local.properties b/quickstart/examples/udfs/distance_udf/resources/local.properties
index 528af046d..4380a0421 100644
--- a/quickstart/examples/udfs/distance_udf/resources/local.properties
+++ b/quickstart/examples/udfs/distance_udf/resources/local.properties
@@ -3,7 +3,7 @@ FLINK_SQL_QUERY=SELECT Distance( driver_pickup_location.latitude, driver_pickup_
 FLINK_WATERMARK_INTERVAL_MS=10000
 FLINK_WATERMARK_DELAY_MS=1000
 # == Input Stream ==
-STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"io.odpf.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]
+STREAMS=[{"SOURCE_KAFKA_TOPIC_NAMES":"dagger-test-topic-v1","INPUT_SCHEMA_TABLE":"data_stream_0","INPUT_SCHEMA_PROTO_CLASS":"com.gotocompany.dagger.consumer.TestBookingLogMessage","INPUT_SCHEMA_EVENT_TIMESTAMP_FIELD_INDEX":"5","SOURCE_KAFKA_CONSUMER_CONFIG_BOOTSTRAP_SERVERS":"kafka:29094","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_COMMIT_ENABLE":"false","SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET":"latest","SOURCE_KAFKA_CONSUMER_CONFIG_GROUP_ID":"dagger-test-topic-cgroup-v1","SOURCE_KAFKA_NAME":"local-kafka-stream","SOURCE_DETAILS":[{"SOURCE_TYPE":"UNBOUNDED","SOURCE_NAME":"KAFKA_CONSUMER"}]}]

 # == Preprocessor ==
 PROCESSOR_PREPROCESSOR_ENABLE=false
@@ -25,7 +25,7 @@ METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS=10000
 METRIC_TELEMETRY_ENABLE=true

 # == Others ==
-FUNCTION_FACTORY_CLASSES=io.odpf.dagger.functions.udfs.factories.FunctionFactory
+FUNCTION_FACTORY_CLASSES=com.gotocompany.dagger.functions.udfs.factories.FunctionFactory
 FLINK_ROWTIME_ATTRIBUTE_NAME=rowtime

 # == Python Udf ==
diff --git a/version.txt b/version.txt
index a0a15177f..ac454c6a1 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-0.6.3
\ No newline at end of file
+0.12.0