version: 2.1

jobs:
  unit:
    environment:
      DBT_INVOCATION_ENV: circle
    docker:
      - image: fishtownanalytics/test-container:10
    steps:
      - checkout
      - run: tox -e flake8,unit

  # Turned off for now due to flaky test runs; will turn back on at a later date.
  # integration-spark-session:
  #   environment:
  #     DBT_INVOCATION_ENV: circle
  #   docker:
  #     - image: godatadriven/pyspark:3.1
  #   steps:
  #     - checkout
  #     - run: apt-get update
  #     - run: python3 -m pip install --upgrade pip
  #     - run: apt-get install -y git gcc g++ unixodbc-dev libsasl2-dev
  #     - run: python3 -m pip install tox
  #     - run:
  #         name: Run integration tests
  #         command: tox -e integration-spark-session
  #         no_output_timeout: 1h
  #     - store_artifacts:
  #         path: ./logs

  integration-spark-thrift:
    environment:
      DBT_INVOCATION_ENV: circle
    docker:
      - image: fishtownanalytics/test-container:10
      - image: godatadriven/spark:3.1.1
        environment:
          WAIT_FOR: localhost:5432
        command: >
          --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2
          --name Thrift JDBC/ODBC Server
      - image: postgres:9.6.17-alpine
        environment:
          POSTGRES_USER: dbt
          POSTGRES_PASSWORD: dbt
          POSTGRES_DB: metastore
    steps:
      - checkout
      - run:
          name: Wait for Spark-Thrift
          command: dockerize -wait tcp://localhost:10000 -timeout 15m -wait-retry-interval 5s
      - run:
          name: Run integration tests
          command: tox -e integration-spark-thrift
          no_output_timeout: 1h
      - store_artifacts:
          path: ./logs

  integration-spark-databricks-http:
    environment:
      DBT_INVOCATION_ENV: circle
      DBT_DATABRICKS_RETRY_ALL: True
      DBT_TEST_USER_1: "buildbot+dbt_test_user_1@dbtlabs.com"
      DBT_TEST_USER_2: "buildbot+dbt_test_user_2@dbtlabs.com"
      DBT_TEST_USER_3: "buildbot+dbt_test_user_3@dbtlabs.com"
    docker:
      - image: fishtownanalytics/test-container:10
    steps:
      - checkout
      - run:
          name: Run integration tests
          command: tox -e integration-spark-databricks-http
          no_output_timeout: 1h
      - store_artifacts:
          path: ./logs

  integration-spark-databricks-odbc-cluster: &databricks-odbc
    environment:
      DBT_INVOCATION_ENV: circle
      ODBC_DRIVER: Simba # TODO: move env var to Docker image
      DBT_TEST_USER_1: "buildbot+dbt_test_user_1@dbtlabs.com"
      DBT_TEST_USER_2: "buildbot+dbt_test_user_2@dbtlabs.com"
      DBT_TEST_USER_3: "buildbot+dbt_test_user_3@dbtlabs.com"
    docker:
      # image based on `fishtownanalytics/test-container` w/ Simba ODBC Spark driver installed
      - image: 828731156495.dkr.ecr.us-east-1.amazonaws.com/dbt-spark-odbc-test-container:latest
        aws_auth:
          aws_access_key_id: $AWS_ACCESS_KEY_ID_STAGING
          aws_secret_access_key: $AWS_SECRET_ACCESS_KEY_STAGING
    steps:
      - checkout
      - run:
          name: Run integration tests
          command: tox -e integration-spark-databricks-odbc-cluster
          no_output_timeout: 1h
      - store_artifacts:
          path: ./logs

  integration-spark-databricks-odbc-endpoint:
    <<: *databricks-odbc
    steps:
      - checkout
      - run:
          name: Run integration tests
          command: tox -e integration-spark-databricks-odbc-sql-endpoint
          no_output_timeout: 1h
      - store_artifacts:
          path: ./logs

workflows:
  version: 2
  test-everything:
    jobs:
      - unit
      # Disabled along with the integration-spark-session job above.
      # - integration-spark-session:
      #     requires:
      #       - unit
      - integration-spark-thrift:
          requires:
            - unit
      - integration-spark-databricks-http:
          requires:
            - integration-spark-thrift
      - integration-spark-databricks-odbc-cluster:
          context: aws-credentials
          requires:
            - integration-spark-thrift
      - integration-spark-databricks-odbc-endpoint:
          context: aws-credentials
          requires:
            - integration-spark-thrift