remove hive instance hardcode cluster name
Na Zhang committed Sep 26, 2016
commit 5c76f473135d30f64f64ac7dd9fa68f5fe293e5b
metadata-etl/src/main/resources/jython/HiveLoad.py (8 changes: 6 additions & 2 deletions)
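In the hunks below, load_dataset_instance stops reading data_center and server_cluster from the staged hive_instance CSV rows (which carried a hard-coded datacenter, per the in-code comment) and instead joins cfg_database on the job's db_id, so both fields come from configuration. A minimal sketch of the new lookup, with a hypothetical cfg_database row purely for illustration (the table's full schema is not part of this diff):

-- Hypothetical configuration row, for illustration only:
--   cfg_database: db_id = 3, data_center = 'dc1', cluster = 'hive-prod'
-- Every staged instance row for db_id = 3 now inherits data_center and
-- cluster from that single configuration row rather than from the CSV:
select s.dataset_id, s.db_id, c.data_center, c.cluster
from stg_dict_dataset_instance s
join cfg_database c on c.db_id = 3
where s.db_id = 3;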
@@ -298,6 +298,8 @@ def load_dataset_instance(self):
 set sdi.dataset_id = d.id where sdi.abstract_dataset_urn = d.urn
 and sdi.db_id = {db_id};
 
+
+# nzhang fix issue hive_instance.*.csv has hard-coded datacenter
 INSERT INTO dict_dataset_instance
 ( dataset_id,
 db_id,
@@ -316,8 +318,8 @@ def load_dataset_instance(self):
 created_time,
 wh_etl_exec_id
 )
-select s.dataset_id, s.db_id, s.deployment_tier, s.data_center,
-s.server_cluster, s.slice, s.status_id, s.native_name, s.logical_name, s.version,
+select s.dataset_id, s.db_id, s.deployment_tier, c.data_center, c.cluster,
+s.slice, s.status_id, s.native_name, s.logical_name, s.version,
 case when s.version regexp '[0-9]+\.[0-9]+\.[0-9]+'
 then cast(substring_index(s.version, '.', 1) as unsigned) * 100000000 +
 cast(substring_index(substring_index(s.version, '.', 2), '.', -1) as unsigned) * 10000 +
@@ -326,6 +328,7 @@ def load_dataset_instance(self):
 end version_sort_id, s.schema_text, s.ddl_text,
 s.instance_created_time, s.created_time, s.wh_etl_exec_id
 from stg_dict_dataset_instance s join dict_dataset d on s.dataset_id = d.id
+join cfg_database c on c.db_id = {db_id}
 where s.db_id = {db_id}
 on duplicate key update
 deployment_tier=s.deployment_tier, data_center=s.data_center, server_cluster=s.server_cluster, slice=s.slice,
@@ -335,6 +338,7 @@ def load_dataset_instance(self):
 ;
 """.format(source_file=self.input_instance_file, db_id=self.db_id, wh_etl_exec_id=self.wh_etl_exec_id)
 
+
 # didn't load into final table for now
 
 for state in load_cmd.split(";"):
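For readers skimming the diff: the case expression packs a dotted version string into a single integer so versions sort numerically rather than lexically. A worked example of the arithmetic for version '2.3.1', assuming the third term (elided by the collapsed diff context) applies substring_index(s.version, '.', -1) to the patch component:

-- Worked example of the version_sort_id arithmetic for s.version = '2.3.1':
select cast(substring_index('2.3.1', '.', 1) as unsigned) * 100000000                        -- major: 200000000
     + cast(substring_index(substring_index('2.3.1', '.', 2), '.', -1) as unsigned) * 10000  -- minor:     30000
     + cast(substring_index('2.3.1', '.', -1) as unsigned)                                   -- patch (assumed): 1
  as version_sort_id;                                                                        -- = 200030001

With four decimal digits reserved per component, the scheme assumes minor and patch stay below 10000; under it, '10.2.0' maps to 1000020000 and correctly sorts above '2.3.1' (200030001), even though it compares lower as a plain string.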