diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index 84cca80e22a0..a79fc8e53d20 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -1248,7 +1248,7 @@ def __init__(self, job_id, query, client,
     https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.createDisposition
     """
 
-    default_dataset = _TypedProperty('default_dataset', Dataset)
+    default_dataset = _TypedProperty('default_dataset', DatasetReference)
     """See
     https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.defaultDataset
     """
@@ -1437,8 +1437,8 @@ def _copy_configuration_properties(self, configuration):
             if self.default_dataset is not None:
                 del self.default_dataset
         else:
-            self.default_dataset = Dataset(
-                DatasetReference(def_ds['projectId'], def_ds['datasetId']))
+            self.default_dataset = DatasetReference(
+                def_ds['projectId'], def_ds['datasetId'])
         udf_resources = []
         for udf_mapping in configuration.get(self._UDF_KEY, ()):
             key_val, = udf_mapping.items()
diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py
index 7abbbec76b9b..38400659bdaf 100644
--- a/bigquery/google/cloud/bigquery/query.py
+++ b/bigquery/google/cloud/bigquery/query.py
@@ -19,7 +19,7 @@
 from google.api.core import page_iterator
 from google.cloud.bigquery._helpers import _TypedProperty
 from google.cloud.bigquery._helpers import _rows_from_json
-from google.cloud.bigquery.dataset import Dataset
+from google.cloud.bigquery.dataset import DatasetReference
 from google.cloud.bigquery.job import QueryJob
 from google.cloud.bigquery.table import _parse_schema_resource
 from google.cloud.bigquery._helpers import QueryParametersProperty
@@ -273,7 +273,7 @@ def schema(self):
         """
         return _parse_schema_resource(self._properties.get('schema', {}))
 
-    default_dataset = _TypedProperty('default_dataset', Dataset)
+    default_dataset = _TypedProperty('default_dataset', DatasetReference)
     """See
     https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#defaultDataset
     """
diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
index 029db44cd534..ca348704127c 100644
--- a/bigquery/tests/unit/test_job.py
+++ b/bigquery/tests/unit/test_job.py
@@ -1608,10 +1608,10 @@ def _verifyResourceProperties(self, job, resource):
         else:
             self.assertIsNone(job.create_disposition)
         if 'defaultDataset' in query_config:
-            dataset = job.default_dataset
+            ds_ref = job.default_dataset
             ds_ref = {
-                'projectId': dataset.project,
-                'datasetId': dataset.dataset_id,
+                'projectId': ds_ref.project,
+                'datasetId': ds_ref.dataset_id,
             }
             self.assertEqual(ds_ref, query_config['defaultDataset'])
         else:
@@ -2125,7 +2125,7 @@ def test_result_error(self):
         self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST)
 
     def test_begin_w_bound_client(self):
-        from google.cloud.bigquery.dataset import Dataset
+        from google.cloud.bigquery.dataset import DatasetReference
 
         PATH = '/projects/%s/jobs' % (self.PROJECT,)
         DS_ID = 'DATASET'
@@ -2139,7 +2139,7 @@ def test_begin_w_bound_client(self):
         client = _Client(project=self.PROJECT, connection=conn)
         job = self._make_one(self.JOB_NAME, self.QUERY, client)
 
-        job.default_dataset = Dataset(DatasetReference(self.PROJECT, DS_ID))
+        job.default_dataset = DatasetReference(self.PROJECT, DS_ID)
 
         job.begin()
 
@@ -2168,7 +2168,6 @@ def test_begin_w_bound_client(self):
         self._verifyResourceProperties(job, RESOURCE)
 
     def test_begin_w_alternate_client(self):
-        from google.cloud.bigquery.dataset import Dataset
         from google.cloud.bigquery.dataset import DatasetReference
 
         PATH = '/projects/%s/jobs' % (self.PROJECT,)
@@ -2204,12 +2203,11 @@ def test_begin_w_alternate_client(self):
         job = self._make_one(self.JOB_NAME, self.QUERY, client1)
 
         dataset_ref = DatasetReference(self.PROJECT, DS_ID)
-        dataset = Dataset(dataset_ref)
         table_ref = dataset_ref.table(TABLE)
 
         job.allow_large_results = True
         job.create_disposition = 'CREATE_NEVER'
-        job.default_dataset = dataset
+        job.default_dataset = dataset_ref
         job.destination = table_ref
         job.flatten_results = True
         job.priority = 'INTERACTIVE'
diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py
index 73f23cb1bf6a..9340689315a7 100644
--- a/bigquery/tests/unit/test_query.py
+++ b/bigquery/tests/unit/test_query.py
@@ -196,7 +196,7 @@ def test_ctor_w_query_parameters(self):
         self.assertEqual(query.query_parameters, query_parameters)
 
     def test_from_query_job(self):
-        from google.cloud.bigquery.dataset import Dataset, DatasetReference
+        from google.cloud.bigquery.dataset import DatasetReference
         from google.cloud.bigquery.job import QueryJob
         from google.cloud.bigquery._helpers import UDFResource
 
@@ -206,8 +206,8 @@ def test_from_query_job(self):
         job = QueryJob(
             self.JOB_NAME, self.QUERY, client,
             udf_resources=[UDFResource("resourceUri", RESOURCE_URI)])
-        dataset = Dataset(DatasetReference(self.PROJECT, DS_ID))
-        job.default_dataset = dataset
+        ds_ref = DatasetReference(self.PROJECT, DS_ID)
+        job.default_dataset = ds_ref
         job.use_query_cache = True
         job.use_legacy_sql = True
         klass = self._get_target_class()
@@ -219,7 +219,7 @@ def test_from_query_job(self):
         self.assertIs(query._client, client)
         self.assertIs(query._job, job)
         self.assertEqual(query.udf_resources, job.udf_resources)
-        self.assertIs(query.default_dataset, dataset)
+        self.assertIs(query.default_dataset, ds_ref)
         self.assertTrue(query.use_query_cache)
         self.assertTrue(query.use_legacy_sql)
 
@@ -743,9 +743,9 @@ def __init__(self, project='project', connection=None):
         self._connection = connection
 
     def dataset(self, dataset_id):
-        from google.cloud.bigquery.dataset import Dataset, DatasetReference
+        from google.cloud.bigquery.dataset import DatasetReference
 
-        return Dataset(DatasetReference(self.project, dataset_id))
+        return DatasetReference(self.project, dataset_id)
 
 
 class _Connection(object):
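
For reference, here is a minimal sketch of how calling code changes with this patch: QueryJob.default_dataset (and the matching default_dataset property in query.py) now accepts a DatasetReference directly instead of a Dataset wrapper. This is not part of the patch; the project, dataset, job ID, and query below are placeholders, and it assumes the bigquery package at this revision with default credentials available.

# Sketch only: 'my-project', 'my_dataset', and the job ID/query are placeholders.
from google.cloud.bigquery.client import Client
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.job import QueryJob

client = Client(project='my-project')
job = QueryJob('example-job-id', 'SELECT COUNT(*) FROM my_dataset.my_table', client)

# Before this patch the property required a Dataset wrapper:
#     job.default_dataset = Dataset(DatasetReference('my-project', 'my_dataset'))
# With this patch a bare DatasetReference is assigned directly:
job.default_dataset = DatasetReference('my-project', 'my_dataset')

# On begin(), the reference is serialized into the job configuration as
# {'projectId': 'my-project', 'datasetId': 'my_dataset'}, as asserted in
# _verifyResourceProperties above.
job.begin()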