diff --git a/google/cloud/bigquery/routine/routine.py b/google/cloud/bigquery/routine/routine.py
index e933fa137..a75b1d04a 100644
--- a/google/cloud/bigquery/routine/routine.py
+++ b/google/cloud/bigquery/routine/routine.py
@@ -625,6 +625,8 @@ class RemoteFunctionOptions(object):
         "connection": "connection",
         "max_batching_rows": "maxBatchingRows",
         "user_defined_context": "userDefinedContext",
+        "container_cpu": "containerCpu",
+        "container_memory": "containerMemory",
     }
 
     def __init__(
@@ -633,6 +635,8 @@ def __init__(
         connection=None,
         max_batching_rows=None,
         user_defined_context=None,
+        container_cpu=None,
+        container_memory=None,
         _properties=None,
     ) -> None:
         if _properties is None:
@@ -647,6 +651,10 @@
             self.max_batching_rows = max_batching_rows
         if user_defined_context is not None:
             self.user_defined_context = user_defined_context
+        if container_cpu is not None:
+            self.container_cpu = container_cpu
+        if container_memory is not None:
+            self.container_memory = container_memory
 
     @property
     def connection(self):
@@ -682,7 +690,7 @@ def max_batching_rows(self):
 
     @max_batching_rows.setter
     def max_batching_rows(self, value):
-        self._properties["maxBatchingRows"] = _helpers._str_or_none(value)
+        self._properties["maxBatchingRows"] = _helpers._int_or_none(value)
 
     @property
     def user_defined_context(self):
@@ -699,6 +707,24 @@ def user_defined_context(self, value):
             raise ValueError("value must be dictionary")
         self._properties["userDefinedContext"] = value
 
+    @property
+    def container_cpu(self):
+        """Optional[str]: The container CPU for the remote service."""
+        return self._properties.get("containerCpu")
+
+    @container_cpu.setter
+    def container_cpu(self, value):
+        self._properties["containerCpu"] = _helpers._str_or_none(value)
+
+    @property
+    def container_memory(self):
+        """Optional[str]: The container memory for the remote service."""
+        return self._properties.get("containerMemory")
+
+    @container_memory.setter
+    def container_memory(self, value):
+        self._properties["containerMemory"] = _helpers._str_or_none(value)
+
     @classmethod
     def from_api_repr(cls, resource: dict) -> "RemoteFunctionOptions":
         """Factory: construct remote function options given its API representation.
diff --git a/tests/unit/routine/test_remote_function_options.py b/tests/unit/routine/test_remote_function_options.py
index ffd57e8c1..d0915eb8e 100644
--- a/tests/unit/routine/test_remote_function_options.py
+++ b/tests/unit/routine/test_remote_function_options.py
@@ -37,11 +37,15 @@ def test_ctor(target_class):
         connection=CONNECTION,
         max_batching_rows=MAX_BATCHING_ROWS,
         user_defined_context=USER_DEFINED_CONTEXT,
+        container_cpu="1.0",
+        container_memory="512M",
     )
     assert options.endpoint == ENDPOINT
     assert options.connection == CONNECTION
     assert options.max_batching_rows == MAX_BATCHING_ROWS
     assert options.user_defined_context == USER_DEFINED_CONTEXT
+    assert options.container_cpu == "1.0"
+    assert options.container_memory == "512M"
 
 
 def test_empty_ctor(target_class):
@@ -64,6 +68,8 @@ def test_from_api_repr(target_class):
         "connection": CONNECTION,
         "maxBatchingRows": MAX_BATCHING_ROWS,
         "userDefinedContext": USER_DEFINED_CONTEXT,
+        "containerCpu": "1.0",
+        "containerMemory": "512M",
         "someRandomField": "someValue",
     }
     options = target_class.from_api_repr(resource)
@@ -71,6 +77,8 @@
     assert options.connection == CONNECTION
     assert options.max_batching_rows == MAX_BATCHING_ROWS
     assert options.user_defined_context == USER_DEFINED_CONTEXT
+    assert options.container_cpu == "1.0"
+    assert options.container_memory == "512M"
     assert options._properties["someRandomField"] == "someValue"
 
 
@@ -81,6 +89,8 @@ def test_from_api_repr_w_minimal_resource(target_class):
     assert options.connection is None
     assert options.max_batching_rows is None
     assert options.user_defined_context is None
+    assert options.container_cpu is None
+    assert options.container_memory is None
 
 
 def test_from_api_repr_w_unknown_fields(target_class):
@@ -95,12 +105,16 @@ def test_eq(target_class):
         connection=CONNECTION,
         max_batching_rows=MAX_BATCHING_ROWS,
         user_defined_context=USER_DEFINED_CONTEXT,
+        container_cpu="1.0",
+        container_memory="512M",
     )
     other_options = target_class(
         endpoint=ENDPOINT,
         connection=CONNECTION,
         max_batching_rows=MAX_BATCHING_ROWS,
         user_defined_context=USER_DEFINED_CONTEXT,
+        container_cpu="1.0",
+        container_memory="512M",
     )
     assert options == other_options
     assert not (options != other_options)
@@ -120,8 +134,12 @@ def test_repr(target_class):
         connection=CONNECTION,
         max_batching_rows=MAX_BATCHING_ROWS,
         user_defined_context=USER_DEFINED_CONTEXT,
+        container_cpu="1.0",
+        container_memory="512M",
     )
     actual_repr = repr(options)
     assert actual_repr == (
-        "RemoteFunctionOptions(connection='connection_string', endpoint='https://some.endpoint', max_batching_rows=50, user_defined_context={'foo': 'bar'})"
+        "RemoteFunctionOptions(connection='connection_string', container_cpu='1.0', "
+        "container_memory='512M', endpoint='https://some.endpoint', "
+        "max_batching_rows=50, user_defined_context={'foo': 'bar'})"
     )
diff --git a/tests/unit/routine/test_routine.py b/tests/unit/routine/test_routine.py
index acd3bc40e..15f5ca91e 100644
--- a/tests/unit/routine/test_routine.py
+++ b/tests/unit/routine/test_routine.py
@@ -80,6 +80,8 @@ def test_ctor_w_properties(target_class):
         connection="connection_string",
         max_batching_rows=99,
         user_defined_context={"foo": "bar"},
+        container_cpu="1.0",
+        container_memory="512M",
     )
 
     actual_routine = target_class(
@@ -106,6 +108,8 @@
         actual_routine.determinism_level == bigquery.DeterminismLevel.NOT_DETERMINISTIC
     )
     assert actual_routine.remote_function_options == options
+    assert actual_routine.remote_function_options.container_cpu == "1.0"
+    assert actual_routine.remote_function_options.container_memory == "512M"
 
 
 def test_ctor_invalid_remote_function_options(target_class):
@@ -153,6 +157,8 @@ def test_from_api_repr(target_class):
             "userDefinedContext": {
                 "foo": "bar",
             },
+            "containerCpu": "1.0",
+            "containerMemory": "512M",
         },
         "dataGovernanceType": "DATA_MASKING",
     }
@@ -193,6 +199,8 @@ def test_from_api_repr(target_class):
     assert actual_routine.remote_function_options.connection == "connection_string"
     assert actual_routine.remote_function_options.max_batching_rows == 50
     assert actual_routine.remote_function_options.user_defined_context == {"foo": "bar"}
+    assert actual_routine.remote_function_options.container_cpu == "1.0"
+    assert actual_routine.remote_function_options.container_memory == "512M"
     assert actual_routine.data_governance_type == "DATA_MASKING"
 
 
@@ -489,6 +497,28 @@ def test_from_api_repr_w_unknown_fields(target_class):
                 },
             },
         ),
+        (
+            {
+                "routineType": "SCALAR_FUNCTION",
+                "remoteFunctionOptions": {
+                    "endpoint": "https://some_endpoint",
+                    "connection": "connection_string",
+                    "max_batching_rows": 101,
+                    "containerCpu": "1.0",
+                    "containerMemory": "512M",
+                },
+            },
+            ["remote_function_options"],
+            {
+                "remoteFunctionOptions": {
+                    "endpoint": "https://some_endpoint",
+                    "connection": "connection_string",
+                    "max_batching_rows": 101,
+                    "containerCpu": "1.0",
+                    "containerMemory": "512M",
+                },
+            },
+        ),
     ],
 )
 def test_build_resource(object_under_test, resource, filter_fields, expected):
@@ -605,3 +635,13 @@ def test_repr(target_class):
     model = target_class("my-proj.my_dset.my_routine")
     actual_routine = repr(model)
     assert actual_routine == "Routine('my-proj.my_dset.my_routine')"
+
+
+def test_remote_function_options_max_batching_rows_setter():
+    options = bigquery.RemoteFunctionOptions()
+    options.max_batching_rows = 10
+    assert options.max_batching_rows == 10
+    assert options._properties["maxBatchingRows"] == 10
+    options.max_batching_rows = None
+    assert options.max_batching_rows is None
+    assert options._properties["maxBatchingRows"] is None