diff --git a/sdk/monitor/azure-monitor-ingestion/CHANGELOG.md b/sdk/monitor/azure-monitor-ingestion/CHANGELOG.md
index e7a414fc1113..baf7b33ec18f 100644
--- a/sdk/monitor/azure-monitor-ingestion/CHANGELOG.md
+++ b/sdk/monitor/azure-monitor-ingestion/CHANGELOG.md
@@ -10,7 +10,8 @@
 
 ### Other Changes
 
-* Bumped minimum dependency on `azure-core` to `>=1.28.0`.
+- Bumped minimum dependency on `azure-core` to `>=1.28.0`.
+- Added additional type validation for the `logs` parameter in the `upload` method to ensure that a string hasn't been passed in. ([#33976](https://github.com/Azure/azure-sdk-for-python/pull/33976))
 
 ## 1.0.3 (2023-11-07)
 
diff --git a/sdk/monitor/azure-monitor-ingestion/README.md b/sdk/monitor/azure-monitor-ingestion/README.md
index a515c1bca416..36bcdc0b8f06 100644
--- a/sdk/monitor/azure-monitor-ingestion/README.md
+++ b/sdk/monitor/azure-monitor-ingestion/README.md
@@ -102,6 +102,7 @@ The logs that were uploaded using this library can be queried using the [Azure M
 ## Examples
 
 - [Upload custom logs](#upload-custom-logs)
+- [Upload data from JSON file or string](#upload-data-from-json-file-or-string)
 - [Upload with custom error handling](#upload-with-custom-error-handling)
 
 ### Upload custom logs
@@ -116,11 +117,12 @@ from azure.identity import DefaultAzureCredential
 from azure.monitor.ingestion import LogsIngestionClient
 
 endpoint = os.environ['DATA_COLLECTION_ENDPOINT']
-credential = DefaultAzureCredential()
+rule_id = os.environ['LOGS_DCR_RULE_ID']
+stream_name = os.environ['LOGS_DCR_STREAM_NAME']
 
+credential = DefaultAzureCredential()
 client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)
 
-rule_id = os.environ['LOGS_DCR_RULE_ID']
 body = [
     {
         "Time": "2021-12-08T23:51:14.1104269Z",
@@ -135,7 +137,44 @@ body = [
 ]
 
 try:
-    client.upload(rule_id=rule_id, stream_name=os.environ['LOGS_DCR_STREAM_NAME'], logs=body)
+    client.upload(rule_id=rule_id, stream_name=stream_name, logs=body)
+except HttpResponseError as e:
+    print(f"Upload failed: {e}")
+```
+
+### Upload data from JSON file or string
+
+This example shows how to upload logs when the data is in a JSON file or a JSON-formatted string.
+
+```python
+import json
+import os
+
+from azure.core.exceptions import HttpResponseError
+from azure.identity import DefaultAzureCredential
+from azure.monitor.ingestion import LogsIngestionClient
+
+endpoint = os.environ["DATA_COLLECTION_ENDPOINT"]
+rule_id = os.environ["LOGS_DCR_RULE_ID"]
+stream_name = os.environ["LOGS_DCR_STREAM_NAME"]
+
+credential = DefaultAzureCredential()
+client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)
+
+# If you have a JSON file containing an array of JSON objects
+file_path = "./test-logs.json"
+with open(file_path, "r") as f:
+    logs = json.load(f)
+    try:
+        client.upload(rule_id=rule_id, stream_name=stream_name, logs=logs)
+    except HttpResponseError as e:
+        print(f"Upload failed: {e}")
+
+# If you have a JSON string representing an array of JSON objects
+string = '[{"Time": "2023-12-08T23:51:14.1104269Z", "Computer": "Computer1", "AdditionalContext": "context-2"}]'
+logs = json.loads(string)
+try:
+    client.upload(rule_id=rule_id, stream_name=stream_name, logs=logs)
 except HttpResponseError as e:
     print(f"Upload failed: {e}")
 ```
@@ -155,7 +194,7 @@ def on_error(error):
 def on_error_pass(error):
     pass
 
-client.upload(rule_id=rule_id, stream_name=os.environ['LOGS_DCR_STREAM_NAME'], logs=body, on_error=on_error)
+client.upload(rule_id=rule_id, stream_name=stream_name, logs=body, on_error=on_error)
 ```
 
 ## Troubleshooting
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py
index a5e480107a91..4768d10331eb 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py
@@ -68,8 +68,10 @@ def upload(
             super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
             return
 
-        if not isinstance(logs, Sequence):
-            raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
+        if not isinstance(logs, Sequence) or isinstance(logs, str):
+            raise ValueError(
+                "The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
+            )
 
         for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
             try:
diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py
index a2dfd08f499c..581a4e18d2ed 100644
--- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py
+++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py
@@ -68,8 +68,10 @@ async def upload(
             await super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
             return
 
-        if not isinstance(logs, Sequence):
-            raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
+        if not isinstance(logs, Sequence) or isinstance(logs, str):
+            raise ValueError(
+                "The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
+            )
 
         for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
             try:
diff --git a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py
index 83d58900c40e..fb3b22e0c1dd 100644
--- a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py
+++ b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py
@@ -140,10 +140,10 @@ def on_error(e):
 
         assert on_error.called
 
-    def test_invalid_logs_format(self, monitor_info):
+    @pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None])
+    def test_invalid_logs_format(self, monitor_info, logs):
         client = self.create_client_from_credential(
             LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce'])
-        body = {"foo": "bar"}
 
         with pytest.raises(ValueError):
-            client.upload(rule_id="rule", stream_name="stream", logs=body)
+            client.upload(rule_id="rule", stream_name="stream", logs=logs)
diff --git a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py
index 5220b09e64b6..6365e038c728 100644
--- a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py
+++ b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py
@@ -166,12 +166,12 @@ async def on_error(e):
 
         await credential.close()
 
     @pytest.mark.asyncio
-    async def test_invalid_logs_format(self, monitor_info):
+    @pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None])
+    async def test_invalid_logs_format(self, monitor_info, logs):
         credential = self.get_credential(LogsIngestionClient, is_async=True)
         client = self.create_client_from_credential(LogsIngestionClient, credential, endpoint=monitor_info['dce'])
-        body = {"foo": "bar"}
 
         async with client:
             with pytest.raises(ValueError):
-                await client.upload(rule_id="rule", stream_name="stream", logs=body)
+                await client.upload(rule_id="rule", stream_name="stream", logs=logs)
 
         await credential.close()
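A note on why the guard needs the extra clause: in Python, `str` is itself a `Sequence`, so the previous `isinstance(logs, Sequence)` check accepted a raw JSON string and the client went on to chunk it character by character. Below is a minimal sketch of the pitfall and the caller-side fix; the `if`/`raise` guard is copied from the patched `upload` method, while the `_validate` helper and the sample payload are illustrative only, not part of the SDK.

```python
import json
from collections.abc import Sequence  # assumption: the SDK's Sequence check behaves like this ABC


def _validate(logs):
    # Guard copied from the patched upload() method; the wrapper function is hypothetical.
    if not isinstance(logs, Sequence) or isinstance(logs, str):
        raise ValueError(
            "The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
        )


raw = '[{"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer1"}]'
assert isinstance(raw, Sequence)  # True: the old check could not reject a JSON string

try:
    _validate(raw)  # the added str check now rejects the unparsed string
except ValueError:
    logs = json.loads(raw)  # parse first, as the new README section shows
    _validate(logs)  # passes: logs is now a list of dicts
```

The parametrized tests exercise exactly these shapes: both string inputs ('[{"foo": "bar"}]' and "foo"), a bare mapping, and None now fail fast with `ValueError` before any gzip chunking happens.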