Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion sdk/monitor/azure-monitor-ingestion/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@

### Other Changes

* Bumped minimum dependency on `azure-core` to `>=1.28.0`.
- Bumped minimum dependency on `azure-core` to `>=1.28.0`.
- Added additional type validation for the `logs` parameter in the `upload` method to ensure that a string hasn't been passed in. ([#33976](https://github.com/Azure/azure-sdk-for-python/pull/33976))

## 1.0.3 (2023-11-07)

Expand Down
47 changes: 43 additions & 4 deletions sdk/monitor/azure-monitor-ingestion/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ The logs that were uploaded using this library can be queried using the [Azure M
## Examples

- [Upload custom logs](#upload-custom-logs)
- [Upload data from JSON file or string](#upload-data-from-json-file-or-string)
- [Upload with custom error handling](#upload-with-custom-error-handling)

### Upload custom logs
Expand All @@ -116,11 +117,12 @@ from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient

endpoint = os.environ['DATA_COLLECTION_ENDPOINT']
credential = DefaultAzureCredential()
rule_id = os.environ['LOGS_DCR_RULE_ID']
stream_name = os.environ['LOGS_DCR_STREAM_NAME']

credential = DefaultAzureCredential()
client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)

rule_id = os.environ['LOGS_DCR_RULE_ID']
body = [
{
"Time": "2021-12-08T23:51:14.1104269Z",
Expand All @@ -135,7 +137,44 @@ body = [
]

try:
client.upload(rule_id=rule_id, stream_name=os.environ['LOGS_DCR_STREAM_NAME'], logs=body)
client.upload(rule_id=rule_id, stream_name=stream_name, logs=body)
except HttpResponseError as e:
print(f"Upload failed: {e}")
```

### Upload data from JSON file or string

This example shows how to upload logs when the data is stored in a JSON file or in a JSON-formatted string.

```python
import json
import os

from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient

endpoint = os.environ["DATA_COLLECTION_ENDPOINT"]
rule_id = os.environ['LOGS_DCR_RULE_ID']
stream_name = os.environ["LOGS_DCR_STREAM_NAME"]

credential = DefaultAzureCredential()
client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)

# If you have a JSON file containing an array of JSON objects
file_path = "./test-logs.json"
with open(file_path, "r") as f:
logs = json.load(f)
try:
client.upload(rule_id=rule_id, stream_name=stream_name, logs=logs)
except HttpResponseError as e:
print(f"Upload failed: {e}")

# If you have a JSON string representing an array of JSON objects
string = '[{"Time": "2023-12-08T23:51:14.1104269Z", "Computer": "Computer1", "AdditionalContext": "context-2"}]'
logs = json.loads(string)
try:
client.upload(rule_id=rule_id, stream_name=stream_name, logs=logs)
except HttpResponseError as e:
print(f"Upload failed: {e}")
```
Expand All @@ -155,7 +194,7 @@ def on_error(error):
def on_error_pass(error):
pass

client.upload(rule_id=rule_id, stream_name=os.environ['LOGS_DCR_STREAM_NAME'], logs=body, on_error=on_error)
client.upload(rule_id=rule_id, stream_name=stream_name, logs=body, on_error=on_error)
```

## Troubleshooting
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,10 @@ def upload(
super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
return

if not isinstance(logs, Sequence):
raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
if not isinstance(logs, Sequence) or isinstance(logs, str):
raise ValueError(
"The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
)

for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,10 @@ async def upload(
await super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
return

if not isinstance(logs, Sequence):
raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
if not isinstance(logs, Sequence) or isinstance(logs, str):
raise ValueError(
"The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable."
)

for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -140,10 +140,10 @@ def on_error(e):

assert on_error.called

def test_invalid_logs_format(self, monitor_info):
@pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None])
def test_invalid_logs_format(self, monitor_info, logs):
client = self.create_client_from_credential(
LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce'])

body = {"foo": "bar"}
with pytest.raises(ValueError):
client.upload(rule_id="rule", stream_name="stream", logs=body)
client.upload(rule_id="rule", stream_name="stream", logs=logs)
Original file line number Diff line number Diff line change
Expand Up @@ -166,12 +166,12 @@ async def on_error(e):
await credential.close()

@pytest.mark.asyncio
async def test_invalid_logs_format(self, monitor_info):
@pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None])
async def test_invalid_logs_format(self, monitor_info, logs):
credential = self.get_credential(LogsIngestionClient, is_async=True)
client = self.create_client_from_credential(LogsIngestionClient, credential, endpoint=monitor_info['dce'])

body = {"foo": "bar"}
async with client:
with pytest.raises(ValueError):
await client.upload(rule_id="rule", stream_name="stream", logs=body)
await client.upload(rule_id="rule", stream_name="stream", logs=logs)
await credential.close()