From ef28cb47d2d0b675951b77b1c233ab7782aa48c5 Mon Sep 17 00:00:00 2001 From: Paul Van Eck Date: Tue, 8 Jul 2025 02:15:47 +0000 Subject: [PATCH] [Monitor] Add TypeSpec-based Logs Query package Signed-off-by: Paul Van Eck --- .../azure-monitor-querylogs/CHANGELOG.md | 14 + sdk/monitor/azure-monitor-querylogs/LICENSE | 21 + .../azure-monitor-querylogs/MANIFEST.in | 8 + sdk/monitor/azure-monitor-querylogs/README.md | 481 ++++ .../TROUBLESHOOTING.md | 176 ++ .../azure-monitor-querylogs/_metadata.json | 3 + .../apiview-properties.json | 23 + .../azure-monitor-querylogs/assets.json | 6 + .../azure-monitor-querylogs/azure/__init__.py | 1 + .../azure/monitor/__init__.py | 1 + .../azure/monitor/querylogs/__init__.py | 32 + .../azure/monitor/querylogs/_enums.py | 18 + .../azure/monitor/querylogs/_exceptions.py | 55 + .../monitor/querylogs/_generated/__init__.py | 32 + .../monitor/querylogs/_generated/_client.py | 98 + .../querylogs/_generated/_configuration.py | 63 + .../_generated/_operations/__init__.py | 22 + .../_generated/_operations/_operations.py | 651 ++++++ .../_generated/_operations/_patch.py | 21 + .../monitor/querylogs/_generated/_patch.py | 21 + .../querylogs/_generated/_utils/__init__.py | 6 + .../querylogs/_generated/_utils/model_base.py | 1232 ++++++++++ .../_generated/_utils/serialization.py | 2032 +++++++++++++++++ .../querylogs/_generated/_utils/utils.py | 25 + .../monitor/querylogs/_generated/_version.py | 9 + .../querylogs/_generated/aio/__init__.py | 29 + .../querylogs/_generated/aio/_client.py | 100 + .../_generated/aio/_configuration.py | 63 + .../_generated/aio/_operations/__init__.py | 22 + .../_generated/aio/_operations/_operations.py | 573 +++++ .../_generated/aio/_operations/_patch.py | 21 + .../querylogs/_generated/aio/_patch.py | 21 + .../querylogs/_generated/models/__init__.py | 52 + .../querylogs/_generated/models/_enums.py | 35 + .../querylogs/_generated/models/_models.py | 494 ++++ .../querylogs/_generated/models/_patch.py | 21 + .../monitor/querylogs/_generated/py.typed | 1 + .../azure/monitor/querylogs/_helpers.py | 146 ++ .../monitor/querylogs/_logs_query_client.py | 276 +++ .../azure/monitor/querylogs/_models.py | 264 +++ .../azure/monitor/querylogs/_version.py | 8 + .../azure/monitor/querylogs/aio/__init__.py | 9 + .../monitor/querylogs/aio/_helpers_async.py | 26 + .../querylogs/aio/_logs_query_client_async.py | 277 +++ .../azure/monitor/querylogs/py.typed | 0 .../dev_requirements.txt | 4 + .../azure-monitor-querylogs/samples/README.md | 69 + .../sample_authentication_async.py | 45 + .../sample_logs_single_query_async.py | 61 + .../notebooks/sample_large_query.ipynb | 710 ++++++ .../sample_machine_learning_sklearn.ipynb | 902 ++++++++ .../samples/sample_authentication.py | 39 + .../samples/sample_batch_query.py | 71 + .../sample_logs_query_key_value_form.py | 51 + .../sample_logs_query_multiple_workspaces.py | 47 + .../samples/sample_logs_single_query.py | 57 + ...sample_logs_single_query_partial_result.py | 61 + .../samples/sample_resource_logs_query.py | 53 + .../samples/sample_server_timeout.py | 53 + .../sample_single_log_query_without_pandas.py | 49 + .../sdk_packaging.toml | 2 + sdk/monitor/azure-monitor-querylogs/setup.py | 72 + .../tests/base_testcase.py | 29 + .../azure-monitor-querylogs/tests/conftest.py | 51 + .../tests/test_exceptions.py | 155 ++ .../tests/test_exceptions_async.py | 129 ++ .../tests/test_helpers.py | 33 + .../tests/test_logs_client.py | 311 +++ .../tests/test_logs_client_async.py | 269 +++ .../tests/test_logs_response.py | 84 + 
.../tests/test_logs_timespans.py | 102 + .../azure-monitor-querylogs/tsp-location.yaml | 4 + sdk/monitor/ci.yml | 2 + 73 files changed, 11004 insertions(+) create mode 100644 sdk/monitor/azure-monitor-querylogs/CHANGELOG.md create mode 100644 sdk/monitor/azure-monitor-querylogs/LICENSE create mode 100644 sdk/monitor/azure-monitor-querylogs/MANIFEST.in create mode 100644 sdk/monitor/azure-monitor-querylogs/README.md create mode 100644 sdk/monitor/azure-monitor-querylogs/TROUBLESHOOTING.md create mode 100644 sdk/monitor/azure-monitor-querylogs/_metadata.json create mode 100644 sdk/monitor/azure-monitor-querylogs/apiview-properties.json create mode 100644 sdk/monitor/azure-monitor-querylogs/assets.json create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_enums.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_exceptions.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_client.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_configuration.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_operations.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_patch.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_patch.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/model_base.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/serialization.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/utils.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_version.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_client.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_configuration.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_operations.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_patch.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_patch.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_enums.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_models.py create mode 
100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_patch.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/py.typed create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_helpers.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_logs_query_client.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_models.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_version.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/__init__.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_helpers_async.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_logs_query_client_async.py create mode 100644 sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/py.typed create mode 100644 sdk/monitor/azure-monitor-querylogs/dev_requirements.txt create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/README.md create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_authentication_async.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_single_query_async.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_large_query.ipynb create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_machine_learning_sklearn.ipynb create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_authentication.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_key_value_form.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query_partial_result.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_resource_logs_query.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_server_timeout.py create mode 100644 sdk/monitor/azure-monitor-querylogs/samples/sample_single_log_query_without_pandas.py create mode 100644 sdk/monitor/azure-monitor-querylogs/sdk_packaging.toml create mode 100644 sdk/monitor/azure-monitor-querylogs/setup.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/base_testcase.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/conftest.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_exceptions.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_exceptions_async.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_helpers.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_logs_client.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_logs_client_async.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_logs_response.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tests/test_logs_timespans.py create mode 100644 sdk/monitor/azure-monitor-querylogs/tsp-location.yaml diff --git a/sdk/monitor/azure-monitor-querylogs/CHANGELOG.md b/sdk/monitor/azure-monitor-querylogs/CHANGELOG.md new file mode 100644 index 000000000000..ae2dcaa50adf --- /dev/null +++ 
b/sdk/monitor/azure-monitor-querylogs/CHANGELOG.md @@ -0,0 +1,14 @@ +# Release History + +## 1.0.0 (Unreleased) + +### Features Added + +- Initial release + +### Breaking Changes + +### Bugs Fixed + +### Other Changes + diff --git a/sdk/monitor/azure-monitor-querylogs/LICENSE b/sdk/monitor/azure-monitor-querylogs/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-querylogs/MANIFEST.in b/sdk/monitor/azure-monitor-querylogs/MANIFEST.in new file mode 100644 index 000000000000..f85b439d376c --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/MANIFEST.in @@ -0,0 +1,8 @@ +include *.md +include LICENSE +include azure/monitor/querylogs/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/monitor/__init__.py +include azure/monitor/querylogs/__init__.py diff --git a/sdk/monitor/azure-monitor-querylogs/README.md b/sdk/monitor/azure-monitor-querylogs/README.md new file mode 100644 index 000000000000..6134c54e3095 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/README.md @@ -0,0 +1,481 @@ +# Azure Monitor Query Logs client library for Python + +The Azure Monitor Query Logs client library is used to execute read-only queries against [Azure Monitor][azure_monitor_overview]'s Logs data platform: + +- [Logs](https://learn.microsoft.com/azure/azure-monitor/logs/data-platform-logs) - Collects and organizes log and performance data from monitored resources. Data from different sources such as platform logs from Azure services, log and performance data from virtual machines agents, and usage and performance data from apps can be consolidated into a single [Azure Log Analytics workspace](https://learn.microsoft.com/azure/azure-monitor/logs/data-platform-logs#log-analytics-and-workspaces). The various data types can be analyzed together using the [Kusto Query Language][kusto_query_language]. 
+ +**Resources:** + +- [Source code][source] +- [Package (PyPI)][package] +- [API reference documentation][python-query-ref-docs] +- [Service documentation][azure_monitor_overview] +- [Samples][samples] +- [Change log][changelog] + +## Getting started + +### Prerequisites + +- Python 3.9 or later +- An [Azure subscription][azure_subscription] +- To query Logs, you need one of the following things: + - An [Azure Log Analytics workspace][azure_monitor_create_using_portal] + - An Azure resource of any kind (Storage Account, Key Vault, Cosmos DB, etc.) + +### Install the package + +Install the Azure Monitor Query Logs client library for Python with [pip][pip]: + +```bash +pip install azure-monitor-querylogs +``` + +### Create the client + +An authenticated client is required to query Logs. The library includes both synchronous and asynchronous forms of the client. To authenticate, create an instance of a token credential. Use that instance when creating a `LogsQueryClient`. The following examples use `DefaultAzureCredential` from the [azure-identity](https://pypi.org/project/azure-identity/) package. + +#### Synchronous client + +Consider the following example, which creates a synchronous client for Logs querying: + +```python +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient + +credential = DefaultAzureCredential() +logs_query_client = LogsQueryClient(credential) +``` + +#### Asynchronous client + +The asynchronous form of the query client API is found in the `.aio`-suffixed namespace. For example: + +```python +from azure.identity.aio import DefaultAzureCredential +from azure.monitor.querylogs.aio import LogsQueryClient + +credential = DefaultAzureCredential() +async_logs_query_client = LogsQueryClient(credential) +``` + +To use the asynchronous clients, you must also install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/). + +```sh +pip install aiohttp +``` + +#### Configure client for Azure sovereign cloud + +By default, the client is configured to use the Azure public cloud. To use a sovereign cloud, provide the correct `endpoint` argument when using `LogsQueryClient`. For example: + +```python +from azure.identity import AzureAuthorityHosts, DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient + +# Authority can also be set via the AZURE_AUTHORITY_HOST environment variable. +credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) + +logs_query_client = LogsQueryClient(credential, endpoint="https://api.loganalytics.us/v1") +``` + +### Execute the query + +For examples of Logs queries, see the [Examples](#examples) section. + +## Key concepts + +### Logs query rate limits and throttling + +The Log Analytics service applies throttling when the request rate is too high. Limits, such as the maximum number of rows returned, are also applied on the Kusto queries. For more information, see [Query API](https://learn.microsoft.com/azure/azure-monitor/service-limits#la-query-api). + +If you're executing a batch logs query, a throttled request returns a `LogsQueryError` object. That object's `code` value is `ThrottledError`. 
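+
+For example, here's a minimal sketch of detecting a throttled batch entry. This assumes `client` and a `requests` list of `LogsBatchQuery` objects like those in the [Batch logs query](#batch-logs-query) example below, with `LogsQueryError` imported from `azure.monitor.querylogs`:
+
+```python
+results = client.query_batch(requests)
+for result in results:
+    # A throttled query surfaces as a LogsQueryError whose code is "ThrottledError"
+    if isinstance(result, LogsQueryError) and result.code == "ThrottledError":
+        print(f"Query was throttled: {result.message}")
+```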
+ +## Examples + +- [Logs query](#logs-query) + - [Resource-centric logs query](#resource-centric-logs-query) + - [Specify timespan](#specify-timespan) + - [Handle logs query response](#handle-logs-query-response) +- [Batch logs query](#batch-logs-query) +- [Advanced logs query scenarios](#advanced-logs-query-scenarios) + - [Set logs query timeout](#set-logs-query-timeout) + - [Query multiple workspaces](#query-multiple-workspaces) + - [Include statistics](#include-statistics) + - [Include visualization](#include-visualization) + +### Logs query + +This example shows how to query a Log Analytics workspace. To handle the response and view it in a tabular form, the [`pandas`](https://pypi.org/project/pandas/) library is used. See the [samples][samples] if you choose not to use `pandas`. + +#### Resource-centric logs query + +The following example demonstrates how to query logs directly from an Azure resource without the use of a Log Analytics workspace. Here, the `query_resource` method is used instead of `query_workspace`. Instead of a workspace ID, an Azure resource identifier is passed in. For example, `/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider}/{resource-type}/{resource-name}`. + +```python +import os +import pandas as pd +from datetime import timedelta +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = """AzureActivity | take 5""" + +try: + response = client.query_resource(os.environ['LOGS_RESOURCE_ID'], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) +``` + +#### Specify timespan + +The `timespan` parameter specifies the time duration for which to query the data. This value can take one of the following forms: + +- a `timedelta` +- a `timedelta` and a start `datetime` +- a start `datetime`/end `datetime` + +For example: + +```python +import os +import pandas as pd +from datetime import datetime, timezone +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +from azure.identity import DefaultAzureCredential +from azure.core.exceptions import HttpResponseError + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = """AppRequests | take 5""" + +start_time=datetime(2021, 7, 2, tzinfo=timezone.utc) +end_time=datetime(2021, 7, 4, tzinfo=timezone.utc) + +try: + response = client.query_workspace( + workspace_id=os.environ['LOG_WORKSPACE_ID'], + query=query, + timespan=(start_time, end_time) + ) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) +``` + +#### Handle logs query response + +The `query_workspace` API returns either a `LogsQueryResult` or a `LogsQueryPartialResult` object.
The `batch_query` API returns a list that can contain `LogsQueryResult`, `LogsQueryPartialResult`, and `LogsQueryError` objects. Here's a hierarchy of the response: + +``` +LogsQueryResult +|---statistics +|---visualization +|---tables (list of `LogsTable` objects) + |---name + |---rows + |---columns + |---columns_types + +LogsQueryPartialResult +|---statistics +|---visualization +|---partial_error (a `LogsQueryError` object) + |---code + |---message + |---details + |---status +|---partial_data (list of `LogsTable` objects) + |---name + |---rows + |---columns + |---columns_types +``` + +The `LogsQueryResult` directly iterates over the table as a convenience. For example, to handle a logs query response with tables and display it using `pandas`: + +```python +response = client.query(...) +for table in response: + df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns]) +``` + +A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py). + +In a similar fashion, to handle a batch logs query response: + +```python +for result in response: + if result.status == LogsQueryStatus.SUCCESS: + for table in result: + df = pd.DataFrame(table.rows, columns=table.columns) + print(df) +``` + +A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py). + +### Batch logs query + +The following example demonstrates sending multiple queries at the same time using the batch query API. The queries can either be represented as a list of `LogsBatchQuery` objects or a dictionary. This example uses the former approach. + +```python +import os +from datetime import timedelta, datetime, timezone +import pandas as pd +from azure.monitor.querylogs import LogsQueryClient, LogsBatchQuery, LogsQueryStatus +from azure.identity import DefaultAzureCredential + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) +requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id=os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """bad query""", + timespan=timedelta(days=1), + workspace_id=os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id=os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2, tzinfo=timezone.utc), datetime(2021, 6, 5, tzinfo=timezone.utc)), # (start, end) + include_statistics=True + ), +] +results = client.query_batch(requests) + +for res in results: + if res.status == LogsQueryStatus.PARTIAL: + ## this will be a LogsQueryPartialResult + print(res.partial_error) + for table in res.partial_data: + df = pd.DataFrame(table.rows, columns=table.columns) + print(df) + elif res.status == LogsQueryStatus.SUCCESS: + ## this will be a LogsQueryResult + table = res.tables[0] + df = pd.DataFrame(table.rows, columns=table.columns) + print(df) + else: + # this will be a LogsQueryError + print(res.message) + +``` + +### Advanced logs query scenarios + +#### Set logs query timeout + +The following example shows setting a server timeout in seconds. A gateway timeout is raised if the query takes more time than the mentioned timeout. The default is 180 seconds and can be set up to 10 minutes (600 seconds). 
+ +```python +import os +from datetime import timedelta +from azure.monitor.querylogs import LogsQueryClient +from azure.identity import DefaultAzureCredential + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +response = client.query_workspace( + os.environ['LOG_WORKSPACE_ID'], + "range x from 1 to 10000000000 step 1 | count", + timespan=timedelta(days=1), + server_timeout=600 # sets the timeout to 10 minutes + ) +``` + +#### Query multiple workspaces + +The same logs query can be executed across multiple Log Analytics workspaces. In addition to the Kusto query, the following parameters are required: + +- `workspace_id` - The first (primary) workspace ID +- `additional_workspaces` - A list of workspaces, excluding the workspace provided in the `workspace_id` parameter. The parameter's list items can consist of the following identifier formats: + - Qualified workspace names + - Workspace IDs + - Azure resource IDs + +For example, the following query executes in three workspaces: + +```python +client.query_workspace( + <workspace_id>, + query, + timespan=timedelta(days=1), + additional_workspaces=['<workspace_id_2>', '<workspace_id_3>'] + ) +``` + +A full sample can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py). + +#### Include statistics + +To get logs query execution statistics, such as CPU and memory consumption: + +1. Set the `include_statistics` parameter to `True`. +1. Access the `statistics` field inside the `LogsQueryResult` object. + +The following example prints the query execution time: + +```python +query = "AzureActivity | top 10 by TimeGenerated" +result = client.query_workspace( + <workspace_id>, + query, + timespan=timedelta(days=1), + include_statistics=True + ) + +execution_time = result.statistics.get("query", {}).get("executionTime") +print(f"Query execution time: {execution_time}") +``` + +The `statistics` field is a `dict` that corresponds to the raw JSON response, and its structure can vary by query. The statistics are found within the `query` property. For example: + +```python +{ + "query": { + "executionTime": 0.0156478, + "resourceUsage": {...}, + "inputDatasetStatistics": {...}, + "datasetStatistics": [{...}] + } +} +``` + +#### Include visualization + +To get visualization data for logs queries using the [render operator](https://learn.microsoft.com/azure/data-explorer/kusto/query/renderoperator?pivots=azuremonitor): + +1. Set the `include_visualization` property to `True`. +1. Access the `visualization` field inside the `LogsQueryResult` object. + +For example: + +```python +query = ( + "StormEvents" + "| summarize event_count = count() by State" + "| where event_count > 10" + "| project State, event_count" + "| render columnchart" +) +result = client.query_workspace( + <workspace_id>, + query, + timespan=timedelta(days=1), + include_visualization=True + ) + +print(f"Visualization result: {result.visualization}") +``` + +The `visualization` field is a `dict` that corresponds to the raw JSON response, and its structure can vary by query.
For example: + +```python +{ + "visualization": "columnchart", + "title": "the chart title", + "accumulate": False, + "isQuerySorted": False, + "kind": None, + "legend": None, + "series": None, + "yMin": "NaN", + "yMax": "NaN", + "xAxis": None, + "xColumn": None, + "xTitle": "x axis title", + "yAxis": None, + "yColumns": None, + "ySplit": None, + "yTitle": None, + "anomalyColumns": None +} +``` + +Interpretation of the visualization data is left to the library consumer. To use this data with the [Plotly graphing library](https://plotly.com/python/), see the [synchronous](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_visualization.py) or [asynchronous](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_query_visualization_async.py) code samples. + +## Troubleshooting + +See our [troubleshooting guide][troubleshooting_guide] for details on how to diagnose various failure scenarios. + +## Next steps + +To learn more about Azure Monitor, see the [Azure Monitor service documentation][azure_monitor_overview]. + +### Samples + +The following code samples show common scenarios with the Azure Monitor Query Logs client library. + +#### Logs query samples + +- [Send a single query with LogsQueryClient and handle the response as a table](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_single_query_async.py)) +- [Send a single query with LogsQueryClient and handle the response in key-value form](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_key_value_form.py) +- [Send a single query with LogsQueryClient without pandas](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_single_log_query_without_pandas.py) +- [Send a single query with LogsQueryClient across multiple workspaces](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py) +- [Send multiple queries with LogsQueryClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py) +- [Send a single query with LogsQueryClient using server timeout](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_server_timeout.py) + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit [cla.microsoft.com][cla]. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repositories using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, see the [Code of Conduct FAQ][coc_faq] or contact [opencode@microsoft.com][coc_contact] with any additional questions or comments.
+ + + +[azure_core_exceptions]: https://aka.ms/azsdk/python/core/docs#module-azure.core.exceptions +[azure_core_ref_docs]: https://aka.ms/azsdk/python/core/docs +[azure_monitor_create_using_portal]: https://learn.microsoft.com/azure/azure-monitor/logs/quick-create-workspace +[azure_monitor_overview]: https://learn.microsoft.com/azure/azure-monitor/ +[azure_subscription]: https://azure.microsoft.com/free/python/ +[changelog]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-querylogs/CHANGELOG.md +[kusto_query_language]: https://learn.microsoft.com/azure/data-explorer/kusto/query/ +[package]: https://aka.ms/azsdk-python-monitor-query-pypi +[pip]: https://pypi.org/project/pip/ +[python_logging]: https://docs.python.org/3/library/logging.html +[python-query-ref-docs]: https://aka.ms/azsdk/python/monitor-querylogs/docs +[samples]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-querylogs/samples +[source]: https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/ +[troubleshooting_guide]: https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/TROUBLESHOOTING.md + +[cla]: https://cla.microsoft.com +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ +[coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/ +[coc_contact]: mailto:opencode@microsoft.com diff --git a/sdk/monitor/azure-monitor-querylogs/TROUBLESHOOTING.md b/sdk/monitor/azure-monitor-querylogs/TROUBLESHOOTING.md new file mode 100644 index 000000000000..00f60bba8306 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/TROUBLESHOOTING.md @@ -0,0 +1,176 @@ +# Troubleshooting Azure Monitor Query Logs client library issues + +This troubleshooting guide contains instructions to diagnose frequently encountered issues while using the Azure Monitor Query Logs client library for Python. + +## Table of contents + +* [General Troubleshooting](#general-troubleshooting) + * [Enable client logging](#enable-client-logging) + * [Troubleshooting authentication issues with logs query requests](#authentication-errors) + * [Troubleshooting running async APIs](#errors-with-running-async-apis) +* [Troubleshooting Logs Query](#troubleshooting-logs-query) + * [Troubleshooting insufficient access error](#troubleshooting-insufficient-access-error-for-logs-query) + * [Troubleshooting invalid Kusto query](#troubleshooting-invalid-kusto-query) + * [Troubleshooting empty log query results](#troubleshooting-empty-log-query-results) + * [Troubleshooting server timeouts when executing logs query request](#troubleshooting-server-timeouts-when-executing-logs-query-request) + * [Troubleshooting partially successful logs query requests](#troubleshooting-partially-successful-logs-query-requests) +* [Additional azure-core configurations](#additional-azure-core-configurations) + +## General Troubleshooting + +Monitor query raises exceptions described in [`azure-core`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md) + +### Enable client logging + +To troubleshoot issues with Azure Monitor Query Logs library, it is important to first enable logging to monitor the behavior of the application. The errors and warnings in the logs generally provide useful insights into what went wrong and sometimes include corrective actions to fix issues. + +This library uses the standard [logging](https://docs.python.org/3/library/logging.html) library for logging. 
Basic information about HTTP sessions, such as URLs and headers, is logged at the INFO level. +Detailed DEBUG level logging, including request/response bodies and unredacted headers, can be enabled on a client with the `logging_enable` argument: + +```python +import logging +import sys + +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient + +# Create a logger for the 'azure.monitor.querylogs' SDK +logger = logging.getLogger('azure.monitor.querylogs') +logger.setLevel(logging.DEBUG) + +# Configure a console output +handler = logging.StreamHandler(stream=sys.stdout) +logger.addHandler(handler) + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential, logging_enable=True) +``` + +Similarly, `logging_enable` can enable detailed logging for a single operation, even when it isn't enabled for the client: + +```python +client.query_workspace(logging_enable=True) +``` + +### Authentication errors + +Azure Monitor Query Logs supports Azure Active Directory authentication. The `LogsQueryClient` requires a `credential` when it's constructed. To provide a valid credential, you can use the +`azure-identity` package. For more details on getting started, refer to +the [README](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-querylogs#create-the-client) +of the Azure Monitor Query Logs library. You can also refer to +the [Azure Identity documentation](https://learn.microsoft.com/python/api/overview/azure/identity-readme) +for more details on the various types of credentials supported in `azure-identity`. + +For more help on troubleshooting authentication errors, please see the Azure Identity client library [troubleshooting guide](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/identity/azure-identity/TROUBLESHOOTING.md). + +### Errors with running async APIs + +The async transport is designed to be opt-in. [AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately as follows: + +``` +pip install aiohttp +``` + +## Troubleshooting Logs Query + +### Troubleshooting insufficient access error for logs query + +If you get an HTTP error with status code 403 (Forbidden), it means that the provided credentials do not have +sufficient permissions to query the workspace. +```text +"{"error":{"message":"The provided credentials have insufficient access to perform the requested operation","code":"InsufficientAccessError","correlationId":""}}" +``` + +1. Check that the application or user that is making the request has sufficient permissions: + * You can refer to this document to [manage access to workspaces](https://learn.microsoft.com/azure/azure-monitor/logs/manage-access#manage-access-using-workspace-permissions) +2. If the user or application is granted sufficient privileges to query the workspace, make sure you are + authenticating as that user/application. If you are authenticating using the + [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential), + then check the logs to verify that the credential used is the one you expected. To enable logging, see the [enable + client logging](#enable-client-logging) section above. + +For more help on troubleshooting authentication errors, please see the Azure Identity client library [troubleshooting guide](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/identity/azure-identity/TROUBLESHOOTING.md).
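+
+As a minimal sketch (assuming `client`, `workspace_id`, and `query` are already defined), you can detect this condition in code by catching the `HttpResponseError` and checking its status code:
+
+```python
+from azure.core.exceptions import HttpResponseError
+
+try:
+    client.query_workspace(workspace_id, query, timespan=None)
+except HttpResponseError as err:
+    if err.status_code == 403:
+        # InsufficientAccessError: the identity lacks permission to query this workspace
+        print(f"Insufficient access: {err.message}")
+```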
+ +### Troubleshooting invalid Kusto query + +If you get an HTTP error with status code 400 (Bad Request), you may have an error in your Kusto query and you'll +see an error message similar to the one below. + +```text +(BadArgumentError) The request had some invalid properties +Code: BadArgumentError +Message: The request had some invalid properties +Inner error: { + "code": "SemanticError", + "message": "A semantic error occurred.", + "innererror": { + "code": "SEM0100", + "message": "'take' operator: Failed to resolve table or column expression named 'Appquests'" + } +} +``` + +The error message in the `innererror` may indicate where the Kusto query has an error. You may also refer to the [Kusto Query Language](https://learn.microsoft.com/azure/data-explorer/kusto/query) reference docs to learn more about querying logs using KQL. + +### Troubleshooting empty log query results + +If your Kusto query returns no logs, please validate the following: + +- You have the right workspace ID +- You are setting the correct time interval for the query. Try expanding the time interval for your query to see if that + returns any results. +- If your Kusto query also has a time interval, the query is evaluated for the intersection of the time interval in the + query string and the time interval set in the `timespan` param provided to the query API. The intersection of + these time intervals may not have any logs. To avoid any confusion, it's recommended to remove any time interval in + the Kusto query string and use `timespan` explicitly. Please note that the `timespan` param can be a timedelta, + a timedelta and a start datetime, or a start datetime/end datetime. + +### Troubleshooting server timeouts when executing logs query request + +Some complex Kusto queries can take a long time to complete, and such queries are aborted by the +service if they run for more than 3 minutes. For such scenarios, the query APIs on `LogsQueryClient` provide options to +configure the timeout on the server. The server timeout can be extended up to 10 minutes. + +You may see an error as follows: + +```text +Code: GatewayTimeout +Message: Gateway timeout +Inner error: { + "code": "GatewayTimeout", + "message": "Unable to unzip response" +} +``` + +The following code shows how to set the server timeout to 10 minutes. By setting this server +timeout, the Azure Monitor Query Logs library also automatically extends the client-side timeout to wait 10 minutes for +the server to respond, so you don't need to configure the response timeout on your HTTP transport separately. + +```python +from azure.monitor.querylogs import LogsQueryClient +from azure.identity import DefaultAzureCredential + +credential = DefaultAzureCredential() + +client = LogsQueryClient(credential) + +client.query_workspace( + "{workspaceId}", + "{kusto-query-string}", + timespan="{timespan}", + server_timeout=600) +``` + +### Troubleshooting partially successful logs query requests + +If the execution of a Kusto query results in a partially successful response, the Azure Monitor Query Logs +client library returns `partial_data` and `partial_error`, indicating that the query was not fully successful.
+ +```python +response = client.query_workspace("{workspaceId}", "{kusto-query-string}", timespan="{timespan}") + +data = response.partial_data +error = response.partial_error +``` + +## Additional azure-core configurations + +When calling the methods, some properties including `retry_mode`, `timeout`, `connection_verify` can be configured by passing in as keyword arguments. See +[configurations](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md#configurations) for list of all such properties. diff --git a/sdk/monitor/azure-monitor-querylogs/_metadata.json b/sdk/monitor/azure-monitor-querylogs/_metadata.json new file mode 100644 index 000000000000..4cca976b742b --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "2022-10-27" +} \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-querylogs/apiview-properties.json b/sdk/monitor/azure-monitor-querylogs/apiview-properties.json new file mode 100644 index 000000000000..229bcbb3818d --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/apiview-properties.json @@ -0,0 +1,23 @@ +{ + "CrossLanguagePackageId": "MonitorQueryLogs", + "CrossLanguageDefinitionId": { + "azure.monitor.querylogs._generated.models.BatchQueryRequest": "MonitorQueryLogs.BatchQueryRequest", + "azure.monitor.querylogs._generated.models.BatchQueryResponse": "MonitorQueryLogs.BatchQueryResponse", + "azure.monitor.querylogs._generated.models.BatchQueryResults": "MonitorQueryLogs.BatchQueryResults", + "azure.monitor.querylogs._generated.models.BatchRequest": "MonitorQueryLogs.BatchRequest", + "azure.monitor.querylogs._generated.models.BatchResponse": "MonitorQueryLogs.BatchResponse", + "azure.monitor.querylogs._generated.models.Column": "MonitorQueryLogs.Column", + "azure.monitor.querylogs._generated.models.ErrorDetail": "MonitorQueryLogs.ErrorDetail", + "azure.monitor.querylogs._generated.models.ErrorInfo": "MonitorQueryLogs.ErrorInfo", + "azure.monitor.querylogs._generated.models.QueryBody": "MonitorQueryLogs.QueryBody", + "azure.monitor.querylogs._generated.models.QueryResults": "MonitorQueryLogs.QueryResults", + "azure.monitor.querylogs._generated.models.Table": "MonitorQueryLogs.Table", + "azure.monitor.querylogs._generated.models.ColumnDataType": "MonitorQueryLogs.ColumnDataType", + "azure.monitor.querylogs._generated.MonitorQueryLogsClient.execute": "ClientCustomizations.MonitorQueryLogsClient.execute", + "azure.monitor.querylogs._generated.aio.MonitorQueryLogsClient.execute": "ClientCustomizations.MonitorQueryLogsClient.execute", + "azure.monitor.querylogs._generated.MonitorQueryLogsClient.execute_with_resource_id": "ClientCustomizations.MonitorQueryLogsClient.executeWithResourceId", + "azure.monitor.querylogs._generated.aio.MonitorQueryLogsClient.execute_with_resource_id": "ClientCustomizations.MonitorQueryLogsClient.executeWithResourceId", + "azure.monitor.querylogs._generated.MonitorQueryLogsClient.batch": "ClientCustomizations.MonitorQueryLogsClient.batch", + "azure.monitor.querylogs._generated.aio.MonitorQueryLogsClient.batch": "ClientCustomizations.MonitorQueryLogsClient.batch" + } +} \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-querylogs/assets.json b/sdk/monitor/azure-monitor-querylogs/assets.json new file mode 100644 index 000000000000..21a1cd025d65 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/assets.json @@ -0,0 +1,6 @@ +{ + "AssetsRepo": "Azure/azure-sdk-assets", + "AssetsRepoPrefixPath": "python", + "TagPrefix": 
"python/monitor/azure-monitor-querylogs", + "Tag": "python/monitor/azure-monitor-querylogs_a1d1de506b" +} diff --git a/sdk/monitor/azure-monitor-querylogs/azure/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/__init__.py new file mode 100644 index 000000000000..a1916a57336c --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/__init__.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from ._logs_query_client import LogsQueryClient + +from ._enums import LogsQueryStatus +from ._exceptions import LogsQueryError +from ._models import ( + LogsQueryResult, + LogsTable, + LogsQueryPartialResult, + LogsTableRow, + LogsBatchQuery, +) + +from ._version import VERSION + +__all__ = [ + "LogsQueryClient", + "LogsQueryResult", + "LogsQueryPartialResult", + "LogsQueryStatus", + "LogsQueryError", + "LogsTable", + "LogsTableRow", + "LogsBatchQuery", +] + +__version__ = VERSION diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_enums.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_enums.py new file mode 100644 index 000000000000..de70f1bc5240 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_enums.py @@ -0,0 +1,18 @@ +# +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# cspell:ignore milli +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta + + +class LogsQueryStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of the result object.""" + + PARTIAL = "PartialError" + SUCCESS = "Success" + FAILURE = "Failure" diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_exceptions.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_exceptions.py new file mode 100644 index 000000000000..d511208a8ac2 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_exceptions.py @@ -0,0 +1,55 @@ +# +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import sys +from typing import Any, List, Optional, Literal + +from ._enums import LogsQueryStatus + +if sys.version_info >= (3, 9): + from collections.abc import Mapping +else: + from typing import Mapping + + +JSON = Mapping[str, Any] # pylint: disable=unsubscriptable-object + + +class LogsQueryError: + """The code and message for an error.""" + + code: str + """A machine readable error code.""" + message: str + """A human readable error message.""" + details: Optional[List[JSON]] = None + """A list of additional details about the error.""" + status: Literal[LogsQueryStatus.FAILURE] + """Status for error item when iterating over list of results. Always "Failure" for an instance of a + LogsQueryError.""" + + def __init__(self, **kwargs: Any) -> None: + self.code = kwargs.get("code", "") + self.message = kwargs.get("message", "") + self.details = kwargs.get("details", None) + self.status = LogsQueryStatus.FAILURE + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def _from_generated(cls, generated): + if not generated: + return None + innererror = generated + while innererror.get("innererror"): + innererror = innererror["innererror"] + message = innererror.get("message") + return cls( + code=generated.get("code"), + message=message, + details=generated.get("details"), + ) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/__init__.py new file mode 100644 index 000000000000..6b026266ea45 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import MonitorQueryLogsClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "MonitorQueryLogsClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_client.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_client.py new file mode 100644 index 000000000000..88f6f435970d --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_client.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import MonitorQueryLogsClientConfiguration +from ._operations._operations import _MonitorQueryLogsClientOperationsMixin +from ._utils.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class MonitorQueryLogsClient(_MonitorQueryLogsClientOperationsMixin): + """MonitorQueryLogsClient. + + :param endpoint: The Log Analytics service endpoint. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is "2022-10-27". + Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = MonitorQueryLogsClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_configuration.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_configuration.py new file mode 100644 index 000000000000..a565460f9c8c --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_configuration.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class MonitorQueryLogsClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for MonitorQueryLogsClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: The Log Analytics service endpoint. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is "2022-10-27". + Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2022-10-27") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://api.loganalytics.io/.default"]) + kwargs.setdefault("sdk_moniker", "monitor-querylogs/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_operations.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_operations.py new file mode 100644 index 000000000000..3877a9356b65 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_operations.py @@ -0,0 +1,651 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .. import models as _models +from .._configuration import MonitorQueryLogsClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize +from .._utils.serialization import Serializer +from .._utils.utils import ClientMixinABC + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_monitor_query_logs_execute_request( + workspace_id: str, *, prefer: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-27")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/workspaces/{workspaceId}/query" + path_format_arguments = { + "workspaceId": _SERIALIZER.url("workspace_id", workspace_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if prefer is not None: + _headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_monitor_query_logs_execute_with_resource_id_request( # pylint: disable=name-too-long + resource_id: str, *, prefer: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-27")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{resourceId}/query" + path_format_arguments = { + "resourceId": _SERIALIZER.url("resource_id", resource_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if prefer is not None: + _headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_monitor_query_logs_batch_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-27")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/$batch" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class _MonitorQueryLogsClientOperationsMixin( + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], MonitorQueryLogsClientConfiguration] +): + + @overload + def execute( + self, + workspace_id: str, + body: _models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def execute( + self, + workspace_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: JSON + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. 
+ :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def execute( + self, + workspace_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def execute( + self, + workspace_id: str, + body: Union[_models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. 
The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_request( + workspace_id=workspace_id, + prefer=prefer, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QueryResults, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def execute_with_resource_id( + self, + resource_id: str, + body: _models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. 
The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def execute_with_resource_id( + self, + resource_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: JSON + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def execute_with_resource_id( + self, + resource_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def execute_with_resource_id( + self, + resource_id: str, + body: Union[_models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. 
The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_with_resource_id_request( + resource_id=resource_id, + prefer=prefer, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QueryResults, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def batch( + self, body: _models.BatchRequest, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Required. + :type body: ~azure.monitor.querylogs._generated.models.BatchRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs: Any) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BatchResponse. 
The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def batch(self, body: Union[_models.BatchRequest, JSON, IO[bytes]], **kwargs: Any) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Is one of the following types: BatchRequest, JSON, + IO[bytes] Required. + :type body: ~azure.monitor.querylogs._generated.models.BatchRequest or JSON or IO[bytes] + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models.BatchResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_batch_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BatchResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git 
a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_patch.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_patch.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
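A usage sketch for the execute and batch operations defined above, assuming a client constructed as in the earlier configuration sketch. The request body shapes (the "query"/"timespan" keys and the batch "requests" envelope) and the Prefer header format are assumptions inferred from the docstrings, not taken verbatim from the models in this patch.

# Hypothetical calls against the mixin's execute/batch operations shown above.
results = client.execute(
    workspace_id="<workspace-id>",
    body={"query": "AppRequests | take 5", "timespan": "PT1H"},  # JSON overload
    prefer="wait=180",  # server-timeout hint via the Prefer header (format assumed)
)
for table in results.get("tables", []):  # QueryResults is MutableMapping-compatible
    print(table.get("name"), len(table.get("rows", [])))

batch_response = client.batch(
    body={
        "requests": [  # envelope shape assumed from the BatchRequest model
            {
                "id": "1",
                "workspace": "<workspace-id>",
                "body": {"query": "AppRequests | count", "timespan": "P1D"},
            }
        ]
    }
)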
+# -------------------------------------------------------------------------- diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/model_base.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/model_base.py new file mode 100644 index 000000000000..49d5c7259389 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/model_base.py @@ -0,0 +1,1232 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds 
in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
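To make the datetime and duration helpers above concrete, here is a small standalone round-trip sketch; the import path is taken from this patch's _utils package, and the expected output strings follow the serialization rules shown above.

# Serialize with SdkJSONEncoder, then parse an ISO-8601 timestamp back.
import json
from datetime import datetime, timedelta, timezone

from azure.monitor.querylogs._generated._utils.model_base import (
    SdkJSONEncoder,
    _deserialize_datetime,
)

payload = {
    "ts": datetime(2025, 7, 8, 2, 15, 47, tzinfo=timezone.utc),
    "duration": timedelta(days=4, hours=12, minutes=30, seconds=5),
}
print(json.dumps(payload, cls=SdkJSONEncoder))
# expected: {"ts": "2025-07-08T02:15:47Z", "duration": "P4DT12H30M05S"}

parsed = _deserialize_datetime("2025-07-08T02:15:47Z")  # timezone-aware datetime in UTC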
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + 
:rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. 
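Because the generated models wrap this mapping type, instances behave like plain dictionaries. A brief sketch of that access pattern follows; the QueryResults instance is assumed to come from an execute call like the one sketched earlier, and the "tables"/"statistics" keys are used for illustration only.

# Dict-style access on a generated model; every method above delegates to the wrapped dict.
print("tables" in results)                 # __contains__
tables = results.get("tables", [])         # get() with a default
results.setdefault("statistics", {})       # setdefault, as documented above
for key, value in results.items():         # items() view over the raw payload
    print(key, type(value))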
+ :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = 
prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. + raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", 
{}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. + :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: 
typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+ if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + if annotation._name == "Dict": # pyright: ignore + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] 
= None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: 
+ return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: 
typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/serialization.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. 
+ _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. 
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+ :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." 
in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. 
+ + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. 
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. 
+ + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/utils.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/utils.py new file mode 100644 index 000000000000..35c9c836f85f --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_utils/utils.py @@ -0,0 +1,25 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Generic, TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from .serialization import Deserializer, Serializer + + +TClient = TypeVar("TClient") +TConfig = TypeVar("TConfig") + + +class ClientMixinABC(ABC, Generic[TClient, TConfig]): + """DO NOT use this class. It is for internal typing use only.""" + + _client: TClient + _config: TConfig + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_version.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/__init__.py new file mode 100644 index 000000000000..d984b517c76e --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
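For reference, the static helpers on the vendored Deserializer above map the wire formats registered in deserialize_type ("iso-8601", "unix-time", "duration", ...) onto Python objects. A minimal sketch, assuming the import path taken from the file location in this diff; these utilities are internal to the generated package, not public API:

    from azure.monitor.querylogs._generated._utils.serialization import Deserializer

    # ISO-8601 timestamps ("iso-8601" wire format) become timezone-aware datetimes;
    # deserialize_iso truncates fractional seconds beyond six digits before parsing.
    print(Deserializer.deserialize_iso("2022-10-27T12:30:45.1234567Z"))

    # Unix epoch seconds ("unix-time" wire format) are interpreted as UTC.
    print(Deserializer.deserialize_unix(1666873845))

    # ISO-8601 durations ("duration" wire format) parse via isodate.
    print(Deserializer.deserialize_duration("PT1H30M"))
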
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import MonitorQueryLogsClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "MonitorQueryLogsClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_client.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_client.py new file mode 100644 index 000000000000..d4bdcc327074 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_client.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import MonitorQueryLogsClientConfiguration +from ._operations._operations import _MonitorQueryLogsClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class MonitorQueryLogsClient(_MonitorQueryLogsClientOperationsMixin): + """MonitorQueryLogsClient. + + :param endpoint: The Log Analytics service endpoint. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is "2022-10-27". + Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = MonitorQueryLogsClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_configuration.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_configuration.py new file mode 100644 index 000000000000..e16e3efa6298 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_configuration.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
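A minimal usage sketch for the async client defined above. The endpoint and workspace ID are placeholder assumptions, the Prefer header grammar is service-defined (the value shown is illustrative), and applications would normally go through the package's public client rather than this _generated one:

    import asyncio
    from azure.identity.aio import DefaultAzureCredential
    from azure.monitor.querylogs._generated.aio import MonitorQueryLogsClient

    async def main() -> None:
        async with DefaultAzureCredential() as credential:
            async with MonitorQueryLogsClient("https://api.loganalytics.io/v1", credential) as client:
                # JSON overload of execute(); the "query" key follows the Analytics
                # query body described in the docstrings above.
                results = await client.execute(
                    "<workspace-id>",
                    {"query": "AzureActivity | take 5"},
                    prefer="wait=60",  # assumed example of a server-timeout preference
                )
                print(results)  # QueryResults is MutableMapping-compatible per the docstrings

    asyncio.run(main())
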
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class MonitorQueryLogsClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for MonitorQueryLogsClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: The Log Analytics service endpoint. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is "2022-10-27". + Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2022-10-27") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://api.loganalytics.io/.default"]) + kwargs.setdefault("sdk_moniker", "monitor-querylogs/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
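The configuration class above is where the default credential scope ("https://api.loganalytics.io/.default") and API version are applied; because the client constructor forwards its keyword arguments to it, both can be overridden at construction time. A sketch under the assumption of a hypothetical sovereign-cloud endpoint and scope:

    from azure.identity.aio import DefaultAzureCredential
    from azure.monitor.querylogs._generated.aio import MonitorQueryLogsClient

    # Constructor kwargs flow into MonitorQueryLogsClientConfiguration above.
    # The ".us" endpoint and scope are illustrative placeholders only.
    client = MonitorQueryLogsClient(
        "https://api.loganalytics.us/v1",
        DefaultAzureCredential(),
        credential_scopes=["https://api.loganalytics.us/.default"],  # overrides the default scope
        api_version="2022-10-27",  # the documented default, passed explicitly here
    )
    # Close the client and credential when done (see the context-manager usage earlier).
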
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_operations.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_operations.py new file mode 100644 index 000000000000..3cfae4b784b8 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_operations.py @@ -0,0 +1,573 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core import AsyncPipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ... import models as _models +from ..._operations._operations import ( + build_monitor_query_logs_batch_request, + build_monitor_query_logs_execute_request, + build_monitor_query_logs_execute_with_resource_id_request, +) +from ..._utils.model_base import SdkJSONEncoder, _deserialize +from ..._utils.utils import ClientMixinABC +from .._configuration import MonitorQueryLogsClientConfiguration + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class _MonitorQueryLogsClientOperationsMixin( + ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], MonitorQueryLogsClientConfiguration] +): + + @overload + async def execute( + self, + workspace_id: str, + body: _models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody + :keyword prefer: Optional. 
The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def execute( + self, + workspace_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: JSON + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def execute( + self, + workspace_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def execute( + self, + workspace_id: str, + body: Union[_models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. 
+ :type body: ~azure.monitor.querylogs._generated.models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_request( + workspace_id=workspace_id, + prefer=prefer, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QueryResults, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def execute_with_resource_id( + self, + resource_id: str, + body: _models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. 
The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def execute_with_resource_id( + self, + resource_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: JSON + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def execute_with_resource_id( + self, + resource_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :type body: IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def execute_with_resource_id( + self, + resource_id: str, + body: Union[_models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. + + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the `Analytics query + syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. + :type body: ~azure.monitor.querylogs._generated.models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. 
The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_with_resource_id_request( + resource_id=resource_id, + prefer=prefer, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QueryResults, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def batch( + self, body: _models.BatchRequest, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Required. + :type body: ~azure.monitor.querylogs._generated.models.BatchRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def batch( + self, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: BatchResponse. 
The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def batch( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def batch(self, body: Union[_models.BatchRequest, JSON, IO[bytes]], **kwargs: Any) -> _models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Is one of the following types: BatchRequest, JSON, + IO[bytes] Required. + :type body: ~azure.monitor.querylogs._generated.models.BatchRequest or JSON or IO[bytes] + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.querylogs._generated.models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models.BatchResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_batch_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BatchResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + 
return deserialized # type: ignore diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_patch.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_patch.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/__init__.py new file mode 100644 index 000000000000..9f0b4bd534f0 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/__init__.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
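To round out the operations mixin, a sketch of the JSON overload of batch() defined above. The request shape mirrors the BatchRequest/BatchQueryRequest models added later in this diff; the endpoint, workspace IDs, and queries are placeholders:

    import asyncio
    from azure.identity.aio import DefaultAzureCredential
    from azure.monitor.querylogs._generated.aio import MonitorQueryLogsClient

    async def run_batch() -> None:
        async with DefaultAzureCredential() as credential:
            async with MonitorQueryLogsClient("https://api.loganalytics.io/v1", credential) as client:
                response = await client.batch(
                    {
                        "requests": [
                            {"id": "1", "workspace": "<workspace-id>", "body": {"query": "AzureActivity | take 5"}},
                            {"id": "2", "workspace": "<workspace-id>", "body": {"query": "Heartbeat | count"}},
                        ]
                    }
                )
                print(response)  # BatchResponse is MutableMapping-compatible per the docstrings

    asyncio.run(run_batch())
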
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + BatchQueryRequest, + BatchQueryResponse, + BatchQueryResults, + BatchRequest, + BatchResponse, + Column, + ErrorDetail, + ErrorInfo, + QueryBody, + QueryResults, + Table, +) + +from ._enums import ( # type: ignore + ColumnDataType, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchQueryRequest", + "BatchQueryResponse", + "BatchQueryResults", + "BatchRequest", + "BatchResponse", + "Column", + "ErrorDetail", + "ErrorInfo", + "QueryBody", + "QueryResults", + "Table", + "ColumnDataType", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_enums.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_enums.py new file mode 100644 index 000000000000..9ff306417f47 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_enums.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class ColumnDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The data type of a column.""" + + BOOL = "bool" + """Boolean data type""" + DATETIME = "datetime" + """DateTime data type""" + DYNAMIC = "dynamic" + """Dynamic data type""" + INT = "int" + """Integer data type""" + LONG = "long" + """Long integer data type""" + REAL = "real" + """Real/floating point data type""" + STRING = "string" + """String data type""" + GUID = "guid" + """GUID data type""" + DECIMAL = "decimal" + """Decimal data type""" + TIMESPAN = "timespan" + """Timespan data type""" diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_models.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_models.py new file mode 100644 index 000000000000..a70dbd145563 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_models.py @@ -0,0 +1,494 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .._utils.model_base import Model as _Model, rest_field + +if TYPE_CHECKING: + from .. 
import models as _models + + +class BatchQueryRequest(_Model): + """A single request in a batch. + + :ivar id: Unique ID corresponding to each request in the batch. Required. + :vartype id: str + :ivar headers: Headers of the request. Can use prefer header to set server timeout and to + query statistics and visualization information. + :vartype headers: dict[str, str] + :ivar body: The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required. + :vartype body: ~azure.monitor.querylogs._generated.models.QueryBody + :ivar path: The query path of a single request in a batch, defaults to /query. Default value is + "/query". + :vartype path: str + :ivar method: The method of a single request in a batch, defaults to POST. Default value is + "POST". + :vartype method: str + :ivar workspace: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :vartype workspace: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID corresponding to each request in the batch. Required.""" + headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Headers of the request. Can use prefer header to set server timeout and to + query statistics and visualization information.""" + body: "_models.QueryBody" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The Analytics query. Learn more about the `Analytics query + syntax + `_. + Required.""" + path: Optional[Literal["/query"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The query path of a single request in a batch, defaults to /query. Default value is \"/query\".""" + method: Optional[Literal["POST"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The method of a single request in a batch, defaults to POST. Default value is \"POST\".""" + workspace: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Primary Workspace ID of the query. This is the Workspace ID from the Properties + blade in the Azure portal. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + body: "_models.QueryBody", + workspace: str, + headers: Optional[Dict[str, str]] = None, + path: Optional[Literal["/query"]] = None, + method: Optional[Literal["POST"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BatchQueryResponse(_Model): + """Contains the batch query response and the headers, id, and status of the request. + + :ivar id: Unique ID corresponding to each request in the batch. + :vartype id: str + :ivar status: The HTTP status code of the response. + :vartype status: int + :ivar body: Contains the tables, columns & rows resulting from a query. + :vartype body: ~azure.monitor.querylogs._generated.models.BatchQueryResults + :ivar headers: Dictionary of . 
+ :vartype headers: dict[str, str] + """ + + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID corresponding to each request in the batch.""" + status: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The HTTP status code of the response.""" + body: Optional["_models.BatchQueryResults"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Contains the tables, columns & rows resulting from a query.""" + headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Dictionary of .""" + + @overload + def __init__( + self, + *, + id: Optional[str] = None, # pylint: disable=redefined-builtin + status: Optional[int] = None, + body: Optional["_models.BatchQueryResults"] = None, + headers: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BatchQueryResults(_Model): + """Contains the tables, columns & rows resulting from a query. + + :ivar tables: The results of the query in tabular format. + :vartype tables: list[~azure.monitor.querylogs._generated.models.Table] + :ivar statistics: Statistics represented in JSON format. + :vartype statistics: dict[str, any] + :ivar render: Visualization data in JSON format. + :vartype render: dict[str, any] + :ivar error: The code and message for an error. + :vartype error: ~azure.monitor.querylogs._generated.models.ErrorInfo + """ + + tables: Optional[List["_models.Table"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The results of the query in tabular format.""" + statistics: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Statistics represented in JSON format.""" + render: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Visualization data in JSON format.""" + error: Optional["_models.ErrorInfo"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The code and message for an error.""" + + @overload + def __init__( + self, + *, + tables: Optional[List["_models.Table"]] = None, + statistics: Optional[Dict[str, Any]] = None, + render: Optional[Dict[str, Any]] = None, + error: Optional["_models.ErrorInfo"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BatchRequest(_Model): + """An array of requests. + + :ivar requests: An single request in a batch. Required. + :vartype requests: list[~azure.monitor.querylogs._generated.models.BatchQueryRequest] + """ + + requests: List["_models.BatchQueryRequest"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An single request in a batch. Required.""" + + @overload + def __init__( + self, + *, + requests: List["_models.BatchQueryRequest"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BatchResponse(_Model): + """Response to a batch query. + + :ivar responses: An array of responses corresponding to each individual request in a batch. + :vartype responses: list[~azure.monitor.querylogs._generated.models.BatchQueryResponse] + """ + + responses: Optional[List["_models.BatchQueryResponse"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """An array of responses corresponding to each individual request in a batch.""" + + @overload + def __init__( + self, + *, + responses: Optional[List["_models.BatchQueryResponse"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Column(_Model): + """A column in a table. + + :ivar name: The name of this column. Required. + :vartype name: str + :ivar type: The data type of this column. Required. Known values are: "bool", "datetime", + "dynamic", "int", "long", "real", "string", "guid", "decimal", and "timespan". + :vartype type: str or ~azure.monitor.querylogs._generated.models.ColumnDataType + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of this column. Required.""" + type: Union[str, "_models.ColumnDataType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The data type of this column. Required. Known values are: \"bool\", \"datetime\", \"dynamic\", + \"int\", \"long\", \"real\", \"string\", \"guid\", \"decimal\", and \"timespan\".""" + + @overload + def __init__( + self, + *, + name: str, + type: Union[str, "_models.ColumnDataType"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorDetail(_Model): + """Error details. + + :ivar code: The error's code. Required. + :vartype code: str + :ivar message: A human readable error message. Required. + :vartype message: str + :ivar target: Indicates which property in the request is responsible for the error. + :vartype target: str + :ivar value: Indicates which value in 'target' is responsible for the error. + :vartype value: str + :ivar resources: Indicates resources which were responsible for the error. + :vartype resources: list[str] + :ivar additional_properties: Additional properties that can be provided on the error details + object. + :vartype additional_properties: dict[str, any] + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error's code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human readable error message. 
Required.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates which property in the request is responsible for the error.""" + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates which value in 'target' is responsible for the error.""" + resources: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates resources which were responsible for the error.""" + additional_properties: Optional[Dict[str, Any]] = rest_field( + name="additionalProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Additional properties that can be provided on the error details object.""" + + @overload + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + value: Optional[str] = None, + resources: Optional[List[str]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorInfo(_Model): + """The code and message for an error. + + :ivar code: A machine readable error code. Required. + :vartype code: str + :ivar message: A human readable error message. Required. + :vartype message: str + :ivar details: error details. + :vartype details: list[~azure.monitor.querylogs._generated.models.ErrorDetail] + :ivar innererror: Inner error details if they exist. + :vartype innererror: ~azure.monitor.querylogs._generated.models.ErrorInfo + :ivar additional_properties: Additional properties that can be provided on the error info + object. + :vartype additional_properties: dict[str, any] + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A machine readable error code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human readable error message. Required.""" + details: Optional[List["_models.ErrorDetail"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """error details.""" + innererror: Optional["_models.ErrorInfo"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Inner error details if they exist.""" + additional_properties: Optional[Dict[str, Any]] = rest_field( + name="additionalProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Additional properties that can be provided on the error info object.""" + + @overload + def __init__( + self, + *, + code: str, + message: str, + details: Optional[List["_models.ErrorDetail"]] = None, + innererror: Optional["_models.ErrorInfo"] = None, + additional_properties: Optional[Dict[str, Any]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class QueryBody(_Model): + """The Analytics query. Learn more about the `Analytics query + syntax + `_. + + :ivar query: The query to execute. Required. + :vartype query: str + :ivar timespan: Optional. The timespan over which to query data. This is an ISO8601 time period + value. 
This timespan is applied in addition to any that are specified in the + query expression. + :vartype timespan: str + :ivar workspaces: A list of workspaces to query in addition to the primary workspace. + :vartype workspaces: list[str] + """ + + query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The query to execute. Required.""" + timespan: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. The timespan over which to query data. This is an ISO8601 time period + value. This timespan is applied in addition to any that are specified in the + query expression.""" + workspaces: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A list of workspaces to query in addition to the primary workspace.""" + + @overload + def __init__( + self, + *, + query: str, + timespan: Optional[str] = None, + workspaces: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class QueryResults(_Model): + """Contains the tables, columns & rows resulting from a query. + + :ivar tables: The results of the query in tabular format. Required. + :vartype tables: list[~azure.monitor.querylogs._generated.models.Table] + :ivar statistics: Statistics represented in JSON format. + :vartype statistics: dict[str, any] + :ivar render: Visualization data in JSON format. + :vartype render: dict[str, any] + :ivar error: The code and message for an error. + :vartype error: ~azure.monitor.querylogs._generated.models.ErrorInfo + """ + + tables: List["_models.Table"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The results of the query in tabular format. Required.""" + statistics: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Statistics represented in JSON format.""" + render: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Visualization data in JSON format.""" + error: Optional["_models.ErrorInfo"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The code and message for an error.""" + + @overload + def __init__( + self, + *, + tables: List["_models.Table"], + statistics: Optional[Dict[str, Any]] = None, + render: Optional[Dict[str, Any]] = None, + error: Optional["_models.ErrorInfo"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Table(_Model): + """Contains the columns and rows for one table in a query response. + + :ivar name: The name of the table. Required. + :vartype name: str + :ivar columns: The list of columns in this table. Required. + :vartype columns: list[~azure.monitor.querylogs._generated.models.Column] + :ivar rows: The resulting rows from this query. Required. + :vartype rows: list[list[dict[str, any]]] + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the table. 
Required.""" + columns: List["_models.Column"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The list of columns in this table. Required.""" + rows: List[List[Dict[str, Any]]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The resulting rows from this query. Required.""" + + @overload + def __init__( + self, + *, + name: str, + columns: List["_models.Column"], + rows: List[List[Dict[str, Any]]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_patch.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/models/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/py.typed b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_generated/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_helpers.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_helpers.py new file mode 100644 index 000000000000..13622684fa13 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_helpers.py @@ -0,0 +1,146 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from datetime import datetime, timedelta +from typing import List, Dict, Any, Optional + +from azure.core.credentials import TokenCredential +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline.policies import BearerTokenCredentialPolicy + +from ._generated._utils.serialization import Serializer, Deserializer + + +def get_authentication_policy(credential: TokenCredential, audience: str) -> BearerTokenCredentialPolicy: + """Returns the correct authentication policy. 
+
+    :param credential: The credential to use for authentication with the service.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :param str audience: The audience for the token.
+    :returns: The correct authentication policy.
+    :rtype: ~azure.core.pipeline.policies.BearerTokenCredentialPolicy
+    """
+    if credential is None:
+        raise ValueError("Parameter 'credential' must not be None.")
+    scope = audience.rstrip("/") + "/.default"
+    if hasattr(credential, "get_token"):
+        return BearerTokenCredentialPolicy(credential, scope)
+
+    raise TypeError("Unsupported credential")
+
+
+def order_results(request_order: List, mapping: Dict[str, Any], **kwargs: Any) -> List:
+    ordered = [mapping[id] for id in request_order]
+    results = []
+    for item in ordered:
+        if not item["body"].get("error"):
+            result_obj = kwargs.get("obj")
+            if result_obj:
+                results.append(result_obj._from_generated(item["body"]))  # pylint: disable=protected-access
+        else:
+            error = item["body"]["error"]
+            if error.get("code") == "PartialError":
+                partial_err = kwargs.get("partial_err")
+                if partial_err:
+                    res = partial_err._from_generated(  # pylint: disable=protected-access
+                        item["body"], kwargs.get("raise_with")
+                    )
+                    results.append(res)
+            else:
+                err = kwargs.get("err")
+                if err:
+                    results.append(err._from_generated(error))  # pylint: disable=protected-access
+    return results
+
+
+def construct_iso8601(timespan=None) -> Optional[str]:
+    if not timespan:
+        return None
+    start, end, duration = None, None, None
+    try:
+        if isinstance(timespan[1], datetime):  # we treat this as start_time, end_time
+            start, end = timespan[0], timespan[1]
+        elif isinstance(timespan[1], timedelta):  # we treat this as start_time, duration
+            start, duration = timespan[0], timespan[1]
+        else:
+            raise ValueError("Tuple must be a start datetime with a timedelta or an end datetime.")
+    except TypeError:
+        duration = timespan  # only a duration (timedelta) was provided
+    duration_str = ""
+    if duration:
+        try:
+            duration_str = "PT{}S".format(duration.total_seconds())
+        except AttributeError as e:
+            raise ValueError("timespan must be a timedelta or a tuple.") from e
+    iso_str = None
+    if start is not None:
+        start = Serializer.serialize_iso(start)
+        if end is not None:
+            end = Serializer.serialize_iso(end)
+            iso_str = f"{start}/{end}"
+        elif duration_str:
+            iso_str = f"{start}/{duration_str}"
+        else:  # an invalid None value was provided along with start_time
+            raise ValueError("Duration or end_time cannot be None when provided with start_time.")
+    else:
+        iso_str = duration_str
+    return iso_str
+
+
+def native_col_type(col_type, value):
+    if col_type == "datetime":
+        try:
+            value = Deserializer.deserialize_iso(value)
+        except Exception:  # pylint: disable=broad-except
+            # if there is any exception in deserializing the iso,
+            # return the value to the user
+            pass
+    elif col_type in ("timespan", "guid"):
+        value = str(value)
+    return value
+
+
+def process_row(col_types, row) -> List[Any]:
+    return [native_col_type(col_types[ind], val) for ind, val in enumerate(row)]
+
+
+def process_error(error, model):
+    try:
+        model = model._from_generated(error.model.error)  # pylint: disable=protected-access
+    except AttributeError:  # model can be None
+        pass
+    raise HttpResponseError(message=error.message, response=error.response, model=model)
+
+
+def process_prefer(server_timeout, include_statistics, include_visualization):
+    prefer = ""
+    if server_timeout:
+        prefer += "wait=" + str(server_timeout) + ","
+    if include_statistics:
+        prefer +=
"include-statistics=true," + if include_visualization: + prefer += "include-render=true" + return prefer.rstrip(",") + + +def get_subscription_id_from_resource(resource_id: str) -> str: + """Get the subscription ID from the provided resource ID. + + The format of the resource ID is: + /subscriptions/{subscriptionId}/resourceGroups/{group}/providers/{provider}/{type}/{name} + + :param str resource_id: The resource ID to parse. + :returns: The subscription ID. + :rtype: str + """ + if not resource_id: + raise ValueError("Resource ID must not be None or empty.") + + parts = resource_id.split("subscriptions/") + if len(parts) != 2: + raise ValueError("Resource ID must contain a subscription ID.") + + subscription_id = parts[1].split("/")[0] + return subscription_id diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_logs_query_client.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_logs_query_client.py new file mode 100644 index 000000000000..efbe5b5bbe07 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_logs_query_client.py @@ -0,0 +1,276 @@ +# +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from datetime import timedelta, datetime +from typing import Any, Union, Sequence, Dict, List, cast, Tuple, Optional, MutableMapping +from urllib.parse import urlparse + +from azure.core.credentials import TokenCredential +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator import distributed_trace + +from ._generated._client import MonitorQueryLogsClient +from ._helpers import ( + get_authentication_policy, + construct_iso8601, + order_results, + process_error, + process_prefer, +) +from ._models import LogsBatchQuery, LogsQueryResult, LogsQueryPartialResult +from ._exceptions import LogsQueryError +from ._version import SDK_MONIKER + +JSON = MutableMapping[str, Any] + + +class LogsQueryClient(object): # pylint: disable=client-accepts-api-version-keyword + """LogsQueryClient. Use this client to collect and organize log and performance data from + monitored resources. Data from different sources such as platform logs from Azure services, + log and performance data from virtual machines agents, and usage and performance data from + apps can be consolidated into a single Azure Log Analytics workspace. + + The various data types can be analyzed together using the + [Kusto Query Language](https://learn.microsoft.com/azure/data-explorer/kusto/query/) + + :param credential: The credential to authenticate the client. + :type credential: ~azure.core.credentials.TokenCredential + :keyword endpoint: The endpoint to connect to. Defaults to 'https://api.loganalytics.io/v1'. + :paramtype endpoint: Optional[str] + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_authentication.py + :start-after: [START create_logs_query_client] + :end-before: [END create_logs_query_client] + :language: python + :dedent: 4 + :caption: Creating the LogsQueryClient with a TokenCredential. + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_authentication.py + :start-after: [START create_logs_query_client_sovereign_cloud] + :end-before: [END create_logs_query_client_sovereign_cloud] + :language: python + :dedent: 4 + :caption: Creating the LogsQueryClient for use with a sovereign cloud (i.e. non-public cloud). + """ + + def __init__(self, credential: TokenCredential, **kwargs: Any) -> None: + endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io/v1") + if not endpoint.startswith("https://") and not endpoint.startswith("http://"): + endpoint = "https://" + endpoint + parsed_endpoint = urlparse(endpoint) + audience = kwargs.pop("audience", f"{parsed_endpoint.scheme}://{parsed_endpoint.netloc}") + self._endpoint = endpoint + auth_policy = kwargs.pop("authentication_policy", None) + kwargs.setdefault("sdk_moniker", SDK_MONIKER) + self._client = MonitorQueryLogsClient( + endpoint=self._endpoint, + credential=credential, + authentication_policy=auth_policy or get_authentication_policy(credential, audience), + **kwargs, + ) + + @distributed_trace + def query_workspace( + self, + workspace_id: str, + query: str, + *, + timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]], + server_timeout: Optional[int] = None, + include_statistics: bool = False, + include_visualization: bool = False, + additional_workspaces: Optional[Sequence[str]] = None, + **kwargs: Any, + ) -> Union[LogsQueryResult, LogsQueryPartialResult]: + """Execute a Kusto query. + + Executes a Kusto query for data. + + :param workspace_id: ID of the workspace. This is Workspace ID from the Properties blade in the + Azure portal. + :type workspace_id: str + :param query: The Kusto query. Learn more about the `Kusto query syntax + `_. + :type query: str + :keyword timespan: Required. The timespan for which to query the data. This can be a timedelta, + a timedelta and a start datetime, or a start datetime/end datetime. Set to None to not constrain + the query to a timespan. + :paramtype timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta] + or tuple[~datetime.datetime, ~datetime.datetime] or None + :keyword int server_timeout: the server timeout in seconds. The default timeout is 3 minutes, + and the maximum timeout is 10 minutes. + :keyword bool include_statistics: To get information about query statistics. + :keyword bool include_visualization: In the query language, it is possible to specify different + visualization options. By default, the API does not return information regarding the type of + visualization to show. If your client requires this information, specify the preference. + :keyword additional_workspaces: A list of workspaces that are included in the query. + These can be qualified workspace names, workspace IDs, or Azure resource IDs. + :paramtype additional_workspaces: Optional[list[str]] + :return: LogsQueryResult if there is a success or LogsQueryPartialResult when there is a partial success. + :rtype: Union[~azure.monitor.query.LogsQueryResult, ~azure.monitor.query.LogsQueryPartialResult] + :raises ~azure.core.exceptions.HttpResponseError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_logs_single_query.py + :start-after: [START send_logs_query] + :end-before: [END send_logs_query] + :language: python + :dedent: 0 + :caption: Get a response for a single log query. 
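+
+        A minimal illustrative sketch (the workspace ID below is a placeholder, and
+        ``DefaultAzureCredential`` from ``azure-identity`` is only one possible credential type):
+
+        .. code-block:: python
+
+            from datetime import timedelta
+            from azure.identity import DefaultAzureCredential
+            from azure.monitor.querylogs import LogsQueryClient
+
+            client = LogsQueryClient(DefaultAzureCredential())
+            result = client.query_workspace(
+                "<workspace-id>", "AppRequests | take 5", timespan=timedelta(hours=1)
+            )
+            for table in result:  # iterates tables (or partial data on partial success)
+                print(table.columns)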
+ """ + timespan_iso = construct_iso8601(timespan) + prefer = process_prefer(server_timeout, include_statistics, include_visualization) + + body = {"query": query, "timespan": timespan_iso, "workspaces": additional_workspaces} + + generated_response: JSON = {} + try: + generated_response = self._client.execute(workspace_id=workspace_id, body=body, prefer=prefer, **kwargs) + except HttpResponseError as err: + process_error(err, LogsQueryError) + + response: Union[LogsQueryResult, LogsQueryPartialResult] + if not generated_response.get("error"): + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + else: + response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access + generated_response, LogsQueryError + ) + return response + + @distributed_trace + def query_batch( + self, queries: Union[Sequence[Dict], Sequence[LogsBatchQuery]], **kwargs: Any + ) -> List[Union[LogsQueryResult, LogsQueryError, LogsQueryPartialResult]]: + """Execute a list of Kusto queries. Each request can be either a LogsBatchQuery + object or an equivalent serialized model. + + **NOTE**: The response is returned in the same order as that of the requests sent. + + :param queries: The list of Kusto queries to execute. + :type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery] + :return: List of LogsQueryResult, LogsQueryPartialResult and LogsQueryError. + For a given query, a LogsQueryResult is returned if the response is a success, LogsQueryPartialResult + is returned when there is a partial success and a LogsQueryError is returned when there is a failure. + The status of each response can be checked using `LogsQueryStatus` enum. + :rtype: list[Union[~azure.monitor.query.LogsQueryResult, ~azure.monitor.query.LogsQueryPartialResult, + ~azure.monitor.query.LogsQueryError] + :raises ~azure.core.exceptions.HttpResponseError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_batch_query.py + :start-after: [START send_query_batch] + :end-before: [END send_query_batch] + :language: python + :dedent: 0 + :caption: Execute multiple queries in a batch. + """ + try: + queries = [LogsBatchQuery(**cast(Dict, q)) for q in queries] + except (KeyError, TypeError): + pass + queries = [cast(LogsBatchQuery, q)._to_generated() for q in queries] # pylint: disable=protected-access + request_order = [req["id"] for req in queries] + batch = {"requests": queries} + generated = self._client.batch(batch, **kwargs) + mapping = {item["id"]: item for item in generated["responses"]} + return order_results( + request_order, + mapping, + obj=LogsQueryResult, + err=LogsQueryError, + partial_err=LogsQueryPartialResult, + raise_with=LogsQueryError, + ) + + @distributed_trace + def query_resource( + self, + resource_id: str, + query: str, + *, + timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]], + server_timeout: Optional[int] = None, + include_statistics: bool = False, + include_visualization: bool = False, + additional_workspaces: Optional[Sequence[str]] = None, + **kwargs: Any, + ) -> Union[LogsQueryResult, LogsQueryPartialResult]: + """Execute a Kusto query on a resource. + + Returns all the Azure Monitor logs matching the given Kusto query for an Azure resource. + + :param resource_id: The identifier of the resource. The expected format is + '/subscriptions//resourceGroups//providers///'. + :type resource_id: str + :param query: The Kusto query. Learn more about the `Kusto query syntax + `_. 
+ :type query: str + :keyword timespan: Required. The timespan for which to query the data. This can be a timedelta, + a timedelta and a start datetime, or a start datetime/end datetime. Set to None to not constrain + the query to a timespan. + :paramtype timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta] + or tuple[~datetime.datetime, ~datetime.datetime] or None + :keyword int server_timeout: the server timeout in seconds. The default timeout is 3 minutes, + and the maximum timeout is 10 minutes. + :keyword bool include_statistics: To get information about query statistics. + :keyword bool include_visualization: In the query language, it is possible to specify different + visualization options. By default, the API does not return information regarding the type of + visualization to show. If your client requires this information, specify the preference. + :keyword additional_workspaces: A list of workspaces that are included in the query. + These can be qualified workspace names, workspace IDs, or Azure resource IDs. + :paramtype additional_workspaces: Optional[list[str]] + :return: LogsQueryResult if there is a success or LogsQueryPartialResult when there is a partial success. + :rtype: Union[~azure.monitor.query.LogsQueryResult, ~azure.monitor.query.LogsQueryPartialResult] + :raises ~azure.core.exceptions.HttpResponseError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_resource_logs_query.py + :start-after: [START resource_logs_query] + :end-before: [END resource_logs_query] + :language: python + :dedent: 0 + :caption: Get a response for a single query on a resource's logs. + """ + timespan_iso = construct_iso8601(timespan) + prefer = process_prefer(server_timeout, include_statistics, include_visualization) + + body = {"query": query, "timespan": timespan_iso, "workspaces": additional_workspaces} + + generated_response: JSON = {} + try: + generated_response = self._client.execute_with_resource_id( + resource_id=resource_id, body=body, prefer=prefer, **kwargs + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + + response: Union[LogsQueryResult, LogsQueryPartialResult] + if not generated_response.get("error"): + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + else: + response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access + generated_response, LogsQueryError + ) + return response + + def close(self) -> None: + """Close the :class:`~azure.monitor.query.LogsQueryClient` session.""" + return self._client.close() + + def __enter__(self) -> "LogsQueryClient": + self._client.__enter__() + return self + + def __exit__(self, *args: Any) -> None: + self._client.__exit__(*args) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_models.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_models.py new file mode 100644 index 000000000000..81e1d3d28ddc --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_models.py @@ -0,0 +1,264 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +# cspell:ignore milli +import uuid +from collections.abc import Mapping +from datetime import datetime, timedelta +from typing import Any, Optional, List, Union, Tuple, Dict, Iterator, Literal + +from ._enums import LogsQueryStatus +from ._exceptions import LogsQueryError +from ._helpers import construct_iso8601, process_row + + +JSON = Mapping[str, Any] # pylint: disable=unsubscriptable-object + + +class LogsTableRow: + """Represents a single row in logs table. + + This type is gettable by both column name and column index. + """ + + index: int + """The index of the row in the table""" + + def __init__(self, **kwargs: Any) -> None: + _col_types = kwargs["col_types"] + row = kwargs["row"] + self._row = process_row(_col_types, row) + self.index = kwargs["row_index"] + _columns = kwargs["columns"] + self._row_dict = {_columns[i]: self._row[i] for i in range(len(self._row))} + + def __iter__(self) -> Iterator[Any]: + """This will iterate over the row directly. + + :return: An iterator over the row. + :rtype: Iterator + """ + return iter(self._row) + + def __len__(self) -> int: + return len(self._row) + + def __repr__(self) -> str: + return repr(self._row) + + def __getitem__(self, column: Union[str, int]) -> Any: + """This type must be subscriptable directly to row. + Must be gettable by both column name and row index + + :param column: The name of the column or the index of the element in a row. + :type column: str or int + :return: The value of the column or the element in the row. + :rtype: Any + """ + try: + return self._row_dict[column] + except KeyError: + return self._row[int(column)] + + +class LogsTable: + """Contains the columns and rows for one table in a query response. + + All required parameters must be populated in order to send to Azure. + """ + + name: str + """Required. The name of the table.""" + rows: List[LogsTableRow] + """Required. The resulting rows from this query.""" + columns: List[str] + """Required. The labels of columns in this table.""" + columns_types: List[str] + """Required. The types of columns in this table.""" + + def __init__(self, **kwargs: Any) -> None: + self.name = kwargs.pop("name", "") + self.columns = kwargs.pop("columns", []) + self.columns_types = kwargs.pop("columns_types", []) + _rows = kwargs.pop("rows", []) + self.rows: List[LogsTableRow] = [ + LogsTableRow( + row=row, + row_index=ind, + col_types=self.columns_types, + columns=self.columns, + ) + for ind, row in enumerate(_rows) + ] + + @classmethod + def _from_generated(cls, generated) -> "LogsTable": + return cls( + name=generated.get("name"), + columns=[col["name"] for col in generated.get("columns", [])], + columns_types=[col["type"] for col in generated.get("columns", [])], + rows=generated.get("rows"), + ) + + +class LogsBatchQuery: + """A single request in a batch. The batch query API accepts a list of these objects. + + :param workspace_id: Workspace ID to be included in the query. + :type workspace_id: str + :param query: The Analytics query. Learn more about the `Analytics query syntax + `_. + :type query: str + :keyword timespan: Required. The timespan for which to query the data. This can be a timedelta, + a timedelta and a start datetime, or a start datetime/end datetime. Set to None to not constrain + the query to a timespan. 
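+        For example, a plain ``timedelta(hours=1)`` value queries the most recent hour of data.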
+ :paramtype timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta] + or tuple[~datetime.datetime, ~datetime.datetime] or None + :keyword additional_workspaces: A list of workspaces that are included in the query. + These can be qualified workspace names, workspace IDs, or Azure resource IDs. + :paramtype additional_workspaces: Optional[list[str]] + :keyword server_timeout: the server timeout. The default timeout is 3 minutes, + and the maximum timeout is 10 minutes. + :paramtype server_timeout: Optional[int] + :keyword include_statistics: To get information about query statistics. + :paramtype include_statistics: Optional[bool] + :keyword include_visualization: In the query language, it is possible to specify different + visualization options. By default, the API does not return information regarding the type of + visualization to show. + :paramtype include_visualization: Optional[bool] + """ + + def __init__( + self, + workspace_id: str, + query: str, + *, + timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]], + **kwargs: Any, + ) -> None: + include_statistics = kwargs.pop("include_statistics", False) + include_visualization = kwargs.pop("include_visualization", False) + server_timeout = kwargs.pop("server_timeout", None) + prefer = "" + if server_timeout: + prefer += "wait=" + str(server_timeout) + if include_statistics: + if len(prefer) > 0: + prefer += "," + prefer += "include-statistics=true" + if include_visualization: + if len(prefer) > 0: + prefer += "," + prefer += "include-render=true" + + headers = {"Prefer": prefer} + timespan_iso = construct_iso8601(timespan) + additional_workspaces = kwargs.pop("additional_workspaces", None) + self.id: str = str(uuid.uuid4()) + self.body: Dict[str, Any] = { + "query": query, + "timespan": timespan_iso, + "workspaces": additional_workspaces, + } + self.headers = headers + self.workspace = workspace_id + + def _to_generated(self) -> Dict[str, Any]: + return { + "id": self.id, + "body": self.body, + "headers": self.headers, + "workspace": self.workspace, + "path": "/query", + "method": "POST", + } + + +class LogsQueryResult: + """The LogsQueryResult type is returned when the response of a query is a success.""" + + tables: List[LogsTable] + """The list of tables, columns and rows.""" + statistics: Optional[JSON] = None + """This will include a statistics property in the response that describes various performance + statistics such as query execution time and resource usage.""" + visualization: Optional[JSON] = None + """This will include a visualization property in the response that specifies the type of visualization selected + by the query and any properties for that visualization.""" + status: Literal[LogsQueryStatus.SUCCESS] + """The status of the result. 
Always 'Success' for an instance of a LogsQueryResult.""" + + def __init__(self, **kwargs: Any) -> None: + self.tables = kwargs.get("tables", []) + self.statistics = kwargs.get("statistics", None) + self.visualization = kwargs.get("visualization", None) + self.status = LogsQueryStatus.SUCCESS + + def __iter__(self) -> Iterator[LogsTable]: + return iter(self.tables) + + @classmethod + def _from_generated(cls, generated) -> "LogsQueryResult": + if not generated: + return cls() + tables = [] + if "body" in generated: + generated = generated["body"] + if generated.get("tables"): + tables = [ + LogsTable._from_generated(table) for table in generated["tables"] # pylint: disable=protected-access + ] + return cls( + tables=tables, + statistics=generated.get("statistics"), + visualization=generated.get("render"), + ) + + +class LogsQueryPartialResult: + """The LogsQueryPartialResult type is returned when the response of a query is a + partial success (or partial failure). + """ + + partial_data: List[LogsTable] + """The list of tables, columns and rows.""" + statistics: Optional[JSON] = None + """This will include a statistics property in the response that describes various performance statistics + such as query execution time and resource usage.""" + visualization: Optional[JSON] = None + """This will include a visualization property in the response that specifies the type of visualization + selected by the query and any properties for that visualization.""" + partial_error: Optional[LogsQueryError] = None + """The partial error info.""" + status: Literal[LogsQueryStatus.PARTIAL] + """The status of the result. Always 'PartialError' for an instance of a LogsQueryPartialResult.""" + + def __init__(self, **kwargs: Any) -> None: + self.partial_data = kwargs.get("partial_data", []) + self.partial_error = kwargs.get("partial_error", None) + self.statistics = kwargs.get("statistics", None) + self.visualization = kwargs.get("visualization", None) + self.status = LogsQueryStatus.PARTIAL + + def __iter__(self) -> Iterator[LogsTable]: + return iter(self.partial_data) + + @classmethod + def _from_generated(cls, generated, error) -> "LogsQueryPartialResult": + if not generated: + return cls() + partial_data = None + if "body" in generated: + generated = generated["body"] + if generated.get("tables"): + partial_data = [ + LogsTable._from_generated(table) for table in generated["tables"] # pylint: disable=protected-access + ] + return cls( + partial_data=partial_data, + partial_error=error._from_generated(generated.get("error")), # pylint: disable=protected-access + statistics=generated.get("statistics"), + visualization=generated.get("render"), + ) diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_version.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_version.py new file mode 100644 index 000000000000..6a28d7037438 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/_version.py @@ -0,0 +1,8 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +VERSION = "1.0.0" +SDK_MONIKER = f"monitor-querylogs/{VERSION}" diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/__init__.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/__init__.py new file mode 100644 index 000000000000..2fc0f3ee31e1 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/__init__.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- + +from ._logs_query_client_async import LogsQueryClient + +__all__ = ["LogsQueryClient"] diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_helpers_async.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_helpers_async.py new file mode 100644 index 000000000000..a32542728d54 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_helpers_async.py @@ -0,0 +1,26 @@ +# +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from azure.core.credentials_async import AsyncTokenCredential +from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy + + +def get_authentication_policy(credential: AsyncTokenCredential, audience: str) -> AsyncBearerTokenCredentialPolicy: + """Returns the correct authentication policy. + + :param credential: The credential to use for authentication with the service. + :type credential: ~azure.core.credentials.AsyncTokenCredential + :param str audience: The audience for the token. + :returns: The correct authentication policy. + :rtype: ~azure.core.pipeline.policies.AsyncBearerTokenCredentialPolicy + """ + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + scope = audience.rstrip("/") + "/.default" + if hasattr(credential, "get_token"): + return AsyncBearerTokenCredentialPolicy(credential, scope) + + raise TypeError("Unsupported credential") diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_logs_query_client_async.py b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_logs_query_client_async.py new file mode 100644 index 000000000000..d56a73d959ec --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/aio/_logs_query_client_async.py @@ -0,0 +1,277 @@ +# +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from datetime import datetime, timedelta +from typing import Any, cast, Tuple, Union, Sequence, Dict, List, Optional, MutableMapping +from urllib.parse import urlparse + +from azure.core.credentials_async import AsyncTokenCredential +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator_async import distributed_trace_async + +from .._generated.aio._client import MonitorQueryLogsClient +from .._helpers import construct_iso8601, order_results, process_error, process_prefer +from .._models import LogsQueryResult, LogsBatchQuery, LogsQueryPartialResult +from ._helpers_async import get_authentication_policy +from .._exceptions import LogsQueryError +from .._version import SDK_MONIKER + +JSON = MutableMapping[str, Any] + + +class LogsQueryClient(object): # pylint: disable=client-accepts-api-version-keyword + """LogsQueryClient. Use this client to collect and organize log and performance data from + monitored resources. Data from different sources such as platform logs from Azure services, + log and performance data from virtual machines agents, and usage and performance data from + apps can be consolidated into a single Azure Log Analytics workspace. + + The various data types can be analyzed together using the + [Kusto Query Language](https://learn.microsoft.com/azure/data-explorer/kusto/query/) + + :param credential: The credential to authenticate the client + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword endpoint: The endpoint to connect to. Defaults to 'https://api.loganalytics.io/v1'. + :paramtype endpoint: Optional[str] + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_authentication_async.py + :start-after: [START create_logs_query_client_async] + :end-before: [END create_logs_query_client_async] + :language: python + :dedent: 4 + :caption: Creating the asynchronous LogsQueryClient with a TokenCredential. + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_authentication_async.py + :start-after: [START create_logs_query_client_sovereign_cloud_async] + :end-before: [END create_logs_query_client_sovereign_cloud_async] + :language: python + :dedent: 4 + :caption: Creating the LogsQueryClient for use with a sovereign cloud (i.e. non-public cloud). + """ + + def __init__(self, credential: AsyncTokenCredential, **kwargs: Any) -> None: + endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io/v1") + if not endpoint.startswith("https://") and not endpoint.startswith("http://"): + endpoint = "https://" + endpoint + parsed_endpoint = urlparse(endpoint) + # Assume audience is the base URL of the endpoint, unless a value is explicitly passed in. 
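+        # For example, a sovereign-cloud endpoint such as "https://api.loganalytics.us/v1"
+        # (Azure US Government) would yield an audience of "https://api.loganalytics.us".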
+ audience = kwargs.pop("audience", f"{parsed_endpoint.scheme}://{parsed_endpoint.netloc}") + self._endpoint = endpoint + auth_policy = kwargs.pop("authentication_policy", None) + kwargs.setdefault("sdk_moniker", SDK_MONIKER) + self._client = MonitorQueryLogsClient( + credential=credential, + authentication_policy=auth_policy or get_authentication_policy(credential, audience), + endpoint=self._endpoint, + **kwargs, + ) + + @distributed_trace_async + async def query_workspace( + self, + workspace_id: str, + query: str, + *, + timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]], + server_timeout: Optional[int] = None, + include_statistics: bool = False, + include_visualization: bool = False, + additional_workspaces: Optional[Sequence[str]] = None, + **kwargs: Any, + ) -> Union[LogsQueryResult, LogsQueryPartialResult]: + """Execute an Analytics query. + + Executes an Analytics query for data. + + :param workspace_id: ID of the workspace. This is Workspace ID from the Properties blade in the + Azure portal. + :type workspace_id: str + :param query: The Kusto query. Learn more about the `Kusto query syntax + `_. + :type query: str + :keyword timespan: Required. The timespan for which to query the data. This can be a timedelta, + a timedelta and a start datetime, or a start datetime/end datetime. Set to None to not constrain + the query to a timespan. + :paramtype timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta] + or tuple[~datetime.datetime, ~datetime.datetime] or None + :keyword int server_timeout: the server timeout in seconds. The default timeout is 3 minutes, + and the maximum timeout is 10 minutes. + :keyword bool include_statistics: To get information about query statistics. + :keyword bool include_visualization: In the query language, it is possible to specify different + visualization options. By default, the API does not return information regarding the type of + visualization to show. If your client requires this information, specify the preference. + :keyword additional_workspaces: A list of workspaces that are included in the query. + These can be qualified workspace names, workspace IDs, or Azure resource IDs. + :paramtype additional_workspaces: Optional[List[str]] + :return: LogsQueryResult if there is a success or LogsQueryPartialResult when there is a partial success. + :rtype: ~azure.monitor.query.LogsQueryResult or ~azure.monitor.query.LogsQueryPartialResult + :raises ~azure.core.exceptions.HttpResponseError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_log_query_async.py + :start-after: [START send_logs_query_async] + :end-before: [END send_logs_query_async] + :language: python + :dedent: 0 + :caption: Get a response for a single log query. 
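+
+        A minimal asynchronous sketch (the workspace ID is a placeholder; the async
+        ``DefaultAzureCredential`` comes from ``azure.identity.aio``):
+
+        .. code-block:: python
+
+            import asyncio
+            from datetime import timedelta
+            from azure.identity.aio import DefaultAzureCredential
+            from azure.monitor.querylogs.aio import LogsQueryClient
+
+            async def run():
+                credential = DefaultAzureCredential()
+                async with LogsQueryClient(credential) as client:
+                    result = await client.query_workspace(
+                        "<workspace-id>", "AppRequests | take 5", timespan=timedelta(hours=1)
+                    )
+                    for table in result:
+                        print(table.name)
+                await credential.close()
+
+            asyncio.run(run())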
+ """ + timespan_iso = construct_iso8601(timespan) + prefer = process_prefer(server_timeout, include_statistics, include_visualization) + + body = {"query": query, "timespan": timespan_iso, "workspaces": additional_workspaces} + + generated_response: JSON = {} + try: + generated_response = await self._client.execute( + workspace_id=workspace_id, body=body, prefer=prefer, **kwargs + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + response: Union[LogsQueryResult, LogsQueryPartialResult] + if not generated_response.get("error"): + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + else: + response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access + generated_response, LogsQueryError + ) + return response + + @distributed_trace_async + async def query_batch( + self, queries: Union[Sequence[Dict], Sequence[LogsBatchQuery]], **kwargs: Any + ) -> List[Union[LogsQueryResult, LogsQueryError, LogsQueryPartialResult]]: + """Execute a list of analytics queries. Each request can be either a LogsBatchQuery + object or an equivalent serialized model. + + **NOTE**: The response is returned in the same order as that of the requests sent. + + :param queries: The list of Kusto queries to execute. + :type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery] + :return: List of LogsQueryResult, LogsQueryPartialResult and LogsQueryError. + For a given query, a LogsQueryResult is returned if the response is a success, LogsQueryPartialResult + is returned when there is a partial success and a LogsQueryError is returned when there is a failure. + The status of each response can be checked using `LogsQueryStatus` enum. + :rtype: list[~azure.monitor.query.LogsQueryResult or ~azure.monitor.query.LogsQueryPartialResult + or ~azure.monitor.query.LogsQueryError] + :raises ~azure.core.exceptions.HttpResponseError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_batch_query_async.py + :start-after: [START send_query_batch_async] + :end-before: [END send_query_batch_async] + :language: python + :dedent: 0 + :caption: Execute multiple queries in a batch. + """ + try: + queries = [LogsBatchQuery(**cast(Dict, q)) for q in queries] + except (KeyError, TypeError): + pass + queries = [cast(LogsBatchQuery, q)._to_generated() for q in queries] # pylint: disable=protected-access + request_order = [req["id"] for req in queries] + batch = {"requests": queries} + generated = await self._client.batch(batch, **kwargs) + mapping = {item["id"]: item for item in generated["responses"]} + return order_results( + request_order, + mapping, + obj=LogsQueryResult, + err=LogsQueryError, + partial_err=LogsQueryPartialResult, + raise_with=LogsQueryError, + ) + + @distributed_trace_async + async def query_resource( + self, + resource_id: str, + query: str, + *, + timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]], + server_timeout: Optional[int] = None, + include_statistics: bool = False, + include_visualization: bool = False, + additional_workspaces: Optional[Sequence[str]] = None, + **kwargs: Any, + ) -> Union[LogsQueryResult, LogsQueryPartialResult]: + """Execute a Kusto query on a resource. + + Returns all the Azure Monitor logs matching the given Kusto query for an Azure resource. + + :param resource_id: The identifier of the resource. The expected format is + '/subscriptions//resourceGroups//providers///'. 
+ :type resource_id: str + :param query: The Kusto query. Learn more about the `Kusto query syntax + `_. + :type query: str + :keyword timespan: Required. The timespan for which to query the data. This can be a timedelta, + a timedelta and a start datetime, or a start datetime/end datetime. Set to None to not constrain + the query to a timespan. + :paramtype timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta] + or tuple[~datetime.datetime, ~datetime.datetime] or None + :keyword int server_timeout: the server timeout in seconds. The default timeout is 3 minutes, + and the maximum timeout is 10 minutes. + :keyword bool include_statistics: To get information about query statistics. + :keyword bool include_visualization: In the query language, it is possible to specify different + visualization options. By default, the API does not return information regarding the type of + visualization to show. If your client requires this information, specify the preference. + :keyword additional_workspaces: A list of workspaces that are included in the query. + These can be qualified workspace names, workspace IDs, or Azure resource IDs. + :paramtype additional_workspaces: Optional[List[str]] + :return: LogsQueryResult if there is a success or LogsQueryPartialResult when there is a partial success. + :rtype: Union[~azure.monitor.query.LogsQueryResult, ~azure.monitor.query.LogsQueryPartialResult] + :raises ~azure.core.exceptions.HttpResponseError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_resource_logs_query_async.py + :start-after: [START resource_logs_query_async] + :end-before: [END resource_logs_query_async] + :language: python + :dedent: 0 + :caption: Get a response for a single query on a resource's logs. 
+ """ + timespan_iso = construct_iso8601(timespan) + prefer = process_prefer(server_timeout, include_statistics, include_visualization) + + body = { + "query": query, + "timespan": timespan_iso, + "additional_workspaces": additional_workspaces, + } + + generated_response: JSON = {} + try: + generated_response = await self._client.execute_with_resource_id( + resource_id=resource_id, body=body, prefer=prefer, **kwargs + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + + response: Union[LogsQueryResult, LogsQueryPartialResult] + if not generated_response.get("error"): + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + else: + response = LogsQueryPartialResult._from_generated( # pylint: disable=protected-access + generated_response, LogsQueryError + ) + return response + + async def __aenter__(self) -> "LogsQueryClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *args: Any) -> None: + await self._client.__aexit__(*args) + + async def close(self) -> None: + """Close the :class:`~azure.monitor.query.aio.LogsQueryClient` session.""" + await self._client.__aexit__() diff --git a/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/py.typed b/sdk/monitor/azure-monitor-querylogs/azure/monitor/querylogs/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/monitor/azure-monitor-querylogs/dev_requirements.txt b/sdk/monitor/azure-monitor-querylogs/dev_requirements.txt new file mode 100644 index 000000000000..0b4dbf4f9396 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/dev_requirements.txt @@ -0,0 +1,4 @@ +-e ../../../tools/azure-sdk-tools +../../core/azure-core +../../identity/azure-identity +aiohttp>=3.0 diff --git a/sdk/monitor/azure-monitor-querylogs/samples/README.md b/sdk/monitor/azure-monitor-querylogs/samples/README.md new file mode 100644 index 000000000000..1d75a316cab9 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/README.md @@ -0,0 +1,69 @@ +--- +page_type: sample +languages: + - python +products: + - azure + - azure-monitor +urlFragment: query-logs-azuremonitor-samples +--- + +# Azure Monitor Query Logs client library Python samples + +## Samples + +The following code samples show common scenarios with the Azure Monitor Query Logs client library. + +For examples on authenticating with the Azure Monitor service, see [sample_authentication.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_authentication.py) and [sample_authentication_async.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_authentication_async.py). 
+ +### Logs query samples + +- [Send a single workspace query with LogsQueryClient and handle the response as a table](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_single_query_async.py)) +- [Send a single workspace query with LogsQueryClient and handle the response in key-value form](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_key_value_form.py) +- [Send a single workspace query with LogsQueryClient without pandas](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_single_log_query_without_pandas.py) +- [Send a single workspace query with LogsQueryClient across multiple workspaces](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py) +- [Send multiple workspace queries with LogsQueryClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py) +- [Send a single workspace query with LogsQueryClient using server timeout](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_server_timeout.py) +- [Send a single resource query with LogsQueryClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_resource_logs_query.py) +- [Handle partial query results](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query_partial_result.py) + +#### Notebook samples + +- [Split a large query into multiple smaller queries to avoid hitting service limits](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_large_query.ipynb) +- [Detect anomalies in Azure Monitor log data using machine learning techniques](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_machine_learning_sklearn.ipynb) + +## Prerequisites + +- Python 3.9 or later +- An [Azure subscription][azure_subscription] +- To query Logs, you need an [Azure Log Analytics workspace][azure_monitor_create_using_portal]. + +## Setup + +1. Install the Azure Monitor Query Logs client library for Python with [pip][pip]: + +```bash +pip install azure-monitor-querylogs +``` + +2. **Optional**: To use the samples, you may also need to install [pandas][pandas] for easier data manipulation: + +```bash +pip install pandas +``` + +## Running the samples + +1. Open a terminal window and `cd` to the directory that the samples are saved in. +2. Set the environment variables specified in the sample file you wish to run. +3. Follow the usage described in the file, e.g. `python sample_logs_single_query.py` + +## Next steps + +Check out the [API reference documentation][query_ref_docs] to learn more about what you can do with the Azure Monitor Query Logs client library. 
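+
+Most of the samples print results by converting each response table into a pandas DataFrame. If you want to reuse that pattern in your own code, the conversion is a one-liner per table; a minimal sketch, assuming pandas is installed and `response` was obtained with `query_workspace` as in the snippet above:
+
+```python
+import pandas as pd
+
+# Each table exposes its rows and column names, which map directly onto a DataFrame.
+for table in response.tables:
+    df = pd.DataFrame(data=table.rows, columns=table.columns)
+    print(df.head())
+```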
+ +[azure_subscription]: https://azure.microsoft.com/free/ +[azure_monitor_create_using_portal]: https://learn.microsoft.com/azure/azure-monitor/logs/quick-create-workspace +[pandas]: https://pandas.pydata.org/ +[pip]: https://pypi.org/project/pip/ +[query_ref_docs]: https://learn.microsoft.com/python/api/azure-monitor-querylogs/azure.monitor.querylogs diff --git a/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_authentication_async.py b/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_authentication_async.py new file mode 100644 index 000000000000..0f135432509f --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_authentication_async.py @@ -0,0 +1,45 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_authentication_async.py +DESCRIPTION: + This sample demonstrates how to authenticate to the Azure Monitor Logs service using + LogsQueryClient in async mode. +USAGE: + python sample_authentication_async.py + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. +""" +import asyncio + + +async def create_logs_query_client_async(): + # [START create_logs_query_client_async] + from azure.identity.aio import DefaultAzureCredential + from azure.monitor.querylogs.aio import LogsQueryClient + + credential = DefaultAzureCredential() + client = LogsQueryClient(credential) + # [END create_logs_query_client_async] + + +async def create_logs_query_client_sovereign_cloud_async(): + # [START create_logs_query_client_sovereign_cloud_async] + from azure.identity import AzureAuthorityHosts + from azure.identity.aio import DefaultAzureCredential + from azure.monitor.querylogs.aio import LogsQueryClient + + credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) + client = LogsQueryClient(credential, endpoint="https://api.loganalytics.us/v1") + # [END create_logs_query_client_sovereign_cloud_async] + + +async def main(): + await create_logs_query_client_async() + await create_logs_query_client_sovereign_cloud_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_single_query_async.py b/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_single_query_async.py new file mode 100644 index 000000000000..bb1b9663f5b8 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/async_samples/sample_logs_single_query_async.py @@ -0,0 +1,61 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_logs_single_query_async.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and executing a single + Kusto query. +USAGE: + python sample_logs_single_query_async.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. 
+ +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. +""" +import asyncio + +# [START send_logs_query_async] +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity.aio import DefaultAzureCredential +from azure.monitor.querylogs.aio import LogsQueryClient +from azure.monitor.querylogs import LogsQueryStatus +import pandas as pd + + +async def logs_query(): + credential = DefaultAzureCredential() + client = LogsQueryClient(credential) + + query = "AppRequests | take 5" + + async with client: + try: + response = await client.query_workspace(os.environ["LOGS_WORKSPACE_ID"], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) + except HttpResponseError as err: + print("something fatal happened") + print(err) + await credential.close() + + +# [END send_logs_query_async] + +if __name__ == "__main__": + asyncio.run(logs_query()) diff --git a/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_large_query.ipynb b/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_large_query.ipynb new file mode 100644 index 000000000000..25b13ebc8835 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_large_query.ipynb @@ -0,0 +1,710 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Query with large result sets\n", + "\n", + "This sample notebook demonstrates how to query large amounts of data using the Azure Monitor Query client library.\n", + "\n", + "Due to Log Analytics [service limits](https://learn.microsoft.com/azure/azure-monitor/service-limits#la-query-api), sometimes it may not be possible to retrieve all the expected data in a single query. For example, the number of rows returned or the maximum size of the data returned may exceed the stated limits. One approach for overcoming these limits is to split the queries into multiple smaller queries using different time ranges.\n", + "\n", + "In this notebook, you will learn how your data in a Log Analytics workspace can first be queried to determine the time ranges that can be used to split the data retrieval into multiple smaller queries without exceeding the service limits. Then, you will asynchronously execute the smaller queries and output the results to separate files which can be used for further processing or analysis. Afterwards, this notebook also shows how to export the data to an [Azure Data Lake Storage (ADLS)](https://learn.microsoft.com/azure/storage/blobs/data-lake-storage-introduction) account.\n", + "\n", + "**Disclaimer**: This approach of splitting data retrieval into multiple smaller queries is good when:\n", + " 1. Dealing with a few GB or a few millions of records per hour. For larger data sets, [exporting](https://learn.microsoft.com/azure/azure-monitor/logs/logs-data-export) is recommended.\n", + " 2. The data retrieval query only uses simple data retrieval operators outlined [here](https://learn.microsoft.com/azure/azure-monitor/logs/basic-logs-query?tabs=portal-1#kql-language-limits)." 
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Getting started\n",
+    "\n",
+    "For this notebook, it is assumed that you have an existing Azure subscription and an active [Log Analytics workspace](https://learn.microsoft.com/azure/azure-monitor/logs/log-analytics-workspace-overview) that contains at least one table populated with data.\n",
+    "\n",
+    "Start by installing the Azure Monitor Query Logs and Azure Identity client libraries along with the `pandas` data analysis library."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "\n",
+    "!{sys.executable} -m pip install --upgrade azure-monitor-querylogs azure-identity pandas\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Setup\n",
+    "\n",
+    "An authenticated client is required to query data from Log Analytics. The following code shows how to create a `LogsQueryClient` using `DefaultAzureCredential`. Note that an async credential and client are used."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from azure.identity.aio import DefaultAzureCredential\n",
+    "from azure.monitor.querylogs.aio import LogsQueryClient\n",
+    "\n",
+    "credential = DefaultAzureCredential()\n",
+    "client = LogsQueryClient(credential)\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Set Log Analytics workspace ID\n",
+    "\n",
+    "Set the `LOGS_WORKSPACE_ID` variable below to the ID of your Log Analytics workspace."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "LOGS_WORKSPACE_ID = \"\"\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Define helper functions\n",
+    "\n",
+    "In order to overcome the service limits, the strategy is to query the data in smaller chunks based on some time column (e.g. `TimeGenerated`). The following helper functions query your data to find suitable start and end times for those batch queries.\n",
+    "\n",
+    "- The `get_batch_endpoints_by_row_count` function will return a list of times that can be used in the query time spans while ensuring that the number of rows returned will be less than the specified row limit.\n",
+    "- The `get_batch_endpoints_by_byte_size` function will return a list of times that can be used in the query time spans while ensuring that the size of the data returned is less than the specified byte size limit."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from datetime import datetime, timedelta\n", + "\n", + "import pandas as pd\n", + "\n", + "from azure.core.exceptions import HttpResponseError\n", + "from azure.monitor.querylogs import LogsQueryPartialResult, LogsQueryStatus\n", + "\n", + "\n", + "async def get_batch_endpoints_by_row_count(\n", + " query: str,\n", + " end_time: datetime,\n", + " days_back: int,\n", + " max_rows_per_query: int = int(1e5),\n", + " time_col: str = \"TimeGenerated\",\n", + "):\n", + " \"\"\"\n", + " Determine the timestamp endpoints for each chunked query\n", + " such that number of rows returned by each query is (approximately) `max_rows_per_query`\n", + " \"\"\"\n", + "\n", + " # This query will assign a batch number to each row depending on the maximum number of rows per query.\n", + " # Then the earliest timestamp for each batch number is used for each query endpoint.\n", + " find_batch_endpoints_query = f\"\"\"\n", + " {query}\n", + " | sort by {time_col} desc\n", + " | extend batch_num = row_cumsum(1) / {max_rows_per_query}\n", + " | summarize endpoint=min({time_col}) by batch_num\n", + " | sort by batch_num asc\n", + " | project endpoint\n", + " \"\"\"\n", + "\n", + " start_time = end_time - timedelta(days=days_back)\n", + " try:\n", + " response = await client.query_workspace(\n", + " workspace_id=LOGS_WORKSPACE_ID,\n", + " query=find_batch_endpoints_query,\n", + " timespan=(start_time, end_time),\n", + " )\n", + " except HttpResponseError as e:\n", + " print(\"Error batching endpoints by row count\")\n", + " raise e\n", + "\n", + " if response.status == LogsQueryStatus.PARTIAL:\n", + " raise Exception(f\"Error batching endpoints by data size: {response.partial_error}\")\n", + "\n", + " batch_endpoints = [end_time]\n", + " batch_endpoints += [row[0] for row in response.tables[0].rows]\n", + " return batch_endpoints\n", + "\n", + "\n", + "async def get_batch_endpoints_by_byte_size(\n", + " query: str,\n", + " end_time: datetime,\n", + " days_back: int,\n", + " max_bytes_per_query: int = 100 * 1024 * 1024, # 100 MiB\n", + " time_col: str = \"TimeGenerated\",\n", + "):\n", + " \"\"\"\n", + " Determine the timestamp endpoints for each chunked query such that\n", + " the size of the data returned is less than `max_bytes_per_query`.\n", + " \"\"\"\n", + "\n", + " # This query will assign a batch number to each row depending on the estimated data size.\n", + " # Then the earliest timestamp for each batch number is used for each query endpoint.\n", + " find_batch_endpoints_query = f\"\"\"\n", + " {query}\n", + " | sort by {time_col} desc\n", + " | extend batch_num = row_cumsum(estimate_data_size(*)) / {max_bytes_per_query}\n", + " | summarize endpoint=min({time_col}) by batch_num\n", + " | sort by batch_num asc\n", + " | project endpoint\n", + " \"\"\"\n", + "\n", + " start_time = end_time - timedelta(days=days_back)\n", + " try:\n", + " response = await client.query_workspace(\n", + " workspace_id=LOGS_WORKSPACE_ID,\n", + " query=find_batch_endpoints_query,\n", + " timespan=(start_time, end_time)\n", + " )\n", + " except HttpResponseError as e:\n", + " print(\"Error batching endpoints by byte size\")\n", + " raise e\n", + "\n", + " if response.status == LogsQueryStatus.PARTIAL:\n", + " raise Exception(f\"Error batching endpoints by byte size: {response.partial_error}\")\n", + "\n", + " batch_endpoints = [end_time]\n", + " batch_endpoints += [row[0] for row in response.tables[0].rows]\n", + " 
return batch_endpoints\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, define a function that will asynchronously execute a given query over a time range specified by a given start time and end time. This function will call the `query_workspace` method of the `LogsQueryClient`. The Azure Monitor Query library will automatically handle retries in case of connection-related errors or server errors (i.e. 500, 503, and 504 status codes). Check [here](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core#configurations) for more information on configuring retries." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def execute_query(\n", + " query: str,\n", + " start_time: datetime,\n", + " end_time: datetime,\n", + " *,\n", + " query_id: str = \"\",\n", + " correlation_request_id: str = \"\",\n", + "):\n", + " \"\"\"\n", + " Asynchronously execute the given query, restricted to the given time range, and parse the API response.\n", + "\n", + " :param str query: The query to execute.\n", + " :param datetime start_time: The start of the time range to query.\n", + " :param datetime end_time: The end of the time range to query.\n", + " :keyword str query_id: Optional identifier for the query, used for printing.\n", + " :keyword str correlation_request_id, Optional correlation ID to use in the query headers for tracing.\n", + " \"\"\"\n", + " headers = {}\n", + " if correlation_request_id:\n", + " headers[\"x-ms-correlation-request-id\"] = correlation_request_id\n", + "\n", + " try:\n", + " response = await client.query_workspace(\n", + " workspace_id=LOGS_WORKSPACE_ID,\n", + " query=query,\n", + " timespan=(start_time, end_time),\n", + " server_timeout=360,\n", + " include_statistics=False, # Can be used for debugging.\n", + " headers=headers,\n", + " retry_on_methods=[\"POST\"]\n", + " )\n", + " except HttpResponseError as e:\n", + " print(f\"Error when attempting query {query_id} (query time span: {start_time} to {end_time}):\\n\\t\", e)\n", + " return []\n", + "\n", + " if response.status == LogsQueryStatus.SUCCESS:\n", + " print(f\"Query {query_id} successful (query time span: {start_time} to {end_time}). Row count: {len(response.tables[0].rows)}\")\n", + " return response.tables[0]\n", + " else:\n", + " # This will be a LogsQueryPartialResult.\n", + " error = response.partial_error\n", + " print(f\"Partial results returned for query {query_id} (query time span: {start_time} to {end_time}):\\n\\t\", error)\n", + " return response.partial_data[0]\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Query data\n", + "\n", + "With the helper functions defined, you can now query the data in chunks that won't hit the row count and data size service limits.\n", + "\n", + "### Set variables\n", + "\n", + "Before running the queries, some variables will need to be configured.\n", + "\n", + "- `QUERY` - KQL query to run. Change the table name and specify any required columns and filters as needed. When constructing this query, the recommendation is to use [reduced KQL](https://learn.microsoft.com/azure/azure-monitor/logs/basic-logs-query?tabs=portal-1#kql-language-limits) which are optimized for data retrieval. To get all rows/columns, just set `QUERY = `. \n", + "- `END_TIME` - End of the time range to query over.\n", + "- `DAYS_BACK` - The number of days to go back from the end time. 
For example, if `END_TIME = datetime.now()` and `DAYS_BACK = 7`, the query will return data from the last 7 days. Note that fetched data will (initially) be stored in memory on your system, so it is possible to run into memory limitations if the query returns a large amount of data. If this issue is encountered, consider querying the data in time segments. For example, instead of querying 365 days of data at once, query 100 days of data at a time by setting the values of `END_TIME` and `DAYS_BACK` appropriately and re-running the notebook from this cell onwards for each separate segment.\n", + "- `MAX_ROWS_PER_QUERY` - The max number of rows that is returned from a single query. This is defaulted to the service limit of 500,000 rows multiplied by some factor to allow for some wiggle room. This limit may sometimes be exceeded if many entries share the same timestamp.\n", + "- `MAX_BYTES_PER_QUERY` - The max size in bytes of data returned from a single query. This is defaulted to the service limit of 100 MiB multiplied by some factor to allow for some wiggle room.\n", + "- `MAX_CONCURRENT_QUERIES` - The max number of concurrent queries to run at once. This is defaulted to 5. Reducing this can help avoid errors due to rate limits.\n", + "- `OUTPUT_DIRECTORY` - The directory where the query results will be stored. This is defaulted to \"./query_results\".\n", + "- `OUTPUT_FILE_PREFIX` - The prefix of each output file name. This is defaulted to \"query_results\"." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# EDIT THIS VALUE WITH YOUR QUERY.\n", + "# If necessary, add a KQL `project` operator or any filtering operators to limit the number of rows returned.\n", + "QUERY = \"AppRequests\"\n", + "\n", + "# Use the current time in the system's local timezone as the end time.\n", + "END_TIME = datetime.now().astimezone()\n", + "\n", + "# If you want to use a different end time, uncomment the following line and adjust as needed.\n", + "# END_TIME = datetime.strptime(\"2023-01-01 00:00:00 +0000\", \"%Y-%m-%d %H:%M:%S %z\")\n", + "\n", + "DAYS_BACK = 90\n", + "\n", + "MAX_ROWS_PER_QUERY_SERVICE_LIMIT = int(5e5) # 500K\n", + "MAX_ROWS_PER_QUERY = int(MAX_ROWS_PER_QUERY_SERVICE_LIMIT * 0.9)\n", + "\n", + "MAX_BYTES_PER_QUERY_SERVICE_LIMIT = 100 * 1024 * 1024\n", + "MAX_BYTES_PER_QUERY = int(MAX_BYTES_PER_QUERY_SERVICE_LIMIT * 0.6) # 64 MB of compressed data is the limit. This ensures we stay under that.\n", + "\n", + "MAX_CONCURRENT_QUERIES = 5\n", + "\n", + "OUTPUT_DIRECTORY = \"./query_results\"\n", + "OUTPUT_FILE_PREFIX = \"query_results\"\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Estimate data and costs (optional)\n", + "\n", + "Before running the chunked queries, it might first be prudent to estimate the size of the data if planning on exporting the query results to another service. The below cell defines another helper function that can be used to estimate the size of the data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def estimate_data_size(query: str, end_time: datetime, days_back: int):\n", + " query = f\"{query} | summarize n_rows = count(), estimate_data_size = sum(estimate_data_size(*))\"\n", + " start_time = end_time - timedelta(days=days_back)\n", + " response = await client.query_workspace(\n", + " workspace_id=LOGS_WORKSPACE_ID,\n", + " query=query,\n", + " timespan=(start_time, end_time),\n", + " )\n", + "\n", + " if response.status == LogsQueryStatus.PARTIAL:\n", + " raise Exception(f\"Error estimating data size: {response.partial_error}\")\n", + "\n", + " columns = response.tables[0].columns\n", + " rows = response.tables[0].rows\n", + " df = pd.DataFrame(data=rows, columns=columns)\n", + " return df\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, run the following cell to estimate the size of the data that will be returned by the queries. Note that this is just an estimate and the actual size may vary slightly. This information can be used in conjunction with the Azure storage [pricing calculator](https://azure.microsoft.com/pricing/calculator/?service=storage) to determine costs that will be incurred for your storage setup. If using Azure Data Lake Storage Gen2, full billing details can be found [here](https://azure.microsoft.com/pricing/details/storage/data-lake/)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "data_size_df = await estimate_data_size(QUERY, END_TIME, DAYS_BACK)\n", + "data_size_df[\"estimate_data_size_MB\"] = data_size_df[\"estimate_data_size\"] / (1000 **2)\n", + "data_size_df[\"estimate_data_size_GB\"] = data_size_df[\"estimate_data_size_MB\"] / 1000\n", + "data_size_df\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Fetch log data\n", + "\n", + "Use the helper functions to create an async wrapper function that will query the data in chunks using the variables defined above.\n", + "\n", + "In order to be memory efficient, it's better to process data as it is returned from the queries instead of storing all the data in memory. Inside `fetch_logs` below, there is a section of code where you can add code to process the data on the fly. \n", + "\n", + "By default, all the queried data will be written to binary pickle files in the directory defined by `OUTPUT_DIRECTORY`. Each file will contain the results of a single query, and each file name will be be prefixed with the value of `OUTPUT_FILE_PREFIX`.\n", + "\n", + "There is also a section inside `process_logs` where custom code for mutating the data in each chunk's DataFrame can be added." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import asyncio\n", + "import os\n", + "\n", + "# Limit the number of concurrent queries.\n", + "semaphore = asyncio.Semaphore(MAX_CONCURRENT_QUERIES)\n", + "\n", + "async def fetch_logs(query: str, start_time: datetime, end_time: datetime, query_id: str, correlation_request_id: str):\n", + " async with semaphore:\n", + " response = await execute_query(query, start_time, end_time, query_id=query_id, correlation_request_id=correlation_request_id)\n", + " # Do some post-processing on the response and get the final DataFrame.\n", + " df = await process_logs(response)\n", + " if df is not None and not df.empty:\n", + " # Can do something with the DataFrame here. For example, write to a file, insert into a database, etc.\n", + "\n", + " # ADD YOUR CUSTOM CODE HERE. Remember to remove/uncomment the following line as needed.\n", + " write_to_file(df, query_id)\n", + "\n", + " # Return the number of rows in the DataFrame.\n", + " return len(df)\n", + " return 0\n", + "\n", + "\n", + "async def process_logs(response):\n", + " if response:\n", + " df = pd.DataFrame(data=response.rows, columns=response.columns)\n", + "\n", + " # Can modify/mutate the DataFrame here.\n", + " return df\n", + " return None\n", + "\n", + "\n", + "def write_to_file(df, query_id):\n", + " path = os.path.join(OUTPUT_DIRECTORY, f\"{OUTPUT_FILE_PREFIX}_{query_id}.pkl\")\n", + " df.to_pickle(path)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import uuid\n", + "\n", + "\n", + "async def run():\n", + " # Below, we combine the endpoints retrieved from both endpoint methods to ensure that the number of rows\n", + " # and the size of the data returned are both within the limits.\n", + " # Worst case performance is double the theoretical minimum number of queries necessary.\n", + " print(\"Calculating batch endpoints...\")\n", + " row_batch_endpoints = await get_batch_endpoints_by_row_count(QUERY, END_TIME, days_back=DAYS_BACK, max_rows_per_query=MAX_ROWS_PER_QUERY)\n", + " byte_batch_endpoints = await get_batch_endpoints_by_byte_size(QUERY, END_TIME, days_back=DAYS_BACK, max_bytes_per_query=MAX_BYTES_PER_QUERY)\n", + " batch_endpoints = sorted(set(byte_batch_endpoints + row_batch_endpoints), reverse=True)\n", + "\n", + " print(\"Clearing output directory...\")\n", + " if os.path.exists(OUTPUT_DIRECTORY):\n", + " for filename in os.listdir(OUTPUT_DIRECTORY):\n", + " if filename.startswith(OUTPUT_FILE_PREFIX):\n", + " os.remove(os.path.join(OUTPUT_DIRECTORY, filename))\n", + " else:\n", + " os.makedirs(OUTPUT_DIRECTORY)\n", + "\n", + " if len(batch_endpoints) == 1:\n", + " print(f\"No data with time generated earlier than {batch_endpoints[0]} was found in the queried data. \"\n", + " \"Verify that the query is correct and that the data exists in your specified time range.\")\n", + " print(f\"Will attempt to query the data with the start and end time both set to {batch_endpoints[0]}. This may fail if \"\n", + " \"the data exceeds API limits. 
Another field to split on in the query may be necessary.\")\n", + " batch_endpoints.append(batch_endpoints[0])\n", + "\n", + " queries = []\n", + " end_time = batch_endpoints[0]\n", + " correlation_request_id = str(uuid.uuid4())\n", + "\n", + " print(f\"Querying {len(batch_endpoints) - 1} time ranges, from {batch_endpoints[-1]} to {end_time}\")\n", + " print(f\"Correlation request ID: {correlation_request_id}\")\n", + "\n", + "\n", + " for i in range(1, len(batch_endpoints)):\n", + " start_time = batch_endpoints[i]\n", + " queries.append(fetch_logs(QUERY, start_time, end_time, query_id=str(i), correlation_request_id=correlation_request_id))\n", + " end_time = start_time - timedelta(microseconds=1) # Subtract 1 microsecond to avoid overlap between queries.\n", + "\n", + " counts = await asyncio.gather(*queries)\n", + " # Return total number of rows retrieved across all queries.\n", + " return sum(counts)\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, go ahead and run the following cell to fetch the data. Note that this may take some time depending on the size of the data and the number of queries that need to be run." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "count = await run()\n", + "print(f\"Retrieved {count} rows\")\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "One of the output files can be analyzed to see what the data looks like. The following cell will output the first 30 rows of the first file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Inspect some of the data.\n", + "for file in os.listdir(OUTPUT_DIRECTORY):\n", + " if file.startswith(OUTPUT_FILE_PREFIX):\n", + " path = os.path.join(OUTPUT_DIRECTORY, file)\n", + " df = pd.read_pickle(path)\n", + " break\n", + "\n", + "df.head(30)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### (Optional) Combine data into single DataFrame\n", + "\n", + "If system memory permits, the data can be combined into a single `DataFrame` for more comprehensive analysis. Uncomment the line calling `combine_all_files_to_df` and run the cell." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "full_data_df = pd.DataFrame()\n", + "\n", + "\n", + "def combine_all_files_to_df():\n", + " global full_data_df\n", + " for file in os.listdir(OUTPUT_DIRECTORY):\n", + " if file.startswith(OUTPUT_FILE_PREFIX):\n", + " path = os.path.join(OUTPUT_DIRECTORY, file)\n", + " df = pd.read_pickle(path)\n", + " full_data_df = pd.concat([full_data_df, df], ignore_index=True)\n", + "\n", + "# Combine all files into a single DataFrame.\n", + "# combine_all_files_to_df()\n", + "\n", + "# Inspect the combined DataFrame.\n", + "print(f\"Shape of combined DataFrame: {full_data_df.shape}\")\n", + "full_data_df.head(30)\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Optional: Export data to Azure Data Lake Storage (ADLS)\n", + "\n", + "If desired, the data queried from your Log Analytics workspace can be exported to an [Azure Data Lake Storage (ADLS)](https://learn.microsoft.com/azure/storage/blobs/data-lake-storage-introduction) account. This can be useful for storing the data for longer periods of time or for using it in other applications. 
To do this, the `azure-storage-file-datalake` Python package will be needed, which uses the [ADLS Gen2 REST API](https://learn.microsoft.com/azure/storage/blobs/data-lake-storage-directory-file-acl-python) under the hood.\n",
+    "\n",
+    "### Setup\n",
+    "\n",
+    "First, ensure you have the required package installed:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys\n",
+    "\n",
+    "!{sys.executable} -m pip install --upgrade azure-storage-file-datalake\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now, input your storage [connection string](https://learn.microsoft.com/azure/storage/common/storage-account-keys-manage) below and instantiate the ADLS service client."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from azure.storage.filedatalake import DataLakeServiceClient\n",
+    "\n",
+    "AZURE_STORAGE_CONNECTION_STRING = \"\"\n",
+    "\n",
+    "try:\n",
+    "    adls_service_client = DataLakeServiceClient.from_connection_string(AZURE_STORAGE_CONNECTION_STRING)\n",
+    "except Exception as e:\n",
+    "    print(e)\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "\n",
+    "Next, define a helper function that can be used to interact with the ADLS storage account(s) to which the queried data will be exported."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def upload_df_to_adls_path(\n",
+    "    df: pd.DataFrame,\n",
+    "    adls_dirname: str,\n",
+    "    adls_filename: str,\n",
+    "    container_name: str,\n",
+    "):\n",
+    "    \"\"\"\n",
+    "    Upload a pandas DataFrame to the specified ADLS path as a single JSON file.\n",
+    "    \"\"\"\n",
+    "    json_data = df.to_json(orient=\"records\", lines=True, date_format=\"iso\")\n",
+    "    file_system_client = adls_service_client.get_file_system_client(file_system=container_name)\n",
+    "\n",
+    "    try:\n",
+    "        file_system_client.create_directory(adls_dirname)\n",
+    "    except Exception as e:\n",
+    "        print(e)\n",
+    "\n",
+    "    try:\n",
+    "        directory_client = file_system_client.get_directory_client(adls_dirname)\n",
+    "        file_client = directory_client.get_file_client(adls_filename)\n",
+    "        file_client.upload_data(json_data, overwrite=True)\n",
+    "    except Exception as e:\n",
+    "        print(e)\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Upload data\n",
+    "\n",
+    "After configuring the variables below, run the following cell to upload the data using the helper function defined above. Here, the `upload_df_to_adls_path` function is called on each data file created previously.\n",
+    "\n",
+    "To be more memory efficient, the `upload_df_to_adls_path` function can instead be called on each chunk's DataFrame from within the `fetch_logs` function above."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Name of the storage container. This must already exist.\n",
+    "CONTAINER_NAME = \"\"\n",
+    "\n",
+    "# Name of the directory to write to. This will be created if it does not exist.\n",
+    "DIRECTORY_NAME = \"monitor-log-dump\"\n",
+    "\n",
+    "# Name of the file to write to (include the .json extension).\n",
+    "FILENAME = \"monitor-log-dump.json\"\n",
+    "\n",
+    "\n",
+    "for file in os.listdir(OUTPUT_DIRECTORY):\n",
+    "    if file.startswith(OUTPUT_FILE_PREFIX):\n",
+    "        path = os.path.join(OUTPUT_DIRECTORY, file)\n",
+    "        df = pd.read_pickle(path)\n",
+    "        upload_df_to_adls_path(df, DIRECTORY_NAME, FILENAME, CONTAINER_NAME)\n"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Conclusion\n",
+    "\n",
+    "In this notebook, you learned how to query data from a Log Analytics workspace in chunks to avoid hitting the service limits. You also learned how to export the data to an Azure Data Lake Storage (ADLS) account."
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": ".venv",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.6"
+  },
+  "orig_nbformat": 4,
+  "vscode": {
+   "interpreter": {
+    "hash": "665f5865bb085838e35a9597206be80722fad7fd0d11e0dfbe620869aad35c71"
+   }
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_machine_learning_sklearn.ipynb b/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_machine_learning_sklearn.ipynb
new file mode 100644
index 000000000000..b6838811833b
--- /dev/null
+++ b/sdk/monitor/azure-monitor-querylogs/samples/notebooks/sample_machine_learning_sklearn.ipynb
@@ -0,0 +1,902 @@
+{
+ "cells": [
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Detect anomalies in Azure Monitor log data using machine learning techniques\n",
+    "\n",
+    "This notebook demonstrates how to use the [`azure-monitor-querylogs`](https://pypi.org/project/azure-monitor-querylogs/) library to retrieve Azure Monitor log data for training a machine learning model to detect anomalies. The [scikit-learn](https://scikit-learn.org/stable/) library is used to train two regression models on historical data, and then the trained model with better performance is used to predict new values and identify anomalies.\n",
+    "\n",
+    "1. [**Getting Started**](#getting-started) - Install dependencies, and define helper functions and constants.\n",
+    "2. [**Query and visualize data**](#query-and-visualize) - Explore data from a Log Analytics workspace.\n",
+    "3. [**Analyze data using machine learning techniques**](#analyze-data)\n",
+    "    * [**Prepare data for model training**](#prepare-data) - Prepare data for model training.\n",
+    "    * [**Train and test regression models**](#train-regression-models) - Train a linear regression model and a gradient boosting regression model on historical data.\n",
+    "    * [**Predict new values and identify anomalies**](#identify-anomalies) - Score new data, or predict new values, using one of the trained models to identify anomalies.\n",
+    "4. [**Ingest anomalies**](#ingest-anomalies) - Upload detected anomalies into a custom table in your Log Analytics workspace for further analysis. (optional)"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "\n",
+    "\n",
+    "## 1. 
Getting started\n", + "\n", + "Let's start by installing the Azure Monitor Query Logs, Azure Identity and Azure Monitor Ingestion client libraries along with the `pandas` data analysis library, `plotly` visualization library, and `scikit-learn` machine learning library." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667411859 + } + }, + "outputs": [], + "source": [ + "import sys\n", + "\n", + "!{sys.executable} -m pip install --upgrade azure-monitor-querylogs azure-identity azure-monitor-ingestion\n", + "\n", + "!{sys.executable} -m pip install --upgrade pandas numpy plotly scikit-learn nbformat" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup\n", + "\n", + "Some initial setup is needed before we can run the sample code.\n", + "\n", + "#### Set Log Analytics workspace ID\n", + "\n", + "Set the `LOGS_WORKSPACE_ID` variable below to the ID of your Log Analytics workspace. Currently, it is set to use the [Azure Monitor Demo workspace](https://portal.azure.com/#blade/Microsoft_Azure_Monitoring_Logs/DemoLogsBlade), but it is recommended to use your own workspace if available." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667412121 + } + }, + "outputs": [], + "source": [ + "LOGS_WORKSPACE_ID = \"DEMO_WORKSPACE\"" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Create LogsQueryClient\n", + "\n", + "An authenticated client is needed to query Azure Monitor Logs. The following code shows how to create a `LogsQueryClient` using `DefaultAzureCredential`.\n", + "\n", + "Note, that `LogsQueryClient` typically only supports authentication with Azure Active Directory (Azure AD) token credentials. However, we can pass in a custom authentication policy to enable the use of API keys. This allows the client to query the [demo workspace](https://learn.microsoft.com/azure/azure-monitor/logs/api/access-api#authenticate-with-a-demo-api-key). Do note that the availability and access to this demo workspace is subject to change, and it is recommended to use your own Log Analytics workspace." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667412326 + } + }, + "outputs": [], + "source": [ + "from azure.core.credentials import AzureKeyCredential\n", + "from azure.core.pipeline.policies import AzureKeyCredentialPolicy\n", + "from azure.identity import DefaultAzureCredential\n", + "from azure.monitor.querylogs import LogsQueryClient\n", + "\n", + "if LOGS_WORKSPACE_ID == \"DEMO_WORKSPACE\":\n", + " credential = AzureKeyCredential(\"DEMO_KEY\")\n", + " header_name = \"X-Api-Key\"\n", + " authentication_policy = AzureKeyCredentialPolicy(name=header_name, credential=credential)\n", + "else:\n", + " credential = DefaultAzureCredential()\n", + " authentication_policy = None\n", + "\n", + "logs_query_client = LogsQueryClient(credential, authentication_policy=authentication_policy)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Define helper functions\n", + "\n", + "Next, we'll define some helper functions that will be used throughout the notebook.\n", + "\n", + "- `query_logs_workspace` - Queries the Log Analytics workspace for a given query and returns the results as a `pandas` DataFrame.\n", + "- `display_graph` - Given a `pandas` DataFrame, displays a `plotly` line graph showing hourly usage for various data types over time." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667412535 + } + }, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import plotly.express as px\n", + "\n", + "from azure.monitor.querylogs import LogsQueryStatus\n", + "from azure.core.exceptions import HttpResponseError\n", + "\n", + "\n", + "def query_logs_workspace(query):\n", + " try:\n", + " response = logs_query_client.query_workspace(LOGS_WORKSPACE_ID, query, timespan=None)\n", + " if response.status == LogsQueryStatus.SUCCESS:\n", + " data = response.tables\n", + " else:\n", + " error = response.partial_error\n", + " data = response.partial_data\n", + " print(error)\n", + "\n", + " for table in data:\n", + " my_data = pd.DataFrame(data=table.rows, columns=table.columns)\n", + " except HttpResponseError as err:\n", + " print(\"something fatal happened\")\n", + " print (err)\n", + " return my_data\n", + "\n", + "\n", + "def display_graph(df, title):\n", + " df = df.sort_values(by=\"TimeGenerated\")\n", + " graph = px.line(df, x='TimeGenerated', y=\"ActualUsage\", color='DataType', title=title)\n", + " graph.show()\n", + "\n", + "\n", + "# Set display options for visualizing\n", + "def display_options():\n", + " display = pd.options.display\n", + " display.max_columns = 10\n", + " display.max_rows = 10\n", + " display.max_colwidth = 300\n", + " display.width = None\n", + " return None\n", + "\n", + "display_options()\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "## 2. Query and visualize data\n", + "\n", + "Let's start by exploring the data in the Log Analytics workspace. We'll start by running the following query on the [Usage](https://learn.microsoft.com/azure/azure-monitor/reference/tables/usage) table which is assumed to exist inside the workspace. \n", + "\n", + "This query will check how much data (in Megabytes) was ingested into each of the tables (data types) in the Log Analytics workspace each hour over the past week." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667412978 + } + }, + "outputs": [], + "source": [ + "TABLE = \"Usage\"\n", + "\n", + "QUERY = f\"\"\"\n", + "let starttime = 7d; // Start date for the time series, counting back from the current date\n", + "let endtime = 0d; // today\n", + "{TABLE} | project TimeGenerated, DataType, Quantity\n", + "| where TimeGenerated between (ago(starttime)..ago(endtime))\n", + "| summarize ActualUsage=sum(Quantity) by TimeGenerated=bin(TimeGenerated, 1h), DataType\n", + "\"\"\"\n", + "\n", + "df = query_logs_workspace(QUERY)\n", + "display(df)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, let's view the data as a graph using the helper function we defined above." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667416120 + } + }, + "outputs": [], + "source": [ + "display_graph(df, \"All Data Types - last week usage\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "## 3. Analyze data using machine learning techniques\n", + "\n", + "\n", + "### Prepare data for model training\n", + "\n", + "After exploring the available data, let's use a subset of it for model training. We will choose a few of the data types to train our model on (defined in `data_types` below). " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667416435 + } + }, + "outputs": [], + "source": [ + "# Insert here the selected data types for analysis - for simplicity we picked 6, which seemed most interesting at exploration of data step\n", + "data_types = [\"ContainerLog\", \"AzureNetworkAnalytics_CL\", \"StorageBlobLogs\", \"AzureDiagnostics\", \"Perf\", \"AVSSyslog\"]\n", + "\n", + "# Get all available data types that have data.\n", + "available_data_types = df[\"DataType\"].unique()\n", + "\n", + "# Filter out data types that are not available in the data.\n", + "data_types = list(filter(lambda data_type: data_type in available_data_types, data_types))\n", + "\n", + "if data_types:\n", + " print(f\"Selected data type for analysis: {data_types}\")\n", + "else:\n", + " raise SystemExit(\"No datatypes found. Please select data types which have data\")\n", + "\n", + "# Returns usage query for selected data types for given time range\n", + "def get_selected_datatypes(data_types, start, end):\n", + " data_types_string = \",\".join([f\"'{data_type}'\" for data_type in data_types])\n", + " query = (\n", + " f\"let starttime = {start}d; \"\n", + " f\"let endtime = {end}d; \"\n", + " \"Usage | project TimeGenerated, DataType, Quantity \"\n", + " \"| where TimeGenerated between (ago(starttime)..ago(endtime)) \"\n", + " f\"| where DataType in ({data_types_string}) \"\n", + " \"| summarize ActualUsage=sum(Quantity) by TimeGenerated=bin(TimeGenerated, 1h), DataType\"\n", + " )\n", + " return query\n", + "\n", + "# We will query the data from the first 3 weeks of the past month.\n", + "# Feel free to change the start and end dates.\n", + "start = 28\n", + "end = 7\n", + "\n", + "query = get_selected_datatypes(data_types, start, end)\n", + "my_data = query_logs_workspace(query)\n", + "display(my_data)\n", + "\n", + "if my_data.empty:\n", + " raise SystemExit(\"No data found for training. 
Please select data types which have data\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667416744 + } + }, + "outputs": [], + "source": [ + "display_graph(my_data, \"Selected Data Types - Historical Data Usage (3 weeks)\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's now expand the timestamp information in the TimeGenerated field into separate columns for year, month, day, and hour using [`DatetimeIndex`](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#time-date-components) from `pandas`. This will allow us to use the timestamp information as features in our model." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667416948 + } + }, + "outputs": [], + "source": [ + "my_data['Year'] = pd.DatetimeIndex(my_data['TimeGenerated']).year\n", + "my_data['Month'] = pd.DatetimeIndex(my_data['TimeGenerated']).month\n", + "my_data['Day'] = pd.DatetimeIndex(my_data['TimeGenerated']).day\n", + "my_data['Hour'] = pd.DatetimeIndex(my_data['TimeGenerated']).hour" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, define the X and y variables for training the model. The X variable will contain the features (timestamp information) and the y variable will contain the target (data usage in Megabytes)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667417190 + } + }, + "outputs": [], + "source": [ + "Y = my_data['ActualUsage']\n", + "X = my_data[['DataType', 'Year', 'Month', 'Day', 'Hour']]\n", + "\n", + "display(X)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "### Train and test regression models on historical data\n", + "\n", + "Now that we have our data prepared, let's experiment with two different regression models and check which of the models most closely predicts the data in our testing set:\n", + "\n", + "#### Define cross validator\n", + "\n", + "Before we train, we'll define a cross-validator using [`TimeSeriesSplit`](https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.TimeSeriesSplit.html#sklearn.model_selection.TimeSeriesSplit) from `scikit-learn`. 
The `evaluate` function defined below will use this cross-validator to evaluate the performance of the models we train.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667417450 + } + }, + "outputs": [], + "source": [ + "from sklearn.model_selection import cross_validate\n", + "from sklearn.model_selection import TimeSeriesSplit\n", + "\n", + "ts_cv = TimeSeriesSplit()\n", + "\n", + "def evaluate(model, X, Y, cv):\n", + " cv_results = cross_validate(\n", + " model,\n", + " X,\n", + " Y,\n", + " cv=cv,\n", + " scoring=[\"neg_mean_absolute_error\", \"neg_root_mean_squared_error\"],\n", + " )\n", + " mae = -cv_results[\"test_neg_mean_absolute_error\"]\n", + " rmse = -cv_results[\"test_neg_root_mean_squared_error\"]\n", + " print(\n", + " f\"Mean Absolute Error: {mae.mean():.3f} +/- {mae.std():.3f}\\n\"\n", + " f\"Root Mean Squared Error: {rmse.mean():.3f} +/- {rmse.std():.3f}\"\n", + " )" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "#### Train and evaluate a linear regression model\n", + "\n", + "First, let's train a linear regression model.\n", + "\n", + "Here, we first apply some transformations to the input data:\n", + "\n", + "* One-hot encode the categorical features using [`OneHotEncoder`](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.OneHotEncoder.html#sklearn.preprocessing.OneHotEncoder). This is how we numerically represent \"DataTypes\" in our model.\n", + "* Scales numerical features - in our case, hourly usage - to the 0-1 range.\n", + "\n", + "Then, we train the model using an extension of Linear regression called [Ridge Regression](https://en.wikipedia.org/wiki/Ridge_regression). This is a linear regression model that uses L2 [regularization](https://en.wikipedia.org/wiki/Regularization_(mathematics)) to prevent overfitting.\n", + "\n", + "Finally, we evaluate the model using the cross-validator defined above." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683669757397 + } + }, + "outputs": [], + "source": [ + "from sklearn.pipeline import make_pipeline\n", + "from sklearn.compose import ColumnTransformer\n", + "from sklearn.preprocessing import OneHotEncoder\n", + "from sklearn.preprocessing import MinMaxScaler\n", + "from sklearn.linear_model import RidgeCV\n", + "import numpy as np\n", + "\n", + "\n", + "categorical_columns = [\"DataType\"]\n", + "\n", + "one_hot_encoder = OneHotEncoder(handle_unknown=\"ignore\", sparse_output=False)\n", + "\n", + "# Get 25 alpha values between 10^-6 and 10^6\n", + "alphas = np.logspace(-6, 6, 25)\n", + "ridge_linear_pipeline = make_pipeline(\n", + " ColumnTransformer(\n", + " transformers=[\n", + " (\"categorical\", one_hot_encoder, categorical_columns),\n", + " ],\n", + " remainder=MinMaxScaler(),\n", + " ),\n", + " RidgeCV(alphas=alphas),\n", + ")\n", + "\n", + "ridge_linear_pipeline.fit(X, Y)\n", + "\n", + "print(\"Score of Linear Regression:\")\n", + "evaluate(ridge_linear_pipeline, X, Y, cv=ts_cv)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Train and evaluate a gradient boosting regression model\n", + "\n", + "Next, let's train a gradient boosting regression model. 
Here, we'll use [`HistGradientBoostingRegressor`](https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.HistGradientBoostingRegressor.html#sklearn.ensemble.HistGradientBoostingRegressor) from `scikit-learn`. We will do ordinal encoding of the categorical features using [`OrdinalEncoder`](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.OrdinalEncoder.html#sklearn.preprocessing.OrdinalEncoder)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667417902 + } + }, + "outputs": [], + "source": [ + "from sklearn.preprocessing import OrdinalEncoder\n", + "from sklearn.ensemble import HistGradientBoostingRegressor\n", + "\n", + "\n", + "ordinal_encoder = OrdinalEncoder(categories=[data_types])\n", + "\n", + "gradient_boosting_pipeline = make_pipeline(\n", + " ColumnTransformer(\n", + " transformers=[\n", + " (\"categorical\", ordinal_encoder, categorical_columns),\n", + " ],\n", + " remainder=\"passthrough\",\n", + " ),\n", + " HistGradientBoostingRegressor(\n", + " categorical_features=range(1),\n", + " ),\n", + ")\n", + "\n", + "gradient_boosting_pipeline.fit(X, Y)\n", + "print(\"Score of Gradient Boosting Regression:\")\n", + "evaluate(gradient_boosting_pipeline, X, Y, cv=ts_cv)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Take a look at the error metrics for both models. Which model performs better?\n", + "\n", + "Typically, for this dataset, the gradient boosting regression model will perform better than the linear regression model based on the lower error metrics. Since the gradient boosting regression model performs better, we'll use it to predict new values and identify anomalies." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Save the model\n", + "\n", + "First, we need to pickle the model so that we can use it later." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667418162 + } + }, + "outputs": [], + "source": [ + "import joblib\n", + "\n", + "# Save the model as a pickle file\n", + "filename = './myModel.pkl'\n", + "joblib.dump(gradient_boosting_pipeline, filename)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "### Predict new values and identify anomalies\n", + "\n", + "Now that we have a trained model, let's use it to predict new values and identify anomalies. Let's start by querying ingestion information for the six data types we selected over the past week." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667418434 + } + }, + "outputs": [], + "source": [ + "# Time range from past week.\n", + "start = 7\n", + "end = 0\n", + "\n", + "query = get_selected_datatypes(data_types, start, end)\n", + "new_data = query_logs_workspace(query)\n", + "\n", + "new_data['Year'] = pd.DatetimeIndex(new_data['TimeGenerated']).year\n", + "new_data['Month'] = pd.DatetimeIndex(new_data['TimeGenerated']).month\n", + "new_data['Day'] = pd.DatetimeIndex(new_data['TimeGenerated']).day\n", + "new_data['Hour'] = pd.DatetimeIndex(new_data['TimeGenerated']).hour\n", + "display(new_data)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visualize the data in a graph:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667418762 + } + }, + "outputs": [], + "source": [ + "display_graph(new_data, \"Selected Data Types - New Data Usage (1 week)\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, load the model from the pickle file and use it to predict (score) values for the latest data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667418994 + } + }, + "outputs": [], + "source": [ + "# Load the model from the file\n", + "X_new = new_data[['DataType', 'Year', 'Month', 'Day', 'Hour']]\n", + "\n", + "loaded_model = joblib.load(filename)\n", + "Predictions_new = loaded_model.predict(X_new)\n", + "new_data[\"PredictedUsage\"] = Predictions_new\n", + "display(new_data)\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "From the displayed DataFrame, you should see an additional column called \"PredictedUsage\" which contains the predicted usage values." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Identify ingestion anomalies\n", + "\n", + "Let's now try to identify anomalies. There are multiple approaches to identifying anomalies, but, for this sample, we'll use a method call [Tukey's fences](https://en.wikipedia.org/wiki/Outlier#Tukey%27s_fences).\n", + "\n", + "Note: The KQL [series_decompose_anomalies](https://learn.microsoft.com/azure/data-explorer/kusto/query/series-decompose-anomaliesfunction) function also uses the Tukey's fences method to detect anomalies." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Define helper functions\n", + "\n", + "Let's define a couple of helper function that will help us identify anomalies. These will update a DataFrame with a new column called `Anomalies` where `1` indicates a positive anomaly, and `-1` indicates a negative anomaly." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667419166 + } + }, + "outputs": [], + "source": [ + "def outlier_range(data_column):\n", + " sorted(data_column)\n", + " Q1, Q3 = np.percentile(data_column , [10,90])\n", + " IQR = Q3 - Q1\n", + " lower_bound = Q1 - (1.5 * IQR)\n", + " upper_bound = Q3 + (1.5 * IQR)\n", + " return lower_bound, upper_bound\n", + "\n", + "def outlier_update_data_frame(df):\n", + " lower_bound, upper_bound = outlier_range(df['Residual'])\n", + "\n", + " df.loc[((df['Residual'] < lower_bound) | (df['Residual'] > upper_bound)) & (df['Residual'] < 0) , 'Anomalies'] = -1\n", + " df.loc[((df['Residual'] < lower_bound) | (df['Residual'] > upper_bound)) & (df['Residual'] >= 0) , 'Anomalies'] = 1\n", + " df.loc[(df['Residual'] >= lower_bound) & (df['Residual'] <= upper_bound), 'Anomalies'] = 0\n", + "\n", + " return df[df['Anomalies'] != 0]" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Run the helper functions on the DataFrame to identify anomalies in the new data:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667419398 + } + }, + "outputs": [], + "source": [ + "\n", + "new_data[\"Residual\"] = new_data[\"ActualUsage\"] - new_data[\"PredictedUsage\"]\n", + "new_data_datatypes = new_data[\"DataType\"].unique()\n", + "\n", + "new_data.set_index('DataType', inplace=True)\n", + "\n", + "anomalies_df = pd.DataFrame()\n", + "for data_type in new_data_datatypes:\n", + " type_anomalies = outlier_update_data_frame(new_data.loc[data_type, :])\n", + " # Add DataType as a column since we reset index later on\n", + " type_anomalies['DataType'] = data_type\n", + " anomalies_df = pd.concat([anomalies_df, type_anomalies], ignore_index=True)\n", + "\n", + "new_data.reset_index(inplace=True)\n", + "\n", + "print(f\"{len(anomalies_df)} anomalies detected\")\n", + "display(anomalies_df)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "## 4. Ingest anomalies (optional)\n", + "\n", + "Optionally, we can upload detected anomalies to a custom table in a Log Analytics workspace. This can be useful for further analysis or visualization.\n", + "\n", + "To send data to your Log Analytics workspace, you need a registered Azure Active Directory application, custom table, data collection endpoint (DCE), and data collection rule (DCR). You also need to assign permissions to data collection rule so that the Azure AD application can upload.\n", + "\n", + "Use the following tutorial for specifics on creating the prerequisites: [Tutorial: Send data to Azure Monitor Logs with Logs ingestion API (Azure portal) ](https://learn.microsoft.com/azure/azure-monitor/logs/tutorial-logs-ingestion-portal)." 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When creating the table for the custom logs, use the JSON file created in the following cell when asked to upload a sample JSON file:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667419568 + } + }, + "outputs": [], + "source": [ + "import json\n", + "\n", + "sample_data = [{\n", + " \"TimeGenerated\": \"2023-03-19T19:56:43.7447391Z\",\n", + " \"ActualUsage\": 40.1,\n", + " \"PredictedUsage\": 45.1,\n", + " \"Anomalies\": -1,\n", + " \"DataType\": \"AzureDiagnostics\"\n", + "}]\n", + "\n", + "with open(\"data_sample.json\", \"w\") as file:\n", + " json.dump(sample_data, file)\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then use the following in the `Transformation Editor`:\n", + "\n", + "`source | extend AnomalyTimeGenerated = todatetime(TimeGenerated) | extend TimeGenerated = now() `\n", + "\n", + "This will add a transformation so that `AnomalyTimeGenerated` indicates the time when the anomaly was detected and `TimeGenerated` indicates the time when the anomaly was uploaded to the custom table." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define constants\n", + "\n", + "Define constants for your Azure AD application and DCR/DCE information." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667419730 + } + }, + "outputs": [], + "source": [ + "AZURE_TENANT_ID = \"\"; # ID of the tenant where the data collection endpoint resides\n", + "AZURE_CLIENT_ID = \"\"; # Application ID to which you granted permissions to your data collection rule\n", + "AZURE_CLIENT_SECRET = \"\"; # Secret created for the application\n", + "\n", + "LOGS_DCR_STREAM_NAME = \"\" # Name of the custom stream from the data collection rule (e.g. \"Custom-DetectedAnomalies_CL\")\n", + "LOGS_DCR_RULE_ID = \"\" # immutableId of your data collection rule (Can be found in the JSON View of the data collection rule overview page)\n", + "DATA_COLLECTION_ENDPOINT = \"\" # URL that looks like this: https://xxxx.ingest.monitor.azure.com" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Ingest the data\n", + "\n", + "After creating the table and Data collection rule, you can use the following code to ingest the data into the custom table.\n", + "\n", + "**Note:** After creating the table, it can take up to 15 minutes for the table to be available for ingestion through the DCR stream." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "gather": { + "logged": 1683667420147 + } + }, + "outputs": [], + "source": [ + "from azure.core.exceptions import HttpResponseError\n", + "from azure.identity import ClientSecretCredential\n", + "from azure.monitor.ingestion import LogsIngestionClient\n", + "\n", + "\n", + "credential = ClientSecretCredential(\n", + " tenant_id=AZURE_TENANT_ID,\n", + " client_id=AZURE_CLIENT_ID,\n", + " client_secret=AZURE_CLIENT_SECRET\n", + ")\n", + "\n", + "client = LogsIngestionClient(endpoint=DATA_COLLECTION_ENDPOINT, credential=credential, logging_enable=True)\n", + "\n", + "body = json.loads(anomalies_df.to_json(orient='records', date_format='iso'))\n", + "\n", + "try:\n", + " response = client.upload(rule_id=LOGS_DCR_RULE_ID, stream_name=LOGS_DCR_STREAM_NAME, logs=body)\n", + " print(\"Upload request accepted\")\n", + "except HttpResponseError as e:\n", + " print(f\"Upload failed: {e}\")\n", + "\n" + ] + } + ], + "metadata": { + "kernel_info": { + "name": "python3" + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.1" + }, + "microsoft": { + "host": { + "AzureML": { + "notebookHasBeenCompleted": true + } + }, + "ms_spell_check": { + "ms_spell_check_language": "en" + } + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_authentication.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_authentication.py new file mode 100644 index 000000000000..9e0ca723e35f --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_authentication.py @@ -0,0 +1,39 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_authentication.py +DESCRIPTION: + This sample demonstrates how to authenticate to the Azure Monitor Logs service using + LogsQueryClient. +USAGE: + python sample_authentication.py + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. 
+""" + + +def create_logs_query_client(): + # [START create_logs_query_client] + from azure.identity import DefaultAzureCredential + from azure.monitor.querylogs import LogsQueryClient + + credential = DefaultAzureCredential() + client = LogsQueryClient(credential) + # [END create_logs_query_client] + + +def create_logs_query_client_sovereign_cloud(): + # [START create_logs_query_client_sovereign_cloud] + from azure.identity import AzureAuthorityHosts, DefaultAzureCredential + from azure.monitor.querylogs import LogsQueryClient + + credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) + client = LogsQueryClient(credential, endpoint="https://api.loganalytics.us/v1") + # [END create_logs_query_client_sovereign_cloud] + + +if __name__ == "__main__": + create_logs_query_client() + create_logs_query_client_sovereign_cloud() diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py new file mode 100644 index 000000000000..59f3d903f9ee --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_batch_query.py @@ -0,0 +1,71 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_batch_query.py +DESCRIPTION: + This sample demonstrates querying multiple queries in a batch. +USAGE: + python sample_batch_query.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. 
+""" + +# [START send_query_batch] +from datetime import datetime, timedelta, timezone +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsBatchQuery, LogsQueryStatus +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id=os.environ["LOGS_WORKSPACE_ID"], + ), + LogsBatchQuery(query="bad query", timespan=timedelta(days=1), workspace_id=os.environ["LOGS_WORKSPACE_ID"]), + LogsBatchQuery( + query="""let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id=os.environ["LOGS_WORKSPACE_ID"], + timespan=(datetime(2021, 6, 2, tzinfo=timezone.utc), datetime(2021, 6, 5, tzinfo=timezone.utc)), # (start, end) + include_statistics=True, + ), +] + +try: + results = client.query_batch(requests) + + for res in results: + if res.status == LogsQueryStatus.SUCCESS: + # This will be a LogsQueryResult + table = res.tables[0] + df = pd.DataFrame(table.rows, columns=table.columns) + print(df) + elif res.status == LogsQueryStatus.PARTIAL: + # This will be a LogsQueryPartialResult + print(res.partial_error) + for table in res.partial_data: + df = pd.DataFrame(table.rows, columns=table.columns) + print(df) + else: + # This will be a LogsQueryError + print(res) +except HttpResponseError as err: + print("something fatal happened") + print(err) +# [END send_query_batch] diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_key_value_form.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_key_value_form.py new file mode 100644 index 000000000000..79bf440ea656 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_key_value_form.py @@ -0,0 +1,51 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_logs_query_key_value_form.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and querying a single query + and printing the response in a key value form. +USAGE: + python sample_logs_query_key_value_form.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. 
+""" +from datetime import timedelta +import os +from pprint import pprint + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = "AppRequests | take 5" + +try: + response = client.query_workspace(os.environ["LOGS_WORKSPACE_ID"], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + key_value = df.to_dict(orient="records") + pprint(key_value) +except HttpResponseError as err: + print("something fatal happened") + print(err) diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py new file mode 100644 index 000000000000..1500ee530511 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_query_multiple_workspaces.py @@ -0,0 +1,47 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_logs_query_multiple_workspaces.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and querying a single query + on multiple workspaces using the additional_workspaces param. +USAGE: + python sample_logs_query_multiple_workspaces.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + 2) SECONDARY_WORKSPACE_ID - An additional workspace. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. +""" +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = "AppRequests | take 5" + +try: + response = client.query_workspace( + os.environ["LOGS_WORKSPACE_ID"], + query, + timespan=timedelta(days=1), + additional_workspaces=[os.environ["SECONDARY_WORKSPACE_ID"]], + ) + for table in response: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py new file mode 100644 index 000000000000..b7aaa3489524 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query.py @@ -0,0 +1,57 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+""" +FILE: sample_logs_single_query.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and querying a single query. +USAGE: + python sample_logs_single_query.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. +""" +# [START send_logs_query] +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = "AppRequests | take 5" + +try: + response = client.query_workspace(os.environ["LOGS_WORKSPACE_ID"], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) + +# [END send_logs_query] +""" + TimeGenerated _ResourceId avgRequestDuration +0 2021-05-27T08:40:00Z /subscriptions/faa080af-c1d8-40ad-9cce-e1a450c... 27.307699999999997 +1 2021-05-27T08:50:00Z /subscriptions/faa080af-c1d8-40ad-9cce-e1a450c... 18.11655 +2 2021-05-27T09:00:00Z /subscriptions/faa080af-c1d8-40ad-9cce-e1a450c... 24.5271 +""" diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query_partial_result.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query_partial_result.py new file mode 100644 index 000000000000..46c047523d68 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_logs_single_query_partial_result.py @@ -0,0 +1,61 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_logs_single_query_partial_result.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and querying a single query + and handling a partial query response. +USAGE: + python sample_logs_single_query_partial_result.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. 
+""" +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + + +# This block of code is exactly the same whether the expected result is a success, a failure, or a +# partial success +try: + response = client.query_workspace(os.environ["LOGS_WORKSPACE_ID"], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) + +""" + TimeGenerated _ResourceId avgRequestDuration +0 2021-05-27T08:40:00Z /subscriptions/faa080af-c1d8-40ad-9cce-e1a450c... 27.307699999999997 +1 2021-05-27T08:50:00Z /subscriptions/faa080af-c1d8-40ad-9cce-e1a450c... 18.11655 +2 2021-05-27T09:00:00Z /subscriptions/faa080af-c1d8-40ad-9cce-e1a450c... 24.5271 +""" diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_resource_logs_query.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_resource_logs_query.py new file mode 100644 index 000000000000..fd87afa5f353 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_resource_logs_query.py @@ -0,0 +1,53 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_resource_logs_query.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and querying the logs + of a specific resource. Update the `query` variable with a query that corresponds to + your resource. +USAGE: + python sample_resource_logs_query.py + Set the environment variables with your own values before running the sample: + 1) LOGS_RESOURCE_ID - The resource ID. Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}` + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. 
+""" +# [START resource_logs_query] +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = "AzureActivity | take 5" + +try: + response = client.query_resource(os.environ["LOGS_RESOURCE_ID"], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) + +# [END resource_logs_query] diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_server_timeout.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_server_timeout.py new file mode 100644 index 000000000000..293a7428debe --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_server_timeout.py @@ -0,0 +1,53 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_server_timeout.py +DESCRIPTION: + This sample demonstrates how to update a server timeout for a long running query. +USAGE: + python sample_server_timeout.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. + +**Note** - Although this example uses pandas to print the response, it's optional and +isn't a required package for querying. Alternatively, native Python can be used as well. +""" +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus +import pandas as pd + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = "range x from 1 to 10000000000 step 1 | count" + +try: + response = client.query_workspace( + os.environ["LOGS_WORKSPACE_ID"], + query, + timespan=timedelta(days=1), + server_timeout=600, # sets the timeout to 10 minutes + ) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + df = pd.DataFrame(data=table.rows, columns=table.columns) + print(df) +except HttpResponseError as err: + print("something fatal happened") + print(err) diff --git a/sdk/monitor/azure-monitor-querylogs/samples/sample_single_log_query_without_pandas.py b/sdk/monitor/azure-monitor-querylogs/samples/sample_single_log_query_without_pandas.py new file mode 100644 index 000000000000..6018f6d26051 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/samples/sample_single_log_query_without_pandas.py @@ -0,0 +1,49 @@ +# pylint: disable=line-too-long,useless-suppression +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. +""" +FILE: sample_single_log_query_without_pandas.py +DESCRIPTION: + This sample demonstrates authenticating the LogsQueryClient and querying a single query + and handling a query response without using pandas. +USAGE: + python sample_single_log_query_without_pandas.py + Set the environment variables with your own values before running the sample: + 1) LOGS_WORKSPACE_ID - The first (primary) workspace ID. + +This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. +For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. +""" +from datetime import timedelta +import os + +from azure.core.exceptions import HttpResponseError +from azure.identity import DefaultAzureCredential +from azure.monitor.querylogs import LogsQueryClient, LogsQueryStatus + + +credential = DefaultAzureCredential() +client = LogsQueryClient(credential) + +query = "AppRequests | take 5" + +try: + response = client.query_workspace(os.environ["LOGS_WORKSPACE_ID"], query, timespan=timedelta(days=1)) + if response.status == LogsQueryStatus.SUCCESS: + data = response.tables + else: + # LogsQueryPartialResult - handle error here + error = response.partial_error + data = response.partial_data + print(error) + + for table in data: + for col in table.columns: + print(col + " ", end="") + for row in table.rows: + for item in row: + print(item, end="") + print("\n") +except HttpResponseError as err: + print("something fatal happened") + print(err) diff --git a/sdk/monitor/azure-monitor-querylogs/sdk_packaging.toml b/sdk/monitor/azure-monitor-querylogs/sdk_packaging.toml new file mode 100644 index 000000000000..901bc8ccbfa6 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/sdk_packaging.toml @@ -0,0 +1,2 @@ +[packaging] +auto_update = false diff --git a/sdk/monitor/azure-monitor-querylogs/setup.py b/sdk/monitor/azure-monitor-querylogs/setup.py new file mode 100644 index 000000000000..f430fd17f0de --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/setup.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + + +import os +import re +from setuptools import setup, find_packages + + +PACKAGE_NAME = "azure-monitor-querylogs" +PACKAGE_PPRINT_NAME = "Azure Monitor Query Logs" +PACKAGE_NAMESPACE = "azure.monitor.querylogs" + +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) + +if not version: + raise RuntimeError("Cannot find version information") + + +setup( + name=PACKAGE_NAME, + version=version, + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", + keywords="azure, azure sdk", + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "License :: OSI Approved :: MIT License", + ], + zip_safe=False, + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + "azure", + "azure.monitor", + ] + ), + include_package_data=True, + package_data={ + "azure.monitor.querylogs": ["py.typed"], + }, + install_requires=[ + "isodate>=0.6.1", + "azure-core>=1.30.0", + "typing-extensions>=4.6.0", + ], + python_requires=">=3.9", +) diff --git a/sdk/monitor/azure-monitor-querylogs/tests/base_testcase.py b/sdk/monitor/azure-monitor-querylogs/tests/base_testcase.py new file mode 100644 index 000000000000..f5097ba9f5db --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/base_testcase.py @@ -0,0 +1,29 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. 
+# ------------------------------------------------------------------------- +import os + +from devtools_testutils import AzureRecordedTestCase + + +ENV_MONITOR_ENVIRONMENT = "MONITOR_ENVIRONMENT" + +LOGS_ENVIRONMENT_ENDPOINT_MAP = { + "AzureCloud": "https://api.loganalytics.io/v1", + "AzureChinaCloud": "https://api.loganalytics.azure.cn/v1", + "AzureUSGovernment": "https://api.loganalytics.us/v1", +} + + +class AzureMonitorQueryLogsTestCase(AzureRecordedTestCase): + + def get_client(self, client_class, credential): + + kwargs = {} + environment = os.getenv(ENV_MONITOR_ENVIRONMENT) + if environment: + kwargs["endpoint"] = LOGS_ENVIRONMENT_ENDPOINT_MAP[environment] + + return self.create_client_from_credential(client_class, credential, **kwargs) diff --git a/sdk/monitor/azure-monitor-querylogs/tests/conftest.py b/sdk/monitor/azure-monitor-querylogs/tests/conftest.py new file mode 100644 index 000000000000..5d0766eafb5a --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/conftest.py @@ -0,0 +1,51 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +import pytest + +from devtools_testutils import add_header_regex_sanitizer, remove_batch_sanitizers, add_body_key_sanitizer + + +# Environment variable keys +ENV_SUBSCRIPTION_ID = "AZURE_SUBSCRIPTION_ID" +ENV_WORKSPACE_ID = "AZURE_MONITOR_WORKSPACE_ID" +ENV_SECONDARY_WORKSPACE_ID = "AZURE_MONITOR_SECONDARY_WORKSPACE_ID" +ENV_DCR_ID = "AZURE_MONITOR_DCR_ID" +ENV_TABLE_NAME = "AZURE_MONITOR_TABLE_NAME" +ENV_TENANT_ID = "AZURE_TENANT_ID" +ENV_CLIENT_ID = "AZURE_CLIENT_ID" +ENV_CLIENT_SECRET = "AZURE_CLIENT_SECRET" + +# Fake values +TEST_ID = "00000000-0000-0000-0000-000000000000" +TEST_TABLE_NAME = "test-table" + + +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy, environment_variables): + sanitization_mapping = { + ENV_SUBSCRIPTION_ID: TEST_ID, + ENV_WORKSPACE_ID: TEST_ID, + ENV_SECONDARY_WORKSPACE_ID: TEST_ID, + ENV_TENANT_ID: TEST_ID, + ENV_CLIENT_ID: TEST_ID, + ENV_CLIENT_SECRET: TEST_ID, + ENV_TABLE_NAME: TEST_TABLE_NAME, + ENV_DCR_ID: TEST_ID, + } + environment_variables.sanitize_batch(sanitization_mapping) + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + # Remove the following body key sanitizer: + # - AZSDK3493: $..name + remove_batch_sanitizers(["AZSDK3493"]) + + +@pytest.fixture(scope="session") +def monitor_info(environment_variables): + yield { + "workspace_id": environment_variables.get(ENV_WORKSPACE_ID), + "secondary_workspace_id": environment_variables.get(ENV_SECONDARY_WORKSPACE_ID), + "table_name": environment_variables.get(ENV_TABLE_NAME), + } diff --git a/sdk/monitor/azure-monitor-querylogs/tests/test_exceptions.py b/sdk/monitor/azure-monitor-querylogs/tests/test_exceptions.py new file mode 100644 index 000000000000..d3b183d865a7 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_exceptions.py @@ -0,0 +1,155 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. 
+# ------------------------------------------------------------------------- +from datetime import datetime, timedelta + +import pytest + +from azure.identity import ClientSecretCredential +from azure.core.exceptions import HttpResponseError +from azure.monitor.querylogs import ( + LogsQueryClient, + LogsBatchQuery, + LogsQueryError, + LogsQueryPartialResult, + LogsQueryStatus, +) + +from base_testcase import AzureMonitorQueryLogsTestCase + + +class TestQueryExceptions(AzureMonitorQueryLogsTestCase): + + def test_logs_single_query_fatal_exception(self, recorded_test): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + with pytest.raises(HttpResponseError): + client.query_workspace("bad_workspace_id", "AppRequests", timespan=None) + + def test_logs_single_query_partial_exception(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=timedelta(days=1)) + assert response.__class__ == LogsQueryPartialResult + assert response.status == LogsQueryStatus.PARTIAL + assert response.partial_error is not None + assert response.partial_data is not None + assert response.partial_error.details is not None + assert response.partial_error.code == "PartialError" + assert response.partial_error.__class__ == LogsQueryError + + def test_logs_resource_query_fatal_exception(self, recorded_test): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + with pytest.raises(HttpResponseError): + client.query_resource("/bad/resource/id", "AzureActivity", timespan=None) + + def test_logs_resource_query_partial_exception(self, recorded_test, monitor_info): + # Since this is logs-only package, we'll test workspace query instead + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=timedelta(days=1)) + assert response.__class__ == LogsQueryPartialResult + assert response.status == LogsQueryStatus.PARTIAL + assert response.partial_error is not None + assert response.partial_data is not None + assert response.partial_error.details is not None + assert response.partial_error.code == "PartialError" + assert response.partial_error.__class__ == LogsQueryError + + def test_logs_batch_query_fatal_exception(self, recorded_test, monitor_info): + credential = ClientSecretCredential( + client_id="00000000-0000-0000-0000-000000000000", + client_secret="bad_secret", + tenant_id="00000000-0000-0000-0000-000000000000", + ) + client = self.get_client(LogsQueryClient, credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id=monitor_info["workspace_id"], + ), + LogsBatchQuery( + query="""AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id=monitor_info["workspace_id"], + ), + LogsBatchQuery( + query="""let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id=monitor_info["workspace_id"], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True, + ), + ] + with 
pytest.raises(HttpResponseError): + responses = client.query_batch(requests) + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + def test_logs_query_batch_partial_exception(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + requests = [ + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + ), + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="""let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + timespan=timedelta(days=1), + ), + ] + responses = client.query_batch(requests) + assert responses[1].__class__ == LogsQueryPartialResult + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + def test_logs_query_batch_non_fatal_exception(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + requests = [ + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + ), + LogsBatchQuery(workspace_id=monitor_info["workspace_id"], query="not a valid query", timespan=None), + ] + responses = client.query_batch(requests) + assert responses[1].__class__ == LogsQueryError + + def test_logs_query_batch_raises_with_no_timespan(self, monitor_info): + with pytest.raises(TypeError): + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="AzureActivity | summarize count()", + ) + + def test_logs_bad_query_fatal_exception(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + with pytest.raises(HttpResponseError): + client.query_workspace(monitor_info["workspace_id"], "not a table", timespan=None) + + def test_logs_query_result_partial_success_iterator(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + # LogsQueryPartialResult as an iterator should return tables from `partial_data` + assert response.__class__ == LogsQueryPartialResult + for table in response: + assert table is not None + + def test_logs_invalid_credential(self, recorded_test, monitor_info): + credential = ClientSecretCredential(client_id="client_id", client_secret="client_secret", tenant_id="tenant-id") + client = LogsQueryClient(credential) + with pytest.raises(HttpResponseError) as e: + client.query_workspace(monitor_info["workspace_id"], "AppRequests", timespan=None) diff --git a/sdk/monitor/azure-monitor-querylogs/tests/test_exceptions_async.py b/sdk/monitor/azure-monitor-querylogs/tests/test_exceptions_async.py new file mode 100644 index 000000000000..81c89b4edae4 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_exceptions_async.py @@ -0,0 +1,129 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. 
+# ------------------------------------------------------------------------- +from datetime import timedelta, datetime + +import pytest + +from azure.identity.aio import ClientSecretCredential +from azure.core.exceptions import HttpResponseError +from azure.monitor.querylogs import ( + LogsBatchQuery, + LogsQueryError, + LogsQueryResult, + LogsQueryPartialResult, + LogsQueryStatus, +) +from azure.monitor.querylogs.aio import LogsQueryClient + +from base_testcase import AzureMonitorQueryLogsTestCase + + +class TestQueryExceptionsAsync(AzureMonitorQueryLogsTestCase): + + @pytest.mark.asyncio + async def test_logs_single_query_fatal_exception(self, recorded_test): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + with pytest.raises(HttpResponseError): + await client.query_workspace("bad_workspace_id", "AppRequests", timespan=None) + + @pytest.mark.asyncio + async def test_logs_single_query_partial_exception_not_allowed(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = await client.query_workspace(monitor_info["workspace_id"], query, timespan=timedelta(days=1)) + assert response.__class__ == LogsQueryPartialResult + assert response.status == LogsQueryStatus.PARTIAL + assert response.partial_error is not None + assert response.partial_error.code == "PartialError" + assert response.partial_error.__class__ == LogsQueryError + + @pytest.mark.asyncio + async def test_logs_resource_query_fatal_exception(self, recorded_test): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + with pytest.raises(HttpResponseError): + await client.query_resource("/bad/resource/id", "AzureActivity", timespan=None) + + @pytest.mark.asyncio + async def test_logs_resource_query_partial_exception(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + # Modified to use workspace query instead of resource query for logs-only package + response = await client.query_workspace(monitor_info["workspace_id"], query, timespan=timedelta(days=1)) + assert response.__class__ == LogsQueryPartialResult + assert response.partial_error is not None + assert response.partial_error.code == "PartialError" + assert response.partial_error.__class__ == LogsQueryError + + @pytest.mark.asyncio + async def test_logs_batch_query_fatal_exception(self, recorded_test, monitor_info): + credential = ClientSecretCredential( + client_id="00000000-0000-0000-0000-000000000000", + client_secret="bad_secret", + tenant_id="00000000-0000-0000-0000-000000000000", + ) + client = self.get_client(LogsQueryClient, credential) + async with client: + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id=monitor_info["workspace_id"], + ), + LogsBatchQuery( + query="""AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id=monitor_info["workspace_id"], + ), + LogsBatchQuery( + query="""let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize 
percentilesw(x, Weight * 100, 50)""", + workspace_id=monitor_info["workspace_id"], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True, + ), + ] + with pytest.raises(HttpResponseError): + await client.query_batch(requests) + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + @pytest.mark.asyncio + async def test_logs_batch_query_partial_exception_not_allowed(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id=monitor_info["workspace_id"], + ), + LogsBatchQuery( + query="""bad query | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id=monitor_info["workspace_id"], + ), + LogsBatchQuery( + query="""let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id=monitor_info["workspace_id"], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True, + ), + ] + responses = await client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryError + assert r3.__class__ == LogsQueryPartialResult diff --git a/sdk/monitor/azure-monitor-querylogs/tests/test_helpers.py b/sdk/monitor/azure-monitor-querylogs/tests/test_helpers.py new file mode 100644 index 000000000000..13d7052d5905 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_helpers.py @@ -0,0 +1,33 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +from datetime import datetime, timedelta +import pytest + +from azure.monitor.querylogs._helpers import get_subscription_id_from_resource + + +def test_get_subscription_id_from_resource(): + assert ( + get_subscription_id_from_resource( + "/subscriptions/00000000-1111-2222-3333-000000000000/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm" + ) + == "00000000-1111-2222-3333-000000000000" + ) + + # Test witout preceding slash + assert ( + get_subscription_id_from_resource( + "subscriptions/00000000-1111-2222-3333-000000000000/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm" + ) + == "00000000-1111-2222-3333-000000000000" + ) + + with pytest.raises(ValueError): + get_subscription_id_from_resource("/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm") + + with pytest.raises(ValueError): + get_subscription_id_from_resource("") diff --git a/sdk/monitor/azure-monitor-querylogs/tests/test_logs_client.py b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_client.py new file mode 100644 index 000000000000..bb87e9bf0fe9 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_client.py @@ -0,0 +1,311 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. 
+# ------------------------------------------------------------------------- +from datetime import timedelta + +import pytest + +from azure.core.exceptions import HttpResponseError +from azure.monitor.querylogs import ( + LogsQueryClient, + LogsBatchQuery, + LogsQueryError, + LogsTable, + LogsQueryResult, + LogsTableRow, + LogsQueryPartialResult, + LogsQueryStatus, +) +from azure.monitor.querylogs._helpers import native_col_type +from azure.monitor.querylogs._version import VERSION + +from base_testcase import AzureMonitorQueryLogsTestCase + + +class TestLogsClient(AzureMonitorQueryLogsTestCase): + + def test_logs_single_query(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppRequests | + where TimeGenerated > ago(12h) | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" + + # returns LogsQueryResult + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + assert response is not None + assert response.status == LogsQueryStatus.SUCCESS + assert response.tables is not None + + def test_logs_single_query_raises_no_timespan(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppRequests | + where TimeGenerated > ago(12h) | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" + + # returns LogsQueryResult + with pytest.raises(TypeError): + client.query_workspace(monitor_info["workspace_id"], query) + + def test_logs_single_query_with_non_200(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppInsights | + where TimeGenerated > ago(12h)""" + + with pytest.raises(HttpResponseError) as e: + client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + assert "SemanticError" in e.value.message + + def test_logs_single_query_with_partial_success(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + assert response.partial_error is not None + assert response.partial_data is not None + assert response.status == LogsQueryStatus.PARTIAL + assert response.__class__ == LogsQueryPartialResult + + def test_logs_server_timeout(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + + try: + response = client.query_workspace( + monitor_info["workspace_id"], + "range x from 1 to 1000000000000000 step 1 | count", + timespan=None, + server_timeout=2, + retry_total=0, + ) + except HttpResponseError as e: + assert "Gateway timeout" in e.message + else: + # Response an be observed as either 504 response code from the gateway or a partial failure 200 response. 
+ assert response.status == LogsQueryStatus.PARTIAL + assert "timed out" in str(response.partial_error) + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + def test_logs_query_batch_default(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + + requests = [ + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + ), + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="""AppRequests | take 10 | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""", + timespan=timedelta(hours=1), + ), + LogsBatchQuery(workspace_id=monitor_info["workspace_id"], query="Wrong query | take 2", timespan=None), + ] + response = client.query_batch(requests) + + assert len(response) == 3 + + r0 = response[0] + assert r0.tables[0].columns == ["count_"] + r1 = response[1] + assert r1.tables[0].columns[0] == "TimeGenerated" + assert r1.tables[0].columns[1] == "_ResourceId" + assert r1.tables[0].columns[2] == "avgRequestDuration" + r2 = response[2] + assert r2.__class__ == LogsQueryError + + def test_logs_single_query_with_statistics(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppRequests | take 10""" + + # returns LogsQueryResult + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=None, include_statistics=True) + + assert response.statistics is not None + + def test_logs_single_query_with_visualization(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppRequests | take 10""" + + # returns LogsQueryResult + response = client.query_workspace( + monitor_info["workspace_id"], query, timespan=None, include_visualization=True + ) + + assert response.visualization is not None + + def test_logs_single_query_with_visualization_and_stats(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppRequests | take 10""" + + # returns LogsQueryResult + response = client.query_workspace( + monitor_info["workspace_id"], query, timespan=None, include_visualization=True, include_statistics=True + ) + + assert response.visualization is not None + assert response.statistics is not None + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + def test_logs_query_batch_with_statistics_in_some(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + + requests = [ + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + ), + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="""AppRequests| + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""", + timespan=timedelta(hours=1), + include_statistics=True, + ), + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], query="AppRequests", timespan=None, include_statistics=True + ), + ] + response = client.query_batch(requests) + + assert len(response) == 3 + assert response[0].statistics is None + assert response[2].statistics is not None + + def test_logs_single_query_additional_workspaces(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, 
self.get_credential(LogsQueryClient)) + query = ( + f"{monitor_info['table_name']} | where TimeGenerated > ago(100d)" + "| project TenantId | summarize count() by TenantId" + ) + # returns LogsQueryResult + response = client.query_workspace( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ) + + assert response is not None + assert len(response.tables[0].rows) == 2 + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + def test_logs_query_batch_additional_workspaces(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = ( + f"{monitor_info['table_name']} | where TimeGenerated > ago(100d)" + "| project TenantId | summarize count() by TenantId" + ) + requests = [ + LogsBatchQuery( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ), + LogsBatchQuery( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ), + LogsBatchQuery( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ), + ] + response = client.query_batch(requests) + for resp in response: + assert len(resp.tables[0].rows) == 2 + + def test_logs_query_result_iterate_over_tables(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + + query = "AppRequests | take 10; AppRequests | take 5" + + response = client.query_workspace( + monitor_info["workspace_id"], query, timespan=None, include_statistics=True, include_visualization=True + ) + + ## should iterate over tables + for item in response: + assert item.__class__ == LogsTable + + assert response.statistics is not None + assert response.visualization is not None + assert len(response.tables) == 2 + assert response.__class__ == LogsQueryResult + + def test_logs_query_result_row_type(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + + query = "AppRequests | take 5" + + response = client.query_workspace( + monitor_info["workspace_id"], + query, + timespan=None, + ) + + ## should iterate over tables + for table in response: + assert table.__class__ == LogsTable + + for row in table.rows: + assert row.__class__ == LogsTableRow + + def test_native_col_type(self): + val = native_col_type("datetime", None) + assert val is None + + val = native_col_type("datetime", "2020-10-10") + assert val is not None + + def test_logs_resource_query(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = "AppRequests | summarize count()" + + # A resource-scoped logs query would need a resource ID, so this test runs the + # equivalent query against the workspace instead. + response = client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + assert response is not None + assert response.tables is not None + + def test_logs_resource_query_additional_options(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = "AppRequests | summarize count()" + + response = client.query_workspace( + monitor_info["workspace_id"], + query, + timespan=None, + include_statistics=True,
+ include_visualization=True, + ) + + assert response.visualization is not None + assert response.statistics is not None + + def test_client_different_endpoint(self): + credential = self.get_credential(LogsQueryClient) + endpoint = "https://api.loganalytics.azure.cn/v1" + client = LogsQueryClient(credential, endpoint=endpoint) + + assert client._endpoint == endpoint + assert "https://api.loganalytics.azure.cn/.default" in client._client._config.authentication_policy._scopes + + def test_client_user_agent(self): + client: LogsQueryClient = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + assert f"monitor-querylogs/{VERSION}" in client._client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-querylogs/tests/test_logs_client_async.py b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_client_async.py new file mode 100644 index 000000000000..7b89ff0491fd --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_client_async.py @@ -0,0 +1,269 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +from datetime import timedelta + +import pytest + +from azure.core.exceptions import HttpResponseError +from azure.monitor.querylogs import ( + LogsBatchQuery, + LogsQueryError, + LogsTable, + LogsQueryResult, + LogsTableRow, + LogsQueryStatus, +) +from azure.monitor.querylogs.aio import LogsQueryClient +from azure.monitor.querylogs._version import VERSION + +from base_testcase import AzureMonitorQueryLogsTestCase + + +class TestLogsClientAsync(AzureMonitorQueryLogsTestCase): + + @pytest.mark.asyncio + async def test_logs_auth(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = """AppRequests | + where TimeGenerated > ago(12h) | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" + + # returns LogsQueryResult + response = await client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + assert response is not None + assert response.status == LogsQueryStatus.SUCCESS + assert response.tables is not None + + @pytest.mark.asyncio + async def test_logs_auth_no_timespan(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = """AppRequests | + where TimeGenerated > ago(12h) | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" + + # returns LogsQueryResult + with pytest.raises(TypeError): + await client.query_workspace(monitor_info["workspace_id"], query) + + @pytest.mark.asyncio + async def test_logs_server_timeout(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + + try: + async with client: + response = await client.query_workspace( + monitor_info["workspace_id"], + "range x from 1 to 1000000000000000 step 1 | count", + timespan=None, + server_timeout=2, + retry_total=0, + ) + except HttpResponseError as e: + assert "Gateway timeout" in e.message + else: + # The response can be observed as either a 504 response code from the gateway or a partial failure 200 response.
+ assert response.status == LogsQueryStatus.PARTIAL + assert "timed out" in str(response.partial_error) + + @pytest.mark.asyncio + async def test_logs_query_batch_raises_on_no_timespan(self, monitor_info): + with pytest.raises(TypeError): + LogsBatchQuery( + workspace_id=monitor_info["workspace_id"], + query="AzureActivity | summarize count()", + ) + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + @pytest.mark.asyncio + async def test_logs_query_batch_default(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + requests = [ + LogsBatchQuery( + monitor_info["workspace_id"], query="AzureActivity | summarize count()", timespan=timedelta(hours=1) + ), + LogsBatchQuery( + monitor_info["workspace_id"], + query="""AppRequests | take 10 | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""", + timespan=timedelta(hours=1), + ), + LogsBatchQuery(monitor_info["workspace_id"], query="Wrong query | take 2", timespan=None), + ] + response = await client.query_batch(requests) + + assert len(response) == 3 + r0 = response[0] + assert r0.tables[0].columns == ["count_"] + r1 = response[1] + assert r1.tables[0].columns[0] == "TimeGenerated" + assert r1.tables[0].columns[1] == "_ResourceId" + assert r1.tables[0].columns[2] == "avgRequestDuration" + r2 = response[2] + assert r2.__class__ == LogsQueryError + + @pytest.mark.asyncio + async def test_logs_single_query_additional_workspaces_async(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = ( + f"{monitor_info['table_name']} | where TimeGenerated > ago(100d)" + "| project TenantId | summarize count() by TenantId" + ) + + # returns LogsQueryResult + response = await client.query_workspace( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ) + + assert response + assert len(response.tables[0].rows) == 2 + + @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") + @pytest.mark.asyncio + async def test_logs_query_batch_additional_workspaces(self, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = ( + f"{monitor_info['table_name']} | where TimeGenerated > ago(100d)" + "| project TenantId | summarize count() by TenantId" + ) + requests = [ + LogsBatchQuery( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ), + LogsBatchQuery( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ), + LogsBatchQuery( + monitor_info["workspace_id"], + query, + timespan=None, + additional_workspaces=[monitor_info["secondary_workspace_id"]], + ), + ] + response = await client.query_batch(requests) + + assert len(response) == 3 + for resp in response: + assert len(resp.tables[0].rows) == 2 + + @pytest.mark.asyncio + async def test_logs_single_query_with_visualization(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = """AppRequests | take 10""" + + # returns LogsQueryResult + response = await client.query_workspace( + monitor_info["workspace_id"], 
query, timespan=None, include_visualization=True + ) + + assert response.visualization is not None + + @pytest.mark.asyncio + async def test_logs_single_query_with_visualization_and_stats(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = """AppRequests | take 10""" + # returns LogsQueryResult + response = await client.query_workspace( + monitor_info["workspace_id"], query, timespan=None, include_visualization=True, include_statistics=True + ) + + assert response.visualization is not None + assert response.statistics is not None + + @pytest.mark.asyncio + async def test_logs_query_result_iterate_over_tables(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = "AppRequests | take 10; AppRequests | take 5" + response = await client.query_workspace( + monitor_info["workspace_id"], query, timespan=None, include_statistics=True, include_visualization=True + ) + + ## should iterate over tables + for item in response: + assert item.__class__ == LogsTable + + assert response.statistics is not None + assert response.visualization is not None + assert len(response.tables) == 2 + assert response.__class__ == LogsQueryResult + + @pytest.mark.asyncio + async def test_logs_query_result_row_type(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = "AppRequests | take 5" + response = await client.query_workspace( + monitor_info["workspace_id"], + query, + timespan=None, + ) + + ## should iterate over tables + for table in response: + assert table.__class__ == LogsTable + + for row in table.rows: + assert row.__class__ == LogsTableRow + + @pytest.mark.asyncio + async def test_logs_resource_query(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = "AppRequests | summarize count()" + + # Modified to use workspace query instead of resource query for logs-only package + response = await client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + assert response is not None + assert response.tables is not None + + @pytest.mark.asyncio + async def test_logs_resource_query_additional_options(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + query = "AppRequests | summarize count()" + + response = await client.query_workspace( + monitor_info["workspace_id"], + query, + timespan=None, + include_statistics=True, + include_visualization=True, + ) + + assert response.visualization is not None + assert response.statistics is not None + + @pytest.mark.asyncio + async def test_client_different_endpoint(self): + credential = self.get_credential(LogsQueryClient, is_async=True) + endpoint = "https://api.loganalytics.azure.cn/v1" + client = LogsQueryClient(credential, endpoint=endpoint) + + assert client._endpoint == endpoint + + @pytest.mark.asyncio + async def test_client_user_agent(self): + client: LogsQueryClient = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) + async with client: + assert f"monitor-querylogs/{VERSION}" in client._client._config.user_agent_policy.user_agent diff --git 
a/sdk/monitor/azure-monitor-querylogs/tests/test_logs_response.py b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_response.py new file mode 100644 index 000000000000..45b7ab108775 --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_response.py @@ -0,0 +1,84 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +# cspell:ignore toint +from datetime import datetime, timezone + +from azure.monitor.querylogs import LogsQueryClient + +from base_testcase import AzureMonitorQueryLogsTestCase + + +class TestLogsResponse(AzureMonitorQueryLogsTestCase): + + def test_query_response_data(self, recorded_test, monitor_info): + # Sample log entry that is populated in table before test. + # { + # "Time": "2022-11-07T01:03:07.584426Z", + # "Computer": "Computer1", + # "AdditionalContext": '{"testContextKey": 1, "CounterName": "AppMetric1"}}' + # } + + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = ( + f"{monitor_info['table_name']} | project TimeGenerated, Type, ExtendedColumn, AdditionalContext" + f"| order by TimeGenerated desc | take 5" + ) + + # returns LogsQueryResult + result = client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + assert isinstance(result.tables[0].rows[0][0], datetime) + + assert isinstance(result.tables[0].rows[0][1], str) + assert result.tables[0].rows[0][1] == monitor_info["table_name"] + + assert isinstance(result.tables[0].rows[0][2], str) + # Check if DCR transformation correctly populated the ExtendedColumn field. 
+ assert "AppMetric" in result.tables[0].rows[0][2] + + assert isinstance(result.tables[0].rows[0][3], str) + assert "testContextKey" in result.tables[0].rows[0][3] + + def test_query_response_types(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """print "hello", true, make_datetime("2000-01-02 03:04:05Z"), toint(100), long(101), 102.1 + | project + stringcolumn=print_0, + boolcolumn=print_1, + datecolumn=print_2, + intcolumn=print_3, + longcolumn=print_4, + realcolumn=print_5 + """ + + result = client.query_workspace(monitor_info["workspace_id"], query, timespan=None) + + table = result.tables[0] + columns = table.columns + row = table.rows[0] + + assert columns[0] == "stringcolumn" + assert isinstance(row[0], str) + assert row[0] == "hello" + + assert columns[1] == "boolcolumn" + assert isinstance(row[1], bool) + assert row[1] is True + + assert columns[2] == "datecolumn" + assert isinstance(row[2], datetime) + assert row[2] == datetime(2000, 1, 2, 3, 4, 5, tzinfo=timezone.utc) + + assert columns[3] == "intcolumn" + assert isinstance(row[3], int) + assert row[3] == 100 + + assert columns[4] == "longcolumn" + assert isinstance(row[4], int) + assert row[4] == 101 + + assert columns[5] == "realcolumn" + assert isinstance(row[5], float) + assert row[5] == 102.1 diff --git a/sdk/monitor/azure-monitor-querylogs/tests/test_logs_timespans.py b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_timespans.py new file mode 100644 index 000000000000..e5abd94a361a --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tests/test_logs_timespans.py @@ -0,0 +1,102 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. 
+# ------------------------------------------------------------------------- +from datetime import datetime, timedelta, timezone +import json + +import pytest + +from azure.monitor.querylogs import LogsQueryClient +from azure.monitor.querylogs._helpers import construct_iso8601 + +from base_testcase import AzureMonitorQueryLogsTestCase + + +class TestLogsTimespans(AzureMonitorQueryLogsTestCase): + + def test_query_no_duration(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = """AppRequests | + where TimeGenerated > ago(12h) | + summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" + + def callback(request): + dic = json.loads(request.http_request.body) + assert dic.get("timespan") is None + + # returns LogsQueryResult + client.query_workspace(monitor_info["workspace_id"], query, timespan=None, raw_request_hook=callback) + + def test_query_start_and_end_time(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = "AppRequests | take 5" + + end_time = datetime(2022, 11, 8) + start_time = end_time - timedelta(days=3) + + def callback(request): + dic = json.loads(request.http_request.body) + assert dic.get("timespan") is not None + + client.query_workspace( + monitor_info["workspace_id"], query, timespan=(start_time, end_time), raw_request_hook=callback + ) + + def test_query_duration_and_start_time(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = "AppRequests | take 5" + + end_time = datetime(2022, 11, 8) + start_time = end_time - timedelta(days=3) + duration = timedelta(days=3) + + def callback(request): + dic = json.loads(request.http_request.body) + assert "/PT259200.0S" in dic.get("timespan") + + client.query_workspace( + monitor_info["workspace_id"], query, timespan=(start_time, duration), raw_request_hook=callback + ) + + def test_query_duration_only(self, recorded_test, monitor_info): + client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) + query = "AppRequests | take 5" + + duration = timedelta(days=3) + + def callback(request): + dic = json.loads(request.http_request.body) + assert "PT259200.0S" in dic.get("timespan") + + client.query_workspace(monitor_info["workspace_id"], query, timespan=duration, raw_request_hook=callback) + + def test_duration_to_iso8601(self): + d1 = timedelta(days=1) + d2 = timedelta(weeks=1) + d3 = timedelta(weeks=3, days=4) + d4 = timedelta(seconds=10) + d5 = timedelta(microseconds=1000) + d6 = timedelta(milliseconds=100000) + d7 = timedelta(hours=24, days=1) + + assert construct_iso8601(timespan=d1) == "PT86400.0S" + assert construct_iso8601(timespan=d2) == "PT604800.0S" + assert construct_iso8601(timespan=d3) == "PT2160000.0S" + assert construct_iso8601(timespan=d4) == "PT10.0S" + assert construct_iso8601(timespan=d5) == "PT0.001S" + assert construct_iso8601(timespan=d6) == "PT100.0S" + assert construct_iso8601(timespan=d7) == "PT172800.0S" + + with pytest.raises(ValueError, match="timespan must be a timedelta or a tuple."): + construct_iso8601(timespan=(datetime.now(timezone.utc))) + + def test_iso8601_start_end(self): + start = datetime(2022, 11, 7, 1, 3, 7, 584426, tzinfo=timezone.utc) + end = datetime(2022, 11, 8, 1, 3, 7, 584426, tzinfo=timezone.utc) + duration = timedelta(days=1) + + assert construct_iso8601(timespan=(start, end)) == "2022-11-07T01:03:07.584426Z/2022-11-08T01:03:07.584426Z" +
assert construct_iso8601(timespan=(start, duration)) == "2022-11-07T01:03:07.584426Z/PT86400.0S" + assert construct_iso8601(timespan=duration) == "PT86400.0S" diff --git a/sdk/monitor/azure-monitor-querylogs/tsp-location.yaml b/sdk/monitor/azure-monitor-querylogs/tsp-location.yaml new file mode 100644 index 000000000000..910700a83d2e --- /dev/null +++ b/sdk/monitor/azure-monitor-querylogs/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/monitor/Monitor.Query.Logs +commit: c4d7c4945950f5a18c003bce3ff30284b86e12dd +repo: Azure/azure-rest-api-specs +additionalDirectories: diff --git a/sdk/monitor/ci.yml b/sdk/monitor/ci.yml index 127e01083b28..3b1225558869 100644 --- a/sdk/monitor/ci.yml +++ b/sdk/monitor/ci.yml @@ -39,3 +39,5 @@ extends: safeName: azuremonitorquery - name: azure-monitor-ingestion safeName: azuremonitoringestion + - name: azure-monitor-querylogs + safeName: azuremonitorquerylogs
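Reviewer note, not part of the patch: the tests above exercise the package's public query surface end to end. For readers skimming the diff, the sketch below strings the same calls together outside the test harness. The placeholder workspace ID and the use of DefaultAzureCredential from azure-identity are assumptions for illustration; only the client methods, model types, and query shapes shown in the tests are taken from this patch.

from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.querylogs import LogsBatchQuery, LogsQueryClient, LogsQueryStatus

# Hypothetical workspace ID, used purely for illustration.
WORKSPACE_ID = "<workspace-id>"

credential = DefaultAzureCredential()
client = LogsQueryClient(credential)

# Single-workspace query over the last 12 hours, mirroring test_logs_single_query.
response = client.query_workspace(
    WORKSPACE_ID,
    "AppRequests | summarize count()",
    timespan=timedelta(hours=12),
)
if response.status == LogsQueryStatus.SUCCESS:
    for table in response.tables:
        print(table.columns)

# Several queries submitted in one round trip, mirroring test_logs_query_batch_default.
requests = [
    LogsBatchQuery(WORKSPACE_ID, "AzureActivity | summarize count()", timespan=timedelta(hours=1)),
    LogsBatchQuery(WORKSPACE_ID, "AppRequests | take 5", timespan=None),
]
for result in client.query_batch(requests):
    # Each entry is a LogsQueryResult, LogsQueryPartialResult, or LogsQueryError.
    print(type(result).__name__)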
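The timespan tests pin down how each accepted timespan shape is serialized into the request body. As a quick reference, that mapping can be reproduced with the construct_iso8601 helper the tests import; it lives in the private _helpers module, so treat this as an internal detail rather than public API. The expected strings below are taken directly from the assertions in test_logs_timespans.py.

from datetime import datetime, timedelta, timezone

from azure.monitor.querylogs._helpers import construct_iso8601

start = datetime(2022, 11, 7, 1, 3, 7, 584426, tzinfo=timezone.utc)

# A bare timedelta becomes an ISO 8601 duration.
assert construct_iso8601(timespan=timedelta(days=1)) == "PT86400.0S"

# A (start, end) tuple of datetimes becomes an interval between two instants.
assert (
    construct_iso8601(timespan=(start, start + timedelta(days=1)))
    == "2022-11-07T01:03:07.584426Z/2022-11-08T01:03:07.584426Z"
)

# A (start, duration) tuple becomes an instant followed by a duration.
assert construct_iso8601(timespan=(start, timedelta(days=1))) == "2022-11-07T01:03:07.584426Z/PT86400.0S"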