diff --git a/sdk/monitor/azure-monitor-query/CHANGELOG.md b/sdk/monitor/azure-monitor-query/CHANGELOG.md
index ba96cc2ea4f4..70c9d0b31bfb 100644
--- a/sdk/monitor/azure-monitor-query/CHANGELOG.md
+++ b/sdk/monitor/azure-monitor-query/CHANGELOG.md
@@ -4,8 +4,14 @@
 ### Features Added
+- Added `QueryPartialErrorException` and `LogsQueryError` to handle errors.
+- Added `partial_error` and `is_error` attributes to `LogsQueryResult`.
+- Added an `allow_partial_errors` option that defaults to False. When set to True, partial errors are returned as part of the result instead of being raised.
+
 ### Breaking Changes
+- `LogsQueryResult` now iterates over its tables directly as a convenience.
+
 ### Bugs Fixed
 ### Other Changes
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py
index 9043ee10d44c..d33a6f0a5c28 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py
@@ -7,6 +7,11 @@
 from ._logs_query_client import LogsQueryClient
 from ._metrics_query_client import MetricsQueryClient
+from ._exceptions import (
+    LogsQueryError,
+    QueryPartialErrorException
+)
+
 from ._models import (
     MetricAggregationType,
     LogsQueryResult,
@@ -30,6 +35,8 @@
     "MetricAggregationType",
     "LogsQueryClient",
     "LogsQueryResult",
+    "LogsQueryError",
+    "QueryPartialErrorException",
     "LogsTable",
     "LogsBatchQuery",
     "MetricsQueryClient",
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py
new file mode 100644
index 000000000000..f849f93ff6ee
--- /dev/null
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_exceptions.py
@@ -0,0 +1,79 @@
+#
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+from azure.core.exceptions import HttpResponseError
+
+class LogsQueryError(object):
+    """The code and message for an error.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar code: A machine readable error code.
+    :vartype code: str
+    :ivar message: A human readable error message.
+    :vartype message: str
+    :ivar details: error details.
+    :vartype details: list[~monitor_query_client.models.ErrorDetail]
+    :ivar innererror: Inner error details if they exist.
+    :vartype innererror: ~azure.monitor.query.LogsQueryError
+    :ivar additional_properties: Additional properties that can be provided on the error info
+     object.
+    :vartype additional_properties: object
+    :ivar bool is_error: Boolean check for error item when iterating over list of
+     results. Always True for an instance of a LogsQueryError.
+ """ + def __init__( + self, + **kwargs + ): + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.details = kwargs.get('details', None) + self.innererror = kwargs.get('innererror', None) + self.additional_properties = kwargs.get('additional_properties', None) + self.is_error = True + + @classmethod + def _from_generated(cls, generated): + if not generated: + return None + details = None + if generated.details is not None: + details = [d.serialize() for d in generated.details] + return cls( + code=generated.code, + message=generated.message, + innererror=cls._from_generated(generated.innererror) if generated.innererror else None, + additional_properties=generated.additional_properties, + details=details, + ) + +class QueryPartialErrorException(HttpResponseError): + """There is a partial failure in query operation. This is thrown for a single query operation + when allow_partial_errors is set to False. + + :ivar code: A machine readable error code. + :vartype code: str + :ivar message: A human readable error message. + :vartype message: str + :ivar details: error details. + :vartype details: list[~monitor_query_client.models.ErrorDetail] + :ivar innererror: Inner error details if they exist. + :vartype innererror: ~azure.monitor.query.LogsQueryError + :ivar additional_properties: Additional properties that can be provided on the error info + object. + :vartype additional_properties: object + """ + + def __init__(self, **kwargs): + error = kwargs.pop('error', None) + if error: + self.code = error.code + self.message = error.message + self.details = [d.serialize() for d in error.details] if error.details else None + self.innererror = LogsQueryError._from_generated(error.innererror) if error.innererror else None + self.additional_properties = error.additional_properties + super(QueryPartialErrorException, self).__init__(message=self.message) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py index 8b3194f3dc14..5adf5bc095d9 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py @@ -41,13 +41,21 @@ def get_metrics_authentication_policy( raise TypeError("Unsupported credential") -def process_error(exception): - raise_error = HttpResponseError - raise raise_error(message=exception.message, response=exception.response) - -def order_results(request_order, mapping, obj): +def order_results(request_order, mapping, obj, err, allow_partial_errors=False): ordered = [mapping[id] for id in request_order] - return [obj._from_generated(rsp) for rsp in ordered] # pylint: disable=protected-access + results = [] + for item in ordered: + if not item.body.error: + results.append(obj._from_generated(item.body)) # pylint: disable=protected-access + else: + error = item.body.error + if allow_partial_errors and error.code == 'PartialError': + res = obj._from_generated(item.body) # pylint: disable=protected-access + res.partial_error = err._from_generated(error) # pylint: disable=protected-access + results.append(res) + else: + results.append(err._from_generated(error)) # pylint: disable=protected-access + return results def construct_iso8601(timespan=None): if not timespan: @@ -90,3 +98,23 @@ def native_col_type(col_type, value): def process_row(col_types, row): return [native_col_type(col_types[ind], val) for ind, val in enumerate(row)] + +def process_error(error, model): + try: + model = 
model._from_generated(error.model.error) # pylint: disable=protected-access + except AttributeError: # model can be none + pass + raise HttpResponseError( + message=error.message, + response=error.response, + model=model) + +def process_prefer(server_timeout, include_statistics, include_visualization): + prefer = "" + if server_timeout: + prefer += "wait=" + str(server_timeout) + "," + if include_statistics: + prefer += "include-statistics=true," + if include_visualization: + prefer += "include-render=true" + return prefer.rstrip(",") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py index b555985d44aa..ccb68fdd6414 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py @@ -12,8 +12,9 @@ from ._generated._monitor_query_client import MonitorQueryClient from ._generated.models import BatchRequest, QueryBody as LogsQueryBody -from ._helpers import get_authentication_policy, process_error, construct_iso8601, order_results +from ._helpers import get_authentication_policy, construct_iso8601, order_results, process_error, process_prefer from ._models import LogsBatchQuery, LogsQueryResult +from ._exceptions import LogsQueryError, QueryPartialErrorException if TYPE_CHECKING: from azure.core.credentials import TokenCredential @@ -76,6 +77,8 @@ def query(self, workspace_id, query, **kwargs): :keyword additional_workspaces: A list of workspaces that are included in the query. These can be qualified workspace names, workspace Ids, or Azure resource Ids. :paramtype additional_workspaces: list[str] + :keyword allow_partial_errors: Defaults to False. If set to true, partial errors are not thrown. 
+ :paramtype allow_partial_errors: bool :return: LogsQueryResult, or the result of cls(response) :rtype: ~azure.monitor.query.LogsQueryResult :raises: ~azure.core.exceptions.HttpResponseError @@ -89,6 +92,7 @@ def query(self, workspace_id, query, **kwargs): :dedent: 0 :caption: Get a response for a single Log Query """ + allow_partial_errors = kwargs.pop('allow_partial_errors', False) if 'timespan' not in kwargs: raise TypeError("query() missing 1 required keyword-only argument: 'timespan'") timespan = construct_iso8601(kwargs.pop('timespan')) @@ -97,17 +101,7 @@ def query(self, workspace_id, query, **kwargs): server_timeout = kwargs.pop("server_timeout", None) workspaces = kwargs.pop("additional_workspaces", None) - prefer = "" - if server_timeout: - prefer += "wait=" + str(server_timeout) - if include_statistics: - if len(prefer) > 0: - prefer += "," - prefer += "include-statistics=true" - if include_visualization: - if len(prefer) > 0: - prefer += "," - prefer += "include-render=true" + prefer = process_prefer(server_timeout, include_statistics, include_visualization) body = LogsQueryBody( query=query, @@ -117,14 +111,23 @@ def query(self, workspace_id, query, **kwargs): ) try: - return LogsQueryResult._from_generated(self._query_op.execute( # pylint: disable=protected-access + generated_response = self._query_op.execute( # pylint: disable=protected-access workspace_id=workspace_id, body=body, prefer=prefer, **kwargs - )) - except HttpResponseError as e: - process_error(e) + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + if not generated_response.error: + return response + if not allow_partial_errors: + raise QueryPartialErrorException(error=generated_response.error) + response.partial_error = LogsQueryError._from_generated( # pylint: disable=protected-access + generated_response.error + ) + return response @distributed_trace def query_batch(self, queries, **kwargs): @@ -136,6 +139,9 @@ def query_batch(self, queries, **kwargs): :param queries: The list of Kusto queries to execute. :type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery] + :keyword bool allow_partial_errors: If set to True, a `LogsQueryResult` object is returned + when a partial error occurs. The error can be accessed using the `partial_error` + attribute in the object. :return: List of LogsQueryResult, or the result of cls(response) :rtype: list[~azure.monitor.query.LogsQueryResult] :raises: ~azure.core.exceptions.HttpResponseError @@ -149,6 +155,7 @@ def query_batch(self, queries, **kwargs): :dedent: 0 :caption: Get a response for multiple Log Queries. 
""" + allow_partial_errors = kwargs.pop('allow_partial_errors', False) try: queries = [LogsBatchQuery(**q) for q in queries] except (KeyError, TypeError): @@ -161,7 +168,12 @@ def query_batch(self, queries, **kwargs): batch = BatchRequest(requests=queries) generated = self._query_op.batch(batch, **kwargs) mapping = {item.id: item for item in generated.responses} - return order_results(request_order, mapping, LogsQueryResult) + return order_results( + request_order, + mapping, + LogsQueryResult, + LogsQueryError, + allow_partial_errors) def close(self): # type: () -> None diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py index 84e0a05f4cd1..4532f1ab468e 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py @@ -165,21 +165,27 @@ class LogsQueryResult(object): :ivar visualization: This will include a visualization property in the response that specifies the type of visualization selected by the query and any properties for that visualization. :vartype visualization: object - :ivar error: Any error info. - :vartype error: ~azure.core.exceptions.HttpResponseError + :ivar partial_error: Any error info. This is none except in the case where `allow_partial_errors` + is explicitly set to True. + :vartype partial_error: ~azure.core.exceptions.HttpResponseError + :ivar bool is_error: Boolean check for error item when iterating over list of + results. Always False for an instance of a LogsQueryResult. """ def __init__( self, **kwargs ): self.tables = kwargs.get('tables', None) - self.error = kwargs.get('error', None) + self.partial_error = None self.statistics = kwargs.get('statistics', None) self.visualization = kwargs.get('visualization', None) + self.is_error = False + + def __iter__(self): + return iter(self.tables) @classmethod def _from_generated(cls, generated): - if not generated: return cls() tables = None @@ -195,7 +201,6 @@ def _from_generated(cls, generated): tables=tables, statistics=generated.statistics, visualization=generated.render, - error=generated.error ) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py index 090fcceb9ed8..e31806a110fa 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py @@ -7,15 +7,16 @@ from datetime import datetime, timedelta from typing import Any, Tuple, Union, Sequence, Dict, List, TYPE_CHECKING -from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.exceptions import HttpResponseError from .._generated.aio._monitor_query_client import MonitorQueryClient from .._generated.models import BatchRequest, QueryBody as LogsQueryBody -from .._helpers import process_error, construct_iso8601, order_results +from .._helpers import construct_iso8601, order_results, process_error, process_prefer from .._models import LogsQueryResult, LogsBatchQuery from ._helpers_asyc import get_authentication_policy +from .._exceptions import LogsQueryError, QueryPartialErrorException if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -75,23 +76,14 @@ async def query( :rtype: ~azure.monitor.query.LogsQueryResult :raises: 
~azure.core.exceptions.HttpResponseError """ + allow_partial_errors = kwargs.pop('allow_partial_errors', False) timespan = construct_iso8601(timespan) include_statistics = kwargs.pop("include_statistics", False) include_visualization = kwargs.pop("include_visualization", False) server_timeout = kwargs.pop("server_timeout", None) additional_workspaces = kwargs.pop("additional_workspaces", None) - prefer = "" - if server_timeout: - prefer += "wait=" + str(server_timeout) - if include_statistics: - if len(prefer) > 0: - prefer += "," - prefer += "include-statistics=true" - if include_visualization: - if len(prefer) > 0: - prefer += "," - prefer += "include-render=true" + prefer = process_prefer(server_timeout, include_statistics, include_visualization) body = LogsQueryBody( query=query, @@ -101,14 +93,23 @@ async def query( ) try: - return LogsQueryResult._from_generated(await self._query_op.execute( # pylint: disable=protected-access + generated_response = await self._query_op.execute( # pylint: disable=protected-access workspace_id=workspace_id, body=body, prefer=prefer, **kwargs - )) - except HttpResponseError as e: - process_error(e) + ) + except HttpResponseError as err: + process_error(err, LogsQueryError) + response = LogsQueryResult._from_generated(generated_response) # pylint: disable=protected-access + if not generated_response.error: + return response + if not allow_partial_errors: + raise QueryPartialErrorException(error=generated_response.error) + response.partial_error = LogsQueryError._from_generated( # pylint: disable=protected-access + generated_response.error + ) + return response @distributed_trace_async async def query_batch( @@ -123,10 +124,14 @@ async def query_batch( :param queries: The list of Kusto queries to execute. :type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery] + :keyword bool allow_partial_errors: If set to True, a `LogsQueryResult` object is returned + when a partial error occurs. The error can be accessed using the `partial_error` + attribute in the object. 
:return: list of LogsQueryResult objects, or the result of cls(response) :rtype: list[~azure.monitor.query.LogsQueryResult] :raises: ~azure.core.exceptions.HttpResponseError """ + allow_partial_errors = kwargs.pop('allow_partial_errors', False) try: queries = [LogsBatchQuery(**q) for q in queries] except (KeyError, TypeError): @@ -139,7 +144,12 @@ async def query_batch( batch = BatchRequest(requests=queries) generated = await self._query_op.batch(batch, **kwargs) mapping = {item.id: item for item in generated.responses} - return order_results(request_order, mapping, LogsQueryResult) + return order_results( + request_order, + mapping, + LogsQueryResult, + LogsQueryError, + allow_partial_errors) async def __aenter__(self) -> "LogsQueryClient": await self._client.__aenter__() diff --git a/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py b/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py index c0838643dce4..43444c369c3c 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py @@ -20,27 +20,30 @@ workspace_id= os.environ['LOG_WORKSPACE_ID'] ), LogsBatchQuery( - query= """AppRequests | take 10 | - summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""", - timespan=(datetime(2021, 6, 2), timedelta(hours=1)), + query= """AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), workspace_id= os.environ['LOG_WORKSPACE_ID'] ), LogsBatchQuery( - query= "AppRequests | take 5", + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", workspace_id= os.environ['LOG_WORKSPACE_ID'], timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), include_statistics=True ), ] -responses = client.query_batch(requests) +responses = client.query_batch(requests, allow_partial_errors=False) for response in responses: - try: + if not response.is_error: table = response.tables[0] df = pd.DataFrame(table.rows, columns=table.columns) print(df) print("\n\n-------------------------\n\n") - except TypeError: - print(response.error.innererror) + else: + error = response + print(error.message) + # [END send_query_batch] \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py b/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py index 917f6a16e698..5bda7d0e0f3a 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_log_query_client.py @@ -4,7 +4,8 @@ import os import pandas as pd from datetime import timedelta -from azure.monitor.query import LogsQueryClient +from azure.monitor.query import LogsQueryClient, QueryPartialErrorException +from azure.core.exceptions import HttpResponseError from azure.identity import DefaultAzureCredential # [START client_auth_with_token_cred] @@ -16,21 +17,20 @@ # Response time trend # request duration over the last 12 hours. 
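# A minimal sketch (assuming the `client` created above and the LOG_WORKSPACE_ID
# environment variable): instead of catching QueryPartialErrorException as the
# sample below does, a caller can pass allow_partial_errors=True so that a partial
# failure is surfaced on the result's `partial_error` attribute.
sketch_query = """let Weight = 92233720368547758;
    range x from 1 to 3 step 1
    | summarize percentilesw(x, Weight * 100, 50)"""  # produces a partial error, per the tests
sketch_response = client.query(
    os.environ['LOG_WORKSPACE_ID'],
    sketch_query,
    timespan=timedelta(days=1),
    allow_partial_errors=True
)
if sketch_response.partial_error is not None:
    # partial_error is a LogsQueryError carrying `code` and `message`
    print(sketch_response.partial_error.code, sketch_response.partial_error.message)
for table in sketch_response:  # LogsQueryResult iterates over its tables
    print(table)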
# [START send_logs_query] -query = """AppRequests | -summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""" +query = """AppRwequests | take 5""" # returns LogsQueryResult -response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) +try: + response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) + for table in response: + print(table) +except QueryPartialErrorException as err: + print("this is a partial error") + print(err.details) +except HttpResponseError as err: + print("something fatal happened") + print (err) -if not response.tables: - print("No results for the query") - -for table in response.tables: - try: - df = pd.DataFrame(table.rows, columns=table.columns) - print(df) - except TypeError: - print(response.error) # [END send_logs_query] """ TimeGenerated _ResourceId avgRequestDuration diff --git a/sdk/monitor/azure-monitor-query/tests/async/test_exceptions_async.py b/sdk/monitor/azure-monitor-query/tests/async/test_exceptions_async.py new file mode 100644 index 000000000000..cf46756a6ddf --- /dev/null +++ b/sdk/monitor/azure-monitor-query/tests/async/test_exceptions_async.py @@ -0,0 +1,171 @@ +from datetime import timedelta, datetime +import pytest +import os +from azure.identity.aio import ClientSecretCredential +from azure.core.exceptions import HttpResponseError +from azure.monitor.query import LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException +from azure.monitor.query.aio import LogsQueryClient + +def _credential(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = os.environ['AZURE_CLIENT_SECRET'], + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + return credential + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_single_query_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + with pytest.raises(HttpResponseError): + await client.query('bad_workspace_id', 'AppRequests', timespan=None) + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_single_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + with pytest.raises(QueryPartialErrorException) as err: + await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_single_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = await client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True) + assert response.partial_error is not None + assert response.partial_error.code == 'PartialError' + assert response.partial_error.__class__ == LogsQueryError + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_fatal_exception(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = 'bad_secret', + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= 
os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + with pytest.raises(HttpResponseError): + await client.query_batch(requests, allow_partial_errors=True) + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = await client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = await client.query_batch(requests, allow_partial_errors=True) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryResult + assert r3.partial_error is not None + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_batch_query_non_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """Bad Query""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = await client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == 
LogsQueryResult + assert r3.__class__ == LogsQueryError diff --git a/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py b/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py index bf7ade3b9ad2..c7675081ed5a 100644 --- a/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py +++ b/sdk/monitor/azure-monitor-query/tests/async/test_logs_client_async.py @@ -4,7 +4,7 @@ import os from azure.identity.aio import ClientSecretCredential from azure.core.exceptions import HttpResponseError -from azure.monitor.query import LogsBatchQuery +from azure.monitor.query import LogsBatchQuery, LogsQueryError, LogsTable, LogsQueryResult from azure.monitor.query.aio import LogsQueryClient def _credential(): @@ -100,7 +100,7 @@ async def test_logs_query_batch_default(): assert r1.tables[0].columns[1] == '_ResourceId' assert r1.tables[0].columns[2] == 'avgRequestDuration' r2 = response[2] - assert r2.error is not None + assert r2.__class__ == LogsQueryError @pytest.mark.skip('https://github.com/Azure/azure-sdk-for-python/issues/19382') @pytest.mark.live_test_only @@ -178,3 +178,27 @@ async def test_logs_single_query_with_render_and_stats(): assert response.visualization is not None assert response.statistics is not None + +@pytest.mark.live_test_only +@pytest.mark.asyncio +async def test_logs_query_result_iterate_over_tables(): + client = LogsQueryClient(_credential()) + + query = "AppRequests; AppRequests | take 5" + + response = await client.query( + os.environ['LOG_WORKSPACE_ID'], + query, + timespan=None, + include_statistics=True, + include_visualization=True + ) + + ## should iterate over tables + for item in response: + assert item.__class__ == LogsTable + + assert response.statistics is not None + assert response.visualization is not None + assert len(response.tables) == 2 + assert response.__class__ == LogsQueryResult diff --git a/sdk/monitor/azure-monitor-query/tests/test_exceptions.py b/sdk/monitor/azure-monitor-query/tests/test_exceptions.py new file mode 100644 index 000000000000..92d28cd75dd9 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/tests/test_exceptions.py @@ -0,0 +1,163 @@ +from datetime import timedelta, datetime +import pytest +import os +from azure.identity import ClientSecretCredential +from azure.core.exceptions import HttpResponseError +from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException + +def _credential(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = os.environ['AZURE_CLIENT_SECRET'], + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + return credential + +@pytest.mark.live_test_only +def test_logs_single_query_fatal_exception(): + credential = _credential() + client = LogsQueryClient(credential) + with pytest.raises(HttpResponseError): + client.query('bad_workspace_id', 'AppRequests', timespan=None) + +@pytest.mark.live_test_only +def test_logs_single_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + with pytest.raises(QueryPartialErrorException) as err: + client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1)) + +@pytest.mark.live_test_only +def test_logs_single_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + query = """let Weight = 
92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True) + assert response.partial_error is not None + assert response.partial_error.code == 'PartialError' + assert response.partial_error.__class__ == LogsQueryError + +@pytest.mark.live_test_only +def test_logs_batch_query_fatal_exception(): + credential = ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = 'bad_secret', + tenant_id = os.environ['AZURE_TENANT_ID'] + ) + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequestsss | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + with pytest.raises(HttpResponseError): + responses = client.query_batch(requests, allow_partial_errors=True) + +@pytest.mark.live_test_only +def test_logs_batch_query_partial_exception_not_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError + +@pytest.mark.live_test_only +def test_logs_batch_query_partial_exception_allowed(): + credential = _credential() + client = LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = client.query_batch(requests, allow_partial_errors=True) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryResult + assert r3.partial_error is not None + +@pytest.mark.live_test_only +def test_logs_batch_query_non_fatal_exception(): + credential = _credential() + client = 
LogsQueryClient(credential) + requests = [ + LogsBatchQuery( + query="AzureActivity | summarize count()", + timespan=timedelta(hours=1), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """AppRequests | take 10""", + timespan=(datetime(2021, 6, 2), timedelta(days=1)), + workspace_id= os.environ['LOG_WORKSPACE_ID'] + ), + LogsBatchQuery( + query= """Bad Query""", + workspace_id= os.environ['LOG_WORKSPACE_ID'], + timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)), + include_statistics=True + ), + ] + responses = client.query_batch(requests) + r1, r2, r3 = responses[0], responses[1], responses[2] + assert r1.__class__ == LogsQueryResult + assert r2.__class__ == LogsQueryResult + assert r3.__class__ == LogsQueryError diff --git a/sdk/monitor/azure-monitor-query/tests/test_logs_client.py b/sdk/monitor/azure-monitor-query/tests/test_logs_client.py index 248f44796cbe..34b00e201dc5 100644 --- a/sdk/monitor/azure-monitor-query/tests/test_logs_client.py +++ b/sdk/monitor/azure-monitor-query/tests/test_logs_client.py @@ -3,7 +3,7 @@ import os from azure.identity import ClientSecretCredential from azure.core.exceptions import HttpResponseError -from azure.monitor.query import LogsQueryClient, LogsBatchQuery +from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError, LogsTable, LogsQueryResult def _credential(): credential = ClientSecretCredential( @@ -55,11 +55,12 @@ def test_logs_single_query_with_non_200(): def test_logs_single_query_with_partial_success(): credential = _credential() client = LogsQueryClient(credential) - query = "set truncationmaxrecords=1; union * | project TimeGenerated | take 10" + query = """let Weight = 92233720368547758; + range x from 1 to 3 step 1 + | summarize percentilesw(x, Weight * 100, 50)""" + response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, allow_partial_errors=True) - response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None) - - assert response is not None + assert response.partial_error is not None @pytest.mark.skip("https://github.com/Azure/azure-sdk-for-python/issues/19917") @pytest.mark.live_test_only @@ -108,7 +109,7 @@ def test_logs_query_batch_default(): assert r1.tables[0].columns[1] == '_ResourceId' assert r1.tables[0].columns[2] == 'avgRequestDuration' r2 = response[2] - assert r2.error is not None + assert r2.__class__ == LogsQueryError @pytest.mark.live_test_only def test_logs_single_query_with_statistics(): @@ -221,3 +222,26 @@ def test_logs_query_batch_additional_workspaces(): for resp in response: assert len(resp.tables[0].rows) == 2 + +@pytest.mark.live_test_only +def test_logs_query_result_iterate_over_tables(): + client = LogsQueryClient(_credential()) + + query = "AppRequests; AppRequests | take 5" + + response = client.query( + os.environ['LOG_WORKSPACE_ID'], + query, + timespan=None, + include_statistics=True, + include_visualization=True + ) + + ## should iterate over tables + for item in response: + assert item.__class__ == LogsTable + + assert response.statistics is not None + assert response.visualization is not None + assert len(response.tables) == 2 + assert response.__class__ == LogsQueryResult
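A minimal usage sketch of the batch error-handling surface these tests exercise, assuming `LOG_WORKSPACE_ID` is set and `DefaultAzureCredential` can authenticate (as in the samples above):

from datetime import timedelta
import os

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsBatchQuery, LogsQueryClient

client = LogsQueryClient(DefaultAzureCredential())

requests = [
    LogsBatchQuery(
        query="AzureActivity | summarize count()",
        timespan=timedelta(hours=1),
        workspace_id=os.environ['LOG_WORKSPACE_ID']
    ),
    LogsBatchQuery(
        query="""let Weight = 92233720368547758;
        range x from 1 to 3 step 1
        | summarize percentilesw(x, Weight * 100, 50)""",  # partial error, per the tests
        timespan=timedelta(hours=1),
        workspace_id=os.environ['LOG_WORKSPACE_ID']
    ),
    LogsBatchQuery(
        query="Bad Query",  # fails entirely and comes back as a LogsQueryError
        timespan=timedelta(hours=1),
        workspace_id=os.environ['LOG_WORKSPACE_ID']
    ),
]

# With allow_partial_errors=True, a partially failed query is still returned as a
# LogsQueryResult with the error available on `partial_error`; a fully failed query
# is returned as a LogsQueryError in the same position of the list.
for result in client.query_batch(requests, allow_partial_errors=True):
    if result.is_error:
        print("query failed:", result.code, result.message)
        continue
    if result.partial_error is not None:
        print("partial failure:", result.partial_error.message)
    for table in result:  # LogsQueryResult iterates over its tables
        print(table.columns)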