diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/CHANGELOG.md b/sdk/cosmos/azure-mgmt-cosmosdb/CHANGELOG.md index 316721783168..bec3853dc26a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/CHANGELOG.md +++ b/sdk/cosmos/azure-mgmt-cosmosdb/CHANGELOG.md @@ -1,5 +1,17 @@ # Release History +## 9.6.0 (2024-09-18) + +### Features Added + + - Model `ResourceRestoreParameters` added property `restore_with_ttl_disabled` + - Model `RestoreParameters` added parameter `restore_with_ttl_disabled` in method `__init__` + - Model `RestoreParametersBase` added property `restore_with_ttl_disabled` + - Enum `ServerVersion` added member `SEVEN0` + - Added model `ErrorAdditionalInfo` + - Added model `ErrorDetail` + - Added model `ErrorResponseAutoGenerated` + ## 9.5.1 (2024-06-19) ### Features Added diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/_meta.json b/sdk/cosmos/azure-mgmt-cosmosdb/_meta.json index 6035385fb15d..2dc61f33d873 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/_meta.json +++ b/sdk/cosmos/azure-mgmt-cosmosdb/_meta.json @@ -1,11 +1,11 @@ { - "commit": "f1546dc981fa5d164d7ecd13588520457462c22c", + "commit": "3519c80fe510a268f6e59a29ccac8a53fdec15b6", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "autorest": "3.10.2", "use": [ - "@autorest/python@6.13.19", + "@autorest/python@6.19.0", "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/cosmos-db/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --tag=package-2024-05 --use=@autorest/python@6.13.19 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", + "autorest_command": "autorest specification/cosmos-db/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --tag=package-2024-08 --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/cosmos-db/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_configuration.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_configuration.py index 0cb6a524eeef..f91c65b1ff32 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_configuration.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_configuration.py @@ -28,13 +28,13 @@ class CosmosDBManagementClientConfiguration: # pylint: disable=too-many-instanc :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2024-05-15". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-08-15". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-05-15") + api_version: str = kwargs.pop("api_version", "2024-08-15") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_cosmos_db_management_client.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_cosmos_db_management_client.py index 6198471e5cea..eefe7c153445 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_cosmos_db_management_client.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_cosmos_db_management_client.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse @@ -161,7 +162,7 @@ class CosmosDBManagementClient: # pylint: disable=client-accepts-api-version-ke :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2024-05-15". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-08-15". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -323,7 +324,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "CosmosDBManagementClient": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_serialization.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_serialization.py index f0c6180722c8..8139854b97bb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_serialization.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_vendor.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_version.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_version.py index 7c492199f550..1154dc0c5272 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_version.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "9.5.1" +VERSION = "9.6.0" diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_configuration.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_configuration.py index 697fb89f2b6b..016bb54d71a6 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_configuration.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_configuration.py @@ -28,13 +28,13 @@ class CosmosDBManagementClientConfiguration: # pylint: disable=too-many-instanc :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2024-05-15". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-08-15". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-05-15") + api_version: str = kwargs.pop("api_version", "2024-08-15") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_cosmos_db_management_client.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_cosmos_db_management_client.py index 8c45a55e89c0..87ac3503b327 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_cosmos_db_management_client.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/_cosmos_db_management_client.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest @@ -164,7 +165,7 @@ class CosmosDBManagementClient: # pylint: disable=client-accepts-api-version-ke :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2024-05-15". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-08-15". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -328,7 +329,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "CosmosDBManagementClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_clusters_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_clusters_operations.py index f50c901bd973..8c695f20a17f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_clusters_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_clusters_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._cassandra_clusters_operations import ( build_create_update_request, build_deallocate_request, @@ -103,7 +103,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -119,7 +118,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -185,7 +183,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -201,7 +198,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -265,7 +261,6 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -279,16 +274,14 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = self._deserialize("ClusterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: + async def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -301,7 +294,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -311,10 +304,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -322,11 +315,19 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): 
+ pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -350,7 +351,7 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -359,6 +360,7 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -386,7 +388,7 @@ async def _create_update_initial( cluster_name: str, body: Union[_models.ClusterResource, IO[bytes]], **kwargs: Any - ) -> _models.ClusterResource: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -400,7 +402,7 @@ async def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ClusterResource] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -421,10 +423,10 @@ async def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -432,14 +434,14 @@ async def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ClusterResource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -551,10 +553,11 @@ async def begin_create_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = 
self._deserialize("ClusterResource", pipeline_response) + deserialized = self._deserialize("ClusterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -582,7 +585,7 @@ async def _update_initial( cluster_name: str, body: Union[_models.ClusterResource, IO[bytes]], **kwargs: Any - ) -> _models.ClusterResource: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -596,7 +599,7 @@ async def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ClusterResource] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -617,10 +620,10 @@ async def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -628,14 +631,14 @@ async def _update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ClusterResource", pipeline_response) - - if response.status_code == 202: - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -742,10 +745,11 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = self._deserialize("ClusterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -773,7 +777,7 @@ async def _invoke_command_initial( cluster_name: str, body: Union[_models.CommandPostBody, IO[bytes]], **kwargs: Any - ) -> _models.CommandOutput: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -787,7 +791,7 @@ async def _invoke_command_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.CommandOutput] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -808,10 +812,10 @@ async def _invoke_command_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -819,10 +823,14 @@ async def _invoke_command_initial( response = pipeline_response.http_response if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CommandOutput", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -929,10 +937,11 @@ async def begin_invoke_command( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CommandOutput", pipeline_response) + deserialized = self._deserialize("CommandOutput", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -954,9 +963,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _deallocate_initial( # pylint: disable=inconsistent-return-statements + async def _deallocate_initial( self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -969,7 +978,7 @@ async def _deallocate_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_deallocate_request( resource_group_name=resource_group_name, @@ -979,10 +988,10 @@ async def _deallocate_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -990,11 +999,19 @@ async def _deallocate_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async 
def begin_deallocate( @@ -1022,7 +1039,7 @@ async def begin_deallocate( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._deallocate_initial( # type: ignore + raw_result = await self._deallocate_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -1031,6 +1048,7 @@ async def begin_deallocate( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1052,9 +1070,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _start_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: + async def _start_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1067,7 +1083,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1077,10 +1093,10 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1088,11 +1104,19 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_start(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -1118,7 +1142,7 @@ async def begin_start(self, resource_group_name: str, cluster_name: str, **kwarg lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( # type: ignore + raw_result = await self._start_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -1127,6 +1151,7 @@ 
async def begin_start(self, resource_group_name: str, cluster_name: str, **kwarg params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1185,7 +1210,6 @@ async def status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1199,7 +1223,7 @@ async def status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CassandraClusterPublicStatus", pipeline_response) + deserialized = self._deserialize("CassandraClusterPublicStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_data_centers_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_data_centers_operations.py index ae9ad2941b87..093aa15cfd59 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_data_centers_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_data_centers_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._cassandra_data_centers_operations import ( build_create_update_request, build_delete_request, @@ -107,7 +107,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -123,7 +122,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -192,7 +190,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -206,16 +203,16 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = self._deserialize("DataCenterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, cluster_name: str, data_center_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -228,7 +225,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -239,10 +236,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -250,11 +247,19 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -282,7 +287,7 @@ async def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if 
cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, data_center_name=data_center_name, @@ -292,6 +297,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -320,7 +326,7 @@ async def _create_update_initial( data_center_name: str, body: Union[_models.DataCenterResource, IO[bytes]], **kwargs: Any - ) -> _models.DataCenterResource: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -334,7 +340,7 @@ async def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataCenterResource] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -356,10 +362,10 @@ async def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -367,14 +373,14 @@ async def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataCenterResource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -494,10 +500,11 @@ async def begin_create_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = self._deserialize("DataCenterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -526,7 +533,7 @@ async def _update_initial( data_center_name: str, body: Union[_models.DataCenterResource, IO[bytes]], **kwargs: Any - ) -> _models.DataCenterResource: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -540,7 +547,7 @@ async def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataCenterResource] = 
kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -562,10 +569,10 @@ async def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -573,14 +580,14 @@ async def _update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataCenterResource", pipeline_response) - - if response.status_code == 202: - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -697,10 +704,11 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = self._deserialize("DataCenterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_resources_operations.py index 5975ec1005bc..ad400dc181b7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_cassandra_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._cassandra_resources_operations import ( build_create_update_cassandra_keyspace_request, build_create_update_cassandra_table_request, @@ -120,7 +120,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -136,7 +135,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -206,7 +204,6 @@ async def get_cassandra_keyspace( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -220,7 +217,7 @@ async def get_cassandra_keyspace( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response) + deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -234,7 +231,7 @@ async def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too keyspace_name: str, create_update_cassandra_keyspace_parameters: Union[_models.CassandraKeyspaceCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CassandraKeyspaceGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -248,7 +245,7 @@ async def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CassandraKeyspaceGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -272,10 +269,10 @@ async def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -283,20 +280,22 @@ async def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", 
response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -420,10 +419,11 @@ async def begin_create_update_cassandra_keyspace( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response) + deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -445,9 +445,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-return-statements + async def _delete_cassandra_keyspace_initial( self, resource_group_name: str, account_name: str, keyspace_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -460,7 +460,7 @@ async def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-re _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_cassandra_keyspace_request( resource_group_name=resource_group_name, @@ -471,10 +471,10 @@ async def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-re headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -482,6 +482,10 @@ async def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-re response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -492,8 +496,12 @@ async def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-re ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_cassandra_keyspace( @@ -521,7 +529,7 @@ async def begin_delete_cassandra_keyspace( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - 
raw_result = await self._delete_cassandra_keyspace_initial( # type: ignore + raw_result = await self._delete_cassandra_keyspace_initial( resource_group_name=resource_group_name, account_name=account_name, keyspace_name=keyspace_name, @@ -531,6 +539,7 @@ async def begin_delete_cassandra_keyspace( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -593,7 +602,6 @@ async def get_cassandra_keyspace_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -607,7 +615,7 @@ async def get_cassandra_keyspace_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -621,7 +629,7 @@ async def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name keyspace_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -635,7 +643,7 @@ async def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -657,10 +665,10 @@ async def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -668,20 +676,22 @@ async def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, 
decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -805,10 +815,11 @@ async def begin_update_cassandra_keyspace_throughput( # pylint: disable=name-to params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -832,7 +843,7 @@ def get_long_running_output(pipeline_response): async def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -845,7 +856,7 @@ async def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=n _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_keyspace_to_autoscale_request( resource_group_name=resource_group_name, @@ -856,10 +867,10 @@ async def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=n headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -867,20 +878,22 @@ async def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=n response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -924,10 +937,11 @@ async def begin_migrate_cassandra_keyspace_to_autoscale( # pylint: disable=name params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", 
pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -951,7 +965,7 @@ def get_long_running_output(pipeline_response): async def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -964,7 +978,7 @@ async def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: d _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_keyspace_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -975,10 +989,10 @@ async def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: d headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -986,20 +1000,22 @@ async def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: d response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1043,10 +1059,11 @@ async def begin_migrate_cassandra_keyspace_to_manual_throughput( # pylint: disa params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1113,7 +1130,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1129,7 +1145,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1201,7 +1216,6 @@ async def get_cassandra_table( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1215,7 +1229,7 @@ async def get_cassandra_table( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CassandraTableGetResults", pipeline_response) + deserialized = self._deserialize("CassandraTableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1230,7 +1244,7 @@ async def _create_update_cassandra_table_initial( table_name: str, create_update_cassandra_table_parameters: Union[_models.CassandraTableCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CassandraTableGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1244,7 +1258,7 @@ async def _create_update_cassandra_table_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CassandraTableGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1269,10 +1283,10 @@ async def _create_update_cassandra_table_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1280,20 +1294,22 @@ async def _create_update_cassandra_table_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("CassandraTableGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1427,10 +1443,11 @@ async def begin_create_update_cassandra_table( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CassandraTableGetResults", pipeline_response) + deserialized = self._deserialize("CassandraTableGetResults", pipeline_response.http_response) if cls: return 
cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1452,9 +1469,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_cassandra_table_initial( # pylint: disable=inconsistent-return-statements + async def _delete_cassandra_table_initial( self, resource_group_name: str, account_name: str, keyspace_name: str, table_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1467,7 +1484,7 @@ async def _delete_cassandra_table_initial( # pylint: disable=inconsistent-retur _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_cassandra_table_request( resource_group_name=resource_group_name, @@ -1479,10 +1496,10 @@ async def _delete_cassandra_table_initial( # pylint: disable=inconsistent-retur headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1490,6 +1507,10 @@ async def _delete_cassandra_table_initial( # pylint: disable=inconsistent-retur response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1500,8 +1521,12 @@ async def _delete_cassandra_table_initial( # pylint: disable=inconsistent-retur ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_cassandra_table( @@ -1531,7 +1556,7 @@ async def begin_delete_cassandra_table( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_cassandra_table_initial( # type: ignore + raw_result = await self._delete_cassandra_table_initial( resource_group_name=resource_group_name, account_name=account_name, keyspace_name=keyspace_name, @@ -1542,6 +1567,7 @@ async def begin_delete_cassandra_table( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1607,7 +1633,6 @@ async def get_cassandra_table_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1621,7 +1646,7 @@ async def 
get_cassandra_table_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1636,7 +1661,7 @@ async def _update_cassandra_table_throughput_initial( # pylint: disable=name-to table_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1650,7 +1675,7 @@ async def _update_cassandra_table_throughput_initial( # pylint: disable=name-to api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1673,10 +1698,10 @@ async def _update_cassandra_table_throughput_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1684,20 +1709,22 @@ async def _update_cassandra_table_throughput_initial( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1831,10 +1858,11 @@ async def begin_update_cassandra_table_throughput( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1858,7 +1886,7 @@ def get_long_running_output(pipeline_response): async def _migrate_cassandra_table_to_autoscale_initial( # pylint: 
disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1871,7 +1899,7 @@ async def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_table_to_autoscale_request( resource_group_name=resource_group_name, @@ -1883,10 +1911,10 @@ async def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1894,20 +1922,22 @@ async def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1954,10 +1984,11 @@ async def begin_migrate_cassandra_table_to_autoscale( # pylint: disable=name-to params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1981,7 +2012,7 @@ def get_long_running_output(pipeline_response): async def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1994,7 +2025,7 @@ async def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disa _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_table_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2006,10 +2037,10 @@ async def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disa headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2017,20 +2048,22 @@ async def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disa response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2077,10 +2110,11 @@ async def begin_migrate_cassandra_table_to_manual_throughput( # pylint: disable params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_operations.py index 21c41cc10e8e..1e58e81d93e8 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._collection_operations import ( build_list_metric_definitions_request, build_list_metrics_request, @@ -119,7 +117,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -135,7 +132,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -221,7 +217,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -237,7 +232,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -312,7 +306,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -328,7 +321,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_operations.py index 7b7abd6331a8..d6146c5c07b4 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._collection_partition_operations import build_list_metrics_request, build_list_usages_request if sys.version_info >= (3, 9): @@ -115,7 +113,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -131,7 +128,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -217,7 +213,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -233,7 +228,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_region_operations.py index dbad34849bd7..e74c836fb53f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_partition_region_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._collection_partition_region_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -119,7 +117,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -135,7 +132,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_region_operations.py index ace8fe220095..2047546baa8b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_collection_region_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._collection_region_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -119,7 +117,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -135,7 +132,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_account_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_account_region_operations.py index b3d031ce17d4..5c4d6917bf8d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_account_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_account_region_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._database_account_region_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -105,7 +103,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -121,7 +118,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_accounts_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_accounts_operations.py index fb11f01e6579..2007764906f4 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_accounts_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_accounts_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._database_accounts_operations import ( build_check_name_exists_request, build_create_or_update_request, @@ -117,7 +117,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -131,7 +130,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -144,7 +143,7 @@ async def _update_initial( account_name: str, update_parameters: Union[_models.DatabaseAccountUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseAccountGetResults: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -158,7 +157,7 @@ async def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseAccountGetResults] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -179,10 +178,10 @@ async def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -190,10 +189,14 @@ async def _update_initial( response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -304,10 +307,11 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -335,7 +339,7 @@ async def _create_or_update_initial( account_name: str, create_update_parameters: Union[_models.DatabaseAccountCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseAccountGetResults: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -349,7 +353,7 @@ async def 
_create_or_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseAccountGetResults] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -370,10 +374,10 @@ async def _create_or_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -381,10 +385,14 @@ async def _create_or_update_initial( response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -501,10 +509,11 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -526,9 +535,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + async def _delete_initial(self, resource_group_name: str, account_name: str, **kwargs: Any) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -541,7 +548,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -551,10 +558,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -562,6 +569,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = 
pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -572,8 +583,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete(self, resource_group_name: str, account_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -597,7 +612,7 @@ async def begin_delete(self, resource_group_name: str, account_name: str, **kwar lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, api_version=api_version, @@ -606,6 +621,7 @@ async def begin_delete(self, resource_group_name: str, account_name: str, **kwar params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -627,13 +643,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _failover_priority_change_initial( # pylint: disable=inconsistent-return-statements + async def _failover_priority_change_initial( self, resource_group_name: str, account_name: str, failover_parameters: Union[_models.FailoverPolicies, IO[bytes]], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -647,7 +663,7 @@ async def _failover_priority_change_initial( # pylint: disable=inconsistent-ret api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -668,10 +684,10 @@ async def _failover_priority_change_initial( # pylint: disable=inconsistent-ret headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -679,6 +695,10 @@ async def _failover_priority_change_initial( # pylint: disable=inconsistent-ret response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the 
body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -689,8 +709,12 @@ async def _failover_priority_change_initial( # pylint: disable=inconsistent-ret ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload async def begin_failover_priority_change( @@ -787,7 +811,7 @@ async def begin_failover_priority_change( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._failover_priority_change_initial( # type: ignore + raw_result = await self._failover_priority_change_initial( resource_group_name=resource_group_name, account_name=account_name, failover_parameters=failover_parameters, @@ -798,6 +822,7 @@ async def begin_failover_priority_change( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -852,7 +877,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -868,7 +892,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -936,7 +959,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -952,7 +974,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1018,7 +1039,6 @@ async def list_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1032,7 +1052,7 @@ async def list_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListKeysResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListKeysResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1076,7 +1096,6 @@ async def list_connection_strings( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1090,20 +1109,20 @@ async def list_connection_strings( map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListConnectionStringsResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListConnectionStringsResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _offline_region_initial( # pylint: disable=inconsistent-return-statements + async def _offline_region_initial( self, resource_group_name: str, account_name: str, region_parameter_for_offline: Union[_models.RegionForOnlineOffline, IO[bytes]], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1117,7 +1136,7 @@ async def _offline_region_initial( # pylint: disable=inconsistent-return-statem api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1138,10 +1157,10 @@ async def _offline_region_initial( # pylint: disable=inconsistent-return-statem headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1149,6 +1168,10 @@ async def _offline_region_initial( # pylint: disable=inconsistent-return-statem response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -1160,8 +1183,12 @@ async def _offline_region_initial( # pylint: disable=inconsistent-return-statem ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload async def begin_offline_region( @@ -1252,7 +1279,7 @@ async def begin_offline_region( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._offline_region_initial( # type: ignore + raw_result = await self._offline_region_initial( resource_group_name=resource_group_name, account_name=account_name, region_parameter_for_offline=region_parameter_for_offline, @@ -1263,6 +1290,7 @@ async def begin_offline_region( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: 
disable=inconsistent-return-statements @@ -1284,13 +1312,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _online_region_initial( # pylint: disable=inconsistent-return-statements + async def _online_region_initial( self, resource_group_name: str, account_name: str, region_parameter_for_online: Union[_models.RegionForOnlineOffline, IO[bytes]], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1304,7 +1332,7 @@ async def _online_region_initial( # pylint: disable=inconsistent-return-stateme api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1325,10 +1353,10 @@ async def _online_region_initial( # pylint: disable=inconsistent-return-stateme headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1336,6 +1364,10 @@ async def _online_region_initial( # pylint: disable=inconsistent-return-stateme response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -1347,8 +1379,12 @@ async def _online_region_initial( # pylint: disable=inconsistent-return-stateme ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload async def begin_online_region( @@ -1439,7 +1475,7 @@ async def begin_online_region( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._online_region_initial( # type: ignore + raw_result = await self._online_region_initial( resource_group_name=resource_group_name, account_name=account_name, region_parameter_for_online=region_parameter_for_online, @@ -1450,6 +1486,7 @@ async def begin_online_region( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1508,7 +1545,6 @@ async def get_read_only_keys( headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1522,7 +1558,7 @@ async def get_read_only_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1566,7 +1602,6 @@ async def list_read_only_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1580,20 +1615,20 @@ async def list_read_only_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _regenerate_key_initial( # pylint: disable=inconsistent-return-statements + async def _regenerate_key_initial( self, resource_group_name: str, account_name: str, key_to_regenerate: Union[_models.DatabaseAccountRegenerateKeyParameters, IO[bytes]], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1607,7 +1642,7 @@ async def _regenerate_key_initial( # pylint: disable=inconsistent-return-statem api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1628,10 +1663,10 @@ async def _regenerate_key_initial( # pylint: disable=inconsistent-return-statem headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1639,6 +1674,10 @@ async def _regenerate_key_initial( # pylint: disable=inconsistent-return-statem response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1649,8 +1688,12 @@ async def _regenerate_key_initial( # pylint: disable=inconsistent-return-statem ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, 
response_headers) # type: ignore + + return deserialized # type: ignore @overload async def begin_regenerate_key( @@ -1739,7 +1782,7 @@ async def begin_regenerate_key( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._regenerate_key_initial( # type: ignore + raw_result = await self._regenerate_key_initial( resource_group_name=resource_group_name, account_name=account_name, key_to_regenerate=key_to_regenerate, @@ -1750,6 +1793,7 @@ async def begin_regenerate_key( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1803,7 +1847,6 @@ async def check_name_exists(self, account_name: str, **kwargs: Any) -> bool: headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1866,7 +1909,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1882,7 +1924,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1956,7 +1997,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1972,7 +2012,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2041,7 +2080,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2057,7 +2095,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_operations.py index d6596da98007..7b1205f45266 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._database_operations import ( build_list_metric_definitions_request, build_list_metrics_request, @@ -110,7 +108,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -126,7 +123,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -208,7 +204,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -224,7 +219,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -296,7 +290,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -312,7 +305,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_gremlin_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_gremlin_resources_operations.py index a799913a5040..8763dd96ee72 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_gremlin_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_gremlin_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._gremlin_resources_operations import ( build_create_update_gremlin_database_request, build_create_update_gremlin_graph_request, @@ -121,7 +121,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -137,7 +136,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -207,7 +205,6 @@ async def get_gremlin_database( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -221,7 +218,7 @@ async def get_gremlin_database( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -235,7 +232,7 @@ async def _create_update_gremlin_database_initial( database_name: str, create_update_gremlin_database_parameters: Union[_models.GremlinDatabaseCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.GremlinDatabaseGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -249,7 +246,7 @@ async def _create_update_gremlin_database_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.GremlinDatabaseGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -273,10 +270,10 @@ async def _create_update_gremlin_database_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -284,20 +281,22 @@ async def _create_update_gremlin_database_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -421,10 +420,11 @@ async def begin_create_update_gremlin_database( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -446,9 +446,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_gremlin_database_initial( # pylint: disable=inconsistent-return-statements + async def _delete_gremlin_database_initial( self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -461,7 +461,7 @@ async def _delete_gremlin_database_initial( # pylint: disable=inconsistent-retu _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_gremlin_database_request( resource_group_name=resource_group_name, @@ -472,10 +472,10 @@ async def _delete_gremlin_database_initial( # pylint: disable=inconsistent-retu headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -483,6 +483,10 @@ async def _delete_gremlin_database_initial( # pylint: disable=inconsistent-retu response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -493,8 +497,12 @@ async def _delete_gremlin_database_initial( # pylint: disable=inconsistent-retu ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_gremlin_database( @@ -522,7 +530,7 @@ async def begin_delete_gremlin_database( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_gremlin_database_initial( # type: ignore + raw_result = await self._delete_gremlin_database_initial( resource_group_name=resource_group_name, 
account_name=account_name, database_name=database_name, @@ -532,6 +540,7 @@ async def begin_delete_gremlin_database( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -594,7 +603,6 @@ async def get_gremlin_database_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -608,7 +616,7 @@ async def get_gremlin_database_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -622,7 +630,7 @@ async def _update_gremlin_database_throughput_initial( # pylint: disable=name-t database_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -636,7 +644,7 @@ async def _update_gremlin_database_throughput_initial( # pylint: disable=name-t api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -658,10 +666,10 @@ async def _update_gremlin_database_throughput_initial( # pylint: disable=name-t headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -669,20 +677,22 @@ async def _update_gremlin_database_throughput_initial( # pylint: disable=name-t response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -806,10 +816,11 @@ async def begin_update_gremlin_database_throughput( params=_params, 
**kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -833,7 +844,7 @@ def get_long_running_output(pipeline_response): async def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -846,7 +857,7 @@ async def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=nam _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_database_to_autoscale_request( resource_group_name=resource_group_name, @@ -857,10 +868,10 @@ async def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=nam headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -868,20 +879,22 @@ async def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=nam response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -925,10 +938,11 @@ async def begin_migrate_gremlin_database_to_autoscale( # pylint: disable=name-t params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -952,7 +966,7 @@ def get_long_running_output(pipeline_response): async def 
_migrate_gremlin_database_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -965,7 +979,7 @@ async def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: dis _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_database_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -976,10 +990,10 @@ async def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: dis headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -987,20 +1001,22 @@ async def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: dis response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1044,10 +1060,11 @@ async def begin_migrate_gremlin_database_to_manual_throughput( # pylint: disabl params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1114,7 +1131,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1130,7 +1146,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1202,7 +1217,6 @@ async def get_gremlin_graph( headers=_headers, params=_params, 
) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1216,7 +1230,7 @@ async def get_gremlin_graph( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response) + deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1231,7 +1245,7 @@ async def _create_update_gremlin_graph_initial( graph_name: str, create_update_gremlin_graph_parameters: Union[_models.GremlinGraphCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.GremlinGraphGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1245,7 +1259,7 @@ async def _create_update_gremlin_graph_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.GremlinGraphGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1268,10 +1282,10 @@ async def _create_update_gremlin_graph_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1279,20 +1293,22 @@ async def _create_update_gremlin_graph_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1423,10 +1439,11 @@ async def begin_create_update_gremlin_graph( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response) + deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1448,9 +1465,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def 
_delete_gremlin_graph_initial( # pylint: disable=inconsistent-return-statements + async def _delete_gremlin_graph_initial( self, resource_group_name: str, account_name: str, database_name: str, graph_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1463,7 +1480,7 @@ async def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_gremlin_graph_request( resource_group_name=resource_group_name, @@ -1475,10 +1492,10 @@ async def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1486,6 +1503,10 @@ async def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return- response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1496,8 +1517,12 @@ async def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return- ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_gremlin_graph( @@ -1527,7 +1552,7 @@ async def begin_delete_gremlin_graph( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_gremlin_graph_initial( # type: ignore + raw_result = await self._delete_gremlin_graph_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -1538,6 +1563,7 @@ async def begin_delete_gremlin_graph( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1603,7 +1629,6 @@ async def get_gremlin_graph_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1617,7 +1642,7 @@ async def get_gremlin_graph_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", 
pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1632,7 +1657,7 @@ async def _update_gremlin_graph_throughput_initial( graph_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1646,7 +1671,7 @@ async def _update_gremlin_graph_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1669,10 +1694,10 @@ async def _update_gremlin_graph_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1680,20 +1705,22 @@ async def _update_gremlin_graph_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1827,10 +1854,11 @@ async def begin_update_gremlin_graph_throughput( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1854,7 +1882,7 @@ def get_long_running_output(pipeline_response): async def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, graph_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1867,7 +1895,7 @@ async def 
_migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-t _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_graph_to_autoscale_request( resource_group_name=resource_group_name, @@ -1879,10 +1907,10 @@ async def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-t headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1890,20 +1918,22 @@ async def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-t response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1950,10 +1980,11 @@ async def begin_migrate_gremlin_graph_to_autoscale( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1977,7 +2008,7 @@ def get_long_running_output(pipeline_response): async def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, graph_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1990,7 +2021,7 @@ async def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disabl _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_graph_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2002,10 +2033,10 @@ async def 
_migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disabl headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2013,20 +2044,22 @@ async def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disabl response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2073,10 +2106,11 @@ async def begin_migrate_gremlin_graph_to_manual_throughput( # pylint: disable=n params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2106,7 +2140,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na graph_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2120,7 +2154,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2143,10 +2177,10 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2154,12 +2188,14 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na response = pipeline_response.http_response if response.status_code 
not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2286,10 +2322,11 @@ async def begin_retrieve_continuous_backup_information( # pylint: disable=name- params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_locations_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_locations_operations.py index 1df34b58e950..dfcc88306b67 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_locations_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_locations_operations.py @@ -20,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._locations_operations import build_get_request, build_list_request if sys.version_info >= (3, 9): @@ -89,7 +87,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -105,7 +102,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -166,7 +162,6 @@ async def get(self, location: str, **kwargs: Any) -> _models.LocationGetResult: headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -180,7 +175,7 @@ async def get(self, location: str, **kwargs: Any) -> _models.LocationGetResult: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LocationGetResult", pipeline_response) + deserialized = self._deserialize("LocationGetResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_mongo_db_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_mongo_db_resources_operations.py index a1190c5a91a9..8bf04def918b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_mongo_db_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_mongo_db_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._mongo_db_resources_operations import ( build_create_update_mongo_db_collection_request, build_create_update_mongo_db_database_request, @@ -129,7 +129,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -145,7 +144,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -215,7 +213,6 @@ async def get_mongo_db_database( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -229,7 +226,7 @@ async def get_mongo_db_database( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -243,7 +240,7 @@ async def _create_update_mongo_db_database_initial( database_name: str, create_update_mongo_db_database_parameters: Union[_models.MongoDBDatabaseCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.MongoDBDatabaseGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -257,7 +254,7 @@ async def _create_update_mongo_db_database_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoDBDatabaseGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -281,10 +278,10 @@ async def _create_update_mongo_db_database_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -292,20 +289,22 @@ async def _create_update_mongo_db_database_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + 
deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -429,10 +428,11 @@ async def begin_create_update_mongo_db_database( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -454,9 +454,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-return-statements + async def _delete_mongo_db_database_initial( self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -469,7 +469,7 @@ async def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-ret _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_db_database_request( resource_group_name=resource_group_name, @@ -480,10 +480,10 @@ async def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-ret headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -491,6 +491,10 @@ async def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-ret response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -501,8 +505,12 @@ async def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-ret ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_mongo_db_database( @@ -530,7 +538,7 @@ async def begin_delete_mongo_db_database( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_mongo_db_database_initial( # type: ignore + raw_result = await self._delete_mongo_db_database_initial( 
resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -540,6 +548,7 @@ async def begin_delete_mongo_db_database( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -602,7 +611,6 @@ async def get_mongo_db_database_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -616,7 +624,7 @@ async def get_mongo_db_database_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -630,7 +638,7 @@ async def _update_mongo_db_database_throughput_initial( # pylint: disable=name- database_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -644,7 +652,7 @@ async def _update_mongo_db_database_throughput_initial( # pylint: disable=name- api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -666,10 +674,10 @@ async def _update_mongo_db_database_throughput_initial( # pylint: disable=name- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -677,20 +685,22 @@ async def _update_mongo_db_database_throughput_initial( # pylint: disable=name- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -814,10 +824,11 @@ async def 
begin_update_mongo_db_database_throughput( # pylint: disable=name-too params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -841,7 +852,7 @@ def get_long_running_output(pipeline_response): async def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -854,7 +865,7 @@ async def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=na _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_database_to_autoscale_request( resource_group_name=resource_group_name, @@ -865,10 +876,10 @@ async def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=na headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -876,20 +887,22 @@ async def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=na response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -933,10 +946,11 @@ async def begin_migrate_mongo_db_database_to_autoscale( # pylint: disable=name- params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -960,7 +974,7 @@ 
def get_long_running_output(pipeline_response): async def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -973,7 +987,7 @@ async def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: di _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_database_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -984,10 +998,10 @@ async def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: di headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -995,20 +1009,22 @@ async def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: di response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1052,10 +1068,11 @@ async def begin_migrate_mongo_db_database_to_manual_throughput( # pylint: disab params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1122,7 +1139,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1138,7 +1154,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1210,7 +1225,6 @@ 
async def get_mongo_db_collection( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1224,7 +1238,7 @@ async def get_mongo_db_collection( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1239,7 +1253,7 @@ async def _create_update_mongo_db_collection_initial( # pylint: disable=name-to collection_name: str, create_update_mongo_db_collection_parameters: Union[_models.MongoDBCollectionCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.MongoDBCollectionGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1253,7 +1267,7 @@ async def _create_update_mongo_db_collection_initial( # pylint: disable=name-to api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoDBCollectionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1278,10 +1292,10 @@ async def _create_update_mongo_db_collection_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1289,20 +1303,22 @@ async def _create_update_mongo_db_collection_initial( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1436,10 +1452,11 @@ async def begin_create_update_mongo_db_collection( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response.http_response) if 
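For the collection create/update poller above, a usage sketch follows; the shard key, throughput value and resource names are illustrative placeholders, not values from this diff:

```python
# Usage sketch for begin_create_update_mongo_db_collection (placeholder values).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import (
    CreateUpdateOptions,
    MongoDBCollectionCreateUpdateParameters,
    MongoDBCollectionResource,
)


async def create_collection(client: CosmosDBManagementClient) -> None:
    poller = await client.mongo_db_resources.begin_create_update_mongo_db_collection(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        database_name="mydb",
        collection_name="orders",
        create_update_mongo_db_collection_parameters=MongoDBCollectionCreateUpdateParameters(
            resource=MongoDBCollectionResource(id="orders", shard_key={"customer_id": "Hash"}),
            options=CreateUpdateOptions(throughput=400),
        ),
    )
    collection = await poller.result()  # MongoDBCollectionGetResults
    print(collection.id, collection.resource.shard_key)
```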
cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1461,9 +1478,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-return-statements + async def _delete_mongo_db_collection_initial( self, resource_group_name: str, account_name: str, database_name: str, collection_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1476,7 +1493,7 @@ async def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-r _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_db_collection_request( resource_group_name=resource_group_name, @@ -1488,10 +1505,10 @@ async def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-r headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1499,6 +1516,10 @@ async def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-r response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1509,8 +1530,12 @@ async def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-r ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_mongo_db_collection( @@ -1540,7 +1565,7 @@ async def begin_delete_mongo_db_collection( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_mongo_db_collection_initial( # type: ignore + raw_result = await self._delete_mongo_db_collection_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -1551,6 +1576,7 @@ async def begin_delete_mongo_db_collection( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1616,7 +1642,6 @@ async def get_mongo_db_collection_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
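The delete poller above now receives the raw streamed response internally, but from the caller's side the operation still resolves to `None`. Sketch with placeholder names:

```python
# Usage sketch for begin_delete_mongo_db_collection (placeholder names).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def drop_collection(client: CosmosDBManagementClient) -> None:
    poller = await client.mongo_db_resources.begin_delete_mongo_db_collection(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        database_name="mydb",
        collection_name="orders",
    )
    await poller.result()  # returns None once the service reports completion
```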
self._client.format_url(_request.url) _stream = False @@ -1630,7 +1655,7 @@ async def get_mongo_db_collection_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1645,7 +1670,7 @@ async def _update_mongo_db_collection_throughput_initial( # pylint: disable=nam collection_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1659,7 +1684,7 @@ async def _update_mongo_db_collection_throughput_initial( # pylint: disable=nam api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1682,10 +1707,10 @@ async def _update_mongo_db_collection_throughput_initial( # pylint: disable=nam headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1693,20 +1718,22 @@ async def _update_mongo_db_collection_throughput_initial( # pylint: disable=nam response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1840,10 +1867,11 @@ async def begin_update_mongo_db_collection_throughput( # pylint: disable=name-t params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1867,7 
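A sketch pairing the collection throughput read with the update poller changed above; the RU/s values and names are placeholders:

```python
# Usage sketch: read then update collection throughput (placeholder values).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import ThroughputSettingsResource, ThroughputSettingsUpdateParameters


async def bump_collection_throughput(client: CosmosDBManagementClient) -> None:
    current = await client.mongo_db_resources.get_mongo_db_collection_throughput(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        database_name="mydb",
        collection_name="orders",
    )
    print("current RU/s:", current.resource.throughput)

    poller = await client.mongo_db_resources.begin_update_mongo_db_collection_throughput(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        database_name="mydb",
        collection_name="orders",
        update_throughput_parameters=ThroughputSettingsUpdateParameters(
            resource=ThroughputSettingsResource(throughput=1000)
        ),
    )
    updated = await poller.result()
    print("new RU/s:", updated.resource.throughput)
```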
+1895,7 @@ def get_long_running_output(pipeline_response): async def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, collection_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1880,7 +1908,7 @@ async def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable= _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_collection_to_autoscale_request( resource_group_name=resource_group_name, @@ -1892,10 +1920,10 @@ async def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable= headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1903,20 +1931,22 @@ async def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable= response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1963,10 +1993,11 @@ async def begin_migrate_mongo_db_collection_to_autoscale( # pylint: disable=nam params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1990,7 +2021,7 @@ def get_long_running_output(pipeline_response): async def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, collection_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2003,7 +2034,7 @@ 
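The collection-level migration pollers above mirror the database-level calls, with an extra `collection_name` argument; a brief sketch with placeholder names:

```python
# Usage sketch for begin_migrate_mongo_db_collection_to_autoscale (placeholder names).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def migrate_collection_to_autoscale(client: CosmosDBManagementClient) -> None:
    poller = await client.mongo_db_resources.begin_migrate_mongo_db_collection_to_autoscale(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        database_name="mydb",
        collection_name="orders",
    )
    settings = await poller.result()
    print(settings.resource.autoscale_settings)
```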
async def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_collection_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2015,10 +2046,10 @@ async def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2026,20 +2057,22 @@ async def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2086,10 +2119,11 @@ async def begin_migrate_mongo_db_collection_to_manual_throughput( # pylint: dis params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2152,7 +2186,6 @@ async def get_mongo_role_definition( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2166,7 +2199,7 @@ async def get_mongo_role_definition( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2182,7 +2215,7 @@ async def _create_update_mongo_role_definition_initial( # pylint: disable=name- _models.MongoRoleDefinitionCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.MongoRoleDefinitionGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, 
Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2196,7 +2229,7 @@ async def _create_update_mongo_role_definition_initial( # pylint: disable=name- api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoRoleDefinitionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2220,10 +2253,10 @@ async def _create_update_mongo_role_definition_initial( # pylint: disable=name- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2231,12 +2264,14 @@ async def _create_update_mongo_role_definition_initial( # pylint: disable=name- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2363,10 +2398,11 @@ async def begin_create_update_mongo_role_definition( # pylint: disable=name-too params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2388,9 +2424,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent-return-statements + async def _delete_mongo_role_definition_initial( self, mongo_role_definition_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2403,7 +2439,7 @@ async def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_role_definition_request( mongo_role_definition_id=mongo_role_definition_id, @@ -2414,10 +2450,10 @@ async def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent headers=_headers, 
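The Mongo RBAC role-definition poller changes the same way. Below is a rough usage sketch; the role-definition id format and the `Privilege`/`PrivilegeResource`/`Role` field names are assumptions to verify against `azure.mgmt.cosmosdb.models`:

```python
# Rough usage sketch for begin_create_update_mongo_role_definition.
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import (
    MongoRoleDefinitionCreateUpdateParameters,
    Privilege,
    PrivilegeResource,
    Role,
)


async def create_reader_role(client: CosmosDBManagementClient) -> None:
    poller = await client.mongo_db_resources.begin_create_update_mongo_role_definition(
        mongo_role_definition_id="mydb.order_reader",  # assumed "<db>.<roleName>" format
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        create_update_mongo_role_definition_parameters=MongoRoleDefinitionCreateUpdateParameters(
            role_name="order_reader",
            database_name="mydb",
            privileges=[
                Privilege(
                    resource=PrivilegeResource(db="mydb", collection="orders"),
                    actions=["find"],
                )
            ],
            roles=[Role(db="mydb", role="read")],
        ),
    )
    role = await poller.result()  # MongoRoleDefinitionGetResults
    print(role.id)
```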
params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2425,11 +2461,19 @@ async def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_mongo_role_definition( @@ -2457,7 +2501,7 @@ async def begin_delete_mongo_role_definition( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_mongo_role_definition_initial( # type: ignore + raw_result = await self._delete_mongo_role_definition_initial( mongo_role_definition_id=mongo_role_definition_id, resource_group_name=resource_group_name, account_name=account_name, @@ -2467,6 +2511,7 @@ async def begin_delete_mongo_role_definition( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2530,7 +2575,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2546,7 +2590,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2616,7 +2659,6 @@ async def get_mongo_user_definition( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2630,7 +2672,7 @@ async def get_mongo_user_definition( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2646,7 +2688,7 @@ async def _create_update_mongo_user_definition_initial( # pylint: disable=name- _models.MongoUserDefinitionCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.MongoUserDefinitionGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2660,7 +2702,7 @@ async def 
_create_update_mongo_user_definition_initial( # pylint: disable=name- api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoUserDefinitionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2684,10 +2726,10 @@ async def _create_update_mongo_user_definition_initial( # pylint: disable=name- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2695,12 +2737,14 @@ async def _create_update_mongo_user_definition_initial( # pylint: disable=name- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2827,10 +2871,11 @@ async def begin_create_update_mongo_user_definition( # pylint: disable=name-too params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2852,9 +2897,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent-return-statements + async def _delete_mongo_user_definition_initial( self, mongo_user_definition_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2867,7 +2912,7 @@ async def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_user_definition_request( mongo_user_definition_id=mongo_user_definition_id, @@ -2878,10 +2923,10 @@ async def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = 
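The user-definition poller follows the same shape. A rough sketch; the id format and the parameter field names are from memory and should be checked against the models module, and the password is obviously a placeholder:

```python
# Rough usage sketch for begin_create_update_mongo_user_definition (field names assumed).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import MongoUserDefinitionCreateUpdateParameters, Role


async def create_app_user(client: CosmosDBManagementClient) -> None:
    poller = await client.mongo_db_resources.begin_create_update_mongo_user_definition(
        mongo_user_definition_id="mydb.app_user",  # assumed "<db>.<userName>" format
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        create_update_mongo_user_definition_parameters=MongoUserDefinitionCreateUpdateParameters(
            user_name="app_user",
            password="<secret>",  # placeholder; source from a secret store in practice
            database_name="mydb",
            roles=[Role(db="mydb", role="read")],
        ),
    )
    user = await poller.result()  # MongoUserDefinitionGetResults
    print(user.id)
```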
False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2889,11 +2934,19 @@ async def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_mongo_user_definition( @@ -2921,7 +2974,7 @@ async def begin_delete_mongo_user_definition( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_mongo_user_definition_initial( # type: ignore + raw_result = await self._delete_mongo_user_definition_initial( mongo_user_definition_id=mongo_user_definition_id, resource_group_name=resource_group_name, account_name=account_name, @@ -2931,6 +2984,7 @@ async def begin_delete_mongo_user_definition( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2994,7 +3048,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -3010,7 +3063,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -3047,7 +3099,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na collection_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3061,7 +3113,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3084,10 +3136,10 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3095,12 +3147,14 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3227,10 +3281,11 @@ async def begin_retrieve_continuous_backup_information( # pylint: disable=name- params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_notebook_workspaces_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_notebook_workspaces_operations.py index e83410263e34..bbd7e6ab0731 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_notebook_workspaces_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_notebook_workspaces_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
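For the continuous-backup poller above, a usage sketch; the region must be one of the account's locations, and the value below is a placeholder:

```python
# Usage sketch for begin_retrieve_continuous_backup_information (placeholder region).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import ContinuousBackupRestoreLocation


async def latest_restorable_time(client: CosmosDBManagementClient) -> None:
    poller = await client.mongo_db_resources.begin_retrieve_continuous_backup_information(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        database_name="mydb",
        collection_name="orders",
        location=ContinuousBackupRestoreLocation(location="West US"),
    )
    info = await poller.result()  # BackupInformation
    print(info.continuous_backup_information.latest_restorable_timestamp)
```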
import models as _models -from ..._vendor import _convert_request from ...operations._notebook_workspaces_operations import ( build_create_or_update_request, build_delete_request, @@ -109,7 +109,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -125,7 +124,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -200,7 +198,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -215,7 +212,7 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookWorkspace", pipeline_response) + deserialized = self._deserialize("NotebookWorkspace", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -229,7 +226,7 @@ async def _create_or_update_initial( notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: Union[_models.NotebookWorkspaceCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.NotebookWorkspace: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -243,7 +240,7 @@ async def _create_or_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.NotebookWorkspace] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -265,10 +262,10 @@ async def _create_or_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -276,11 +273,15 @@ async def _create_or_update_initial( response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookWorkspace", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -405,10 +406,11 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def 
get_long_running_output(pipeline_response): - deserialized = self._deserialize("NotebookWorkspace", pipeline_response) + deserialized = self._deserialize("NotebookWorkspace", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -430,13 +432,13 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, account_name: str, notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -449,7 +451,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -460,10 +462,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -471,12 +473,20 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -509,7 +519,7 @@ async def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, notebook_workspace_name=notebook_workspace_name, @@ -519,6 +529,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -585,7 +596,6 @@ async def list_connection_info( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -600,20 +610,20 @@ async def 
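A usage sketch for the notebook-workspace create and delete pollers changed above; the workspace name enum currently only defines "default", and the other names are placeholders:

```python
# Usage sketch for notebook workspace create/delete (placeholder names).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import NotebookWorkspaceCreateUpdateParameters, NotebookWorkspaceName


async def recreate_notebook_workspace(client: CosmosDBManagementClient) -> None:
    poller = await client.notebook_workspaces.begin_create_or_update(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        notebook_workspace_name=NotebookWorkspaceName.DEFAULT,
        notebook_create_update_parameters=NotebookWorkspaceCreateUpdateParameters(),
    )
    workspace = await poller.result()  # NotebookWorkspace
    print(workspace.status)

    delete_poller = await client.notebook_workspaces.begin_delete(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        notebook_workspace_name=NotebookWorkspaceName.DEFAULT,
    )
    await delete_poller.result()
```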
list_connection_info( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookWorkspaceConnectionInfoResult", pipeline_response) + deserialized = self._deserialize("NotebookWorkspaceConnectionInfoResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return-statements + async def _regenerate_auth_token_initial( self, resource_group_name: str, account_name: str, notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -626,7 +636,7 @@ async def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_regenerate_auth_token_request( resource_group_name=resource_group_name, @@ -637,10 +647,10 @@ async def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -648,12 +658,20 @@ async def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_regenerate_auth_token( @@ -686,7 +704,7 @@ async def begin_regenerate_auth_token( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._regenerate_auth_token_initial( # type: ignore + raw_result = await self._regenerate_auth_token_initial( resource_group_name=resource_group_name, account_name=account_name, notebook_workspace_name=notebook_workspace_name, @@ -696,6 +714,7 @@ async def begin_regenerate_auth_token( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -717,13 +736,13 @@ def 
get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _start_initial( # pylint: disable=inconsistent-return-statements + async def _start_initial( self, resource_group_name: str, account_name: str, notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -736,7 +755,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -747,10 +766,10 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -758,12 +777,20 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_start( @@ -796,7 +823,7 @@ async def begin_start( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( # type: ignore + raw_result = await self._start_initial( resource_group_name=resource_group_name, account_name=account_name, notebook_workspace_name=notebook_workspace_name, @@ -806,6 +833,7 @@ async def begin_start( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_operations.py index 6270309b2b1d..374941add85d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport 
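The start and regenerate-auth-token pollers above pair naturally with `list_connection_info`; a sketch follows. The attribute names on the connection-info result (`notebook_server_endpoint`, `auth_token`) are from memory and worth confirming:

```python
# Usage sketch: start workspace, rotate its token, read connection info (placeholder names).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import NotebookWorkspaceName


async def restart_and_connect(client: CosmosDBManagementClient) -> None:
    start_poller = await client.notebook_workspaces.begin_start(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        notebook_workspace_name=NotebookWorkspaceName.DEFAULT,
    )
    await start_poller.result()

    token_poller = await client.notebook_workspaces.begin_regenerate_auth_token(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        notebook_workspace_name=NotebookWorkspaceName.DEFAULT,
    )
    await token_poller.result()

    info = await client.notebook_workspaces.list_connection_info(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        notebook_workspace_name=NotebookWorkspaceName.DEFAULT,
    )
    print(info.notebook_server_endpoint)
```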
import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request if sys.version_info >= (3, 9): @@ -87,7 +85,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -103,7 +100,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_operations.py index 452f3560970b..e5719f865a4c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._partition_key_range_id_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -118,7 +116,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -134,7 +131,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_region_operations.py index 5413cd15ad21..81cab31b89d3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_partition_key_range_id_region_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._partition_key_range_id_region_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -123,7 +121,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -139,7 +136,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_operations.py index 73cdcdb5061b..9d20844c2176 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._percentile_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -103,7 +101,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -119,7 +116,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_source_target_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_source_target_operations.py index 032ba19b5f1e..7703bb570111 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_source_target_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_source_target_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._percentile_source_target_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -117,7 +115,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -133,7 +130,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_target_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_target_operations.py index 3d57f4b07e48..a3b1ae14db50 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_target_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_percentile_target_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._percentile_target_operations import build_list_metrics_request if sys.version_info >= (3, 9): @@ -107,7 +105,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -123,7 +120,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_endpoint_connections_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_endpoint_connections_operations.py index 74b12f3e92f9..972d2bd2c7d7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_endpoint_connections_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest 
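The metrics operations in the hunks above return async pagers, so listing only needs `async for`. The `$filter` expression is required by the service; the one below is illustrative (metric name, time grain and a time window) rather than taken from this diff:

```python
# Usage sketch for the percentile metrics pager (illustrative filter, placeholder names).
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient

EXAMPLE_FILTER = (
    "(name.value eq 'Probabilistic Bounded Staleness') "
    "and timeGrain eq duration'PT5M' "
    "and startTime eq '2024-09-01T00:00:00Z' and endTime eq '2024-09-01T01:00:00Z'"
)


async def print_pbs_metrics(client: CosmosDBManagementClient) -> None:
    async for metric in client.percentile.list_metrics(
        resource_group_name="my-rg",
        account_name="my-cosmos-account",
        filter=EXAMPLE_FILTER,
    ):
        print(metric.name.value if metric.name else None, metric.unit)
```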
import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._private_endpoint_connections_operations import ( build_create_or_update_request, build_delete_request, @@ -108,7 +108,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -124,7 +123,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -193,7 +191,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -207,7 +204,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -221,7 +218,7 @@ async def _create_or_update_initial( private_endpoint_connection_name: str, parameters: Union[_models.PrivateEndpointConnection, IO[bytes]], **kwargs: Any - ) -> Optional[_models.PrivateEndpointConnection]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -235,7 +232,7 @@ async def _create_or_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -257,10 +254,10 @@ async def _create_or_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -268,13 +265,15 @@ async def _create_or_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponseAutoGenerated, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -393,10 +392,11 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -418,9 +418,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -433,7 +433,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -444,10 +444,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -455,12 +455,20 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponseAutoGenerated, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -488,7 +496,7 @@ async def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( 
resource_group_name=resource_group_name, account_name=account_name, private_endpoint_connection_name=private_endpoint_connection_name, @@ -498,6 +506,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_link_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_link_resources_operations.py index 0bf50d2c22bc..e89692eb682a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_link_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_private_link_resources_operations.py @@ -20,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_get_request, build_list_by_database_account_request if sys.version_info >= (3, 9): @@ -99,7 +97,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -115,7 +112,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -184,7 +180,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -198,7 +193,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateLinkResource", pipeline_response) + deserialized = self._deserialize("PrivateLinkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_database_accounts_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_database_accounts_operations.py index 9dd7a454acdf..74d32712b83d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_database_accounts_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_database_accounts_operations.py @@ -20,15 +20,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import 
distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._restorable_database_accounts_operations import ( build_get_by_location_request, build_list_by_location_request, @@ -103,7 +101,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -119,7 +116,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -182,7 +178,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -198,7 +193,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -265,7 +259,6 @@ async def get_by_location( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -279,7 +272,7 @@ async def get_by_location( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("RestorableDatabaseAccountGetResult", pipeline_response) + deserialized = self._deserialize("RestorableDatabaseAccountGetResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py index 9e2befca1c73..e20776bddcbc 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_databases_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._restorable_gremlin_databases_operations import build_list_request if sys.version_info >= (3, 9): @@ -102,7 +100,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -118,7 +115,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py index cc2db438123f..9727d2e9c266 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_graphs_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._restorable_gremlin_graphs_operations import build_list_request if sys.version_info >= (3, 9): @@ -117,7 +115,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -133,7 +130,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py index 477549082bbe..bed6e396aa7d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_gremlin_resources_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._restorable_gremlin_resources_operations import build_list_request if sys.version_info >= (3, 9): @@ -115,7 +113,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -131,7 +128,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py index 61e023c29716..53fbdd05a929 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_collections_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._restorable_mongodb_collections_operations import build_list_request if sys.version_info >= (3, 9): @@ -117,7 +115,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -133,7 +130,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py index 4cc2ea3dfc40..e8696dcf1c04 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_databases_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._restorable_mongodb_databases_operations import build_list_request if sys.version_info >= (3, 9): @@ -102,7 +100,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -118,7 +115,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py index cf2bb998cd0f..3bd82fca524a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_mongodb_resources_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._restorable_mongodb_resources_operations import build_list_request if sys.version_info >= (3, 9): @@ -115,7 +113,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -131,7 +128,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_containers_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_containers_operations.py index 590cdfbe33bc..38f893922e0a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_containers_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_containers_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._restorable_sql_containers_operations import build_list_request if sys.version_info >= (3, 9): @@ -116,7 +114,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -132,7 +129,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_databases_operations.py index 3fbbda935331..bfb7c3f659ca 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_databases_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_databases_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._restorable_sql_databases_operations import build_list_request if sys.version_info >= (3, 9): @@ -102,7 +100,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -118,7 +115,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_resources_operations.py index e4d01217a44a..185894260213 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_sql_resources_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._restorable_sql_resources_operations import build_list_request if sys.version_info >= (3, 9): @@ -115,7 +113,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -131,7 +128,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_table_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_table_resources_operations.py index 00db572b892b..45f0907202c1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_table_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_table_resources_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._restorable_table_resources_operations import build_list_request if sys.version_info >= (3, 9): @@ -114,7 +112,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -130,7 +127,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_tables_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_tables_operations.py index db4b59ceeb3f..868a6a90b922 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_tables_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_restorable_tables_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._restorable_tables_operations import build_list_request if sys.version_info >= (3, 9): @@ -112,7 +110,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -128,7 +125,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_service_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_service_operations.py index ec32140a6c36..b48c44726384 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_service_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_service_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._service_operations import ( build_create_request, build_delete_request, @@ -106,7 +106,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -122,7 +121,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -158,7 +156,7 @@ async def _create_initial( service_name: str, create_update_parameters: Union[_models.ServiceResourceCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ServiceResource]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -172,7 +170,7 @@ async def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ServiceResource]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -194,10 +192,10 @@ async def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -205,20 +203,22 @@ async def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ServiceResource", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -336,10 +336,11 @@ async def begin_create( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ServiceResource", pipeline_response) + deserialized = self._deserialize("ServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -401,7 +402,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -415,16 +415,16 @@ async def get( 
map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ServiceResource", pipeline_response) + deserialized = self._deserialize("ServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, account_name: str, service_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -437,7 +437,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -448,10 +448,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -459,6 +459,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -469,8 +473,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -498,7 +506,7 @@ async def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, service_name=service_name, @@ -508,6 +516,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_sql_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_sql_resources_operations.py index 
d2e3d6979322..67cafa788b06 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_sql_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_sql_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._sql_resources_operations import ( build_create_update_client_encryption_key_request, build_create_update_sql_container_request, @@ -144,7 +144,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -160,7 +159,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -230,7 +228,6 @@ async def get_sql_database( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -244,7 +241,7 @@ async def get_sql_database( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -258,7 +255,7 @@ async def _create_update_sql_database_initial( database_name: str, create_update_sql_database_parameters: Union[_models.SqlDatabaseCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlDatabaseGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -272,7 +269,7 @@ async def _create_update_sql_database_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlDatabaseGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) 
content_type = content_type or "application/json" _json = None @@ -294,10 +291,10 @@ async def _create_update_sql_database_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -305,20 +302,22 @@ async def _create_update_sql_database_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -438,10 +437,11 @@ async def begin_create_update_sql_database( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -463,9 +463,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_database_initial( # pylint: disable=inconsistent-return-statements + async def _delete_sql_database_initial( self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -478,7 +478,7 @@ async def _delete_sql_database_initial( # pylint: disable=inconsistent-return-s _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_database_request( resource_group_name=resource_group_name, @@ -489,10 +489,10 @@ async def _delete_sql_database_initial( # pylint: disable=inconsistent-return-s headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -500,6 +500,10 @@ async def _delete_sql_database_initial( # pylint: disable=inconsistent-return-s response 
= pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -510,8 +514,12 @@ async def _delete_sql_database_initial( # pylint: disable=inconsistent-return-s ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_database( @@ -539,7 +547,7 @@ async def begin_delete_sql_database( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_sql_database_initial( # type: ignore + raw_result = await self._delete_sql_database_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -549,6 +557,7 @@ async def begin_delete_sql_database( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -611,7 +620,6 @@ async def get_sql_database_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -625,7 +633,7 @@ async def get_sql_database_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -639,7 +647,7 @@ async def _update_sql_database_throughput_initial( database_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -653,7 +661,7 @@ async def _update_sql_database_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -675,10 +683,10 @@ async def _update_sql_database_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access _request, stream=_stream, **kwargs ) @@ -686,20 +694,22 @@ async def _update_sql_database_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -823,10 +833,11 @@ async def begin_update_sql_database_throughput( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -850,7 +861,7 @@ def get_long_running_output(pipeline_response): async def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -863,7 +874,7 @@ async def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-to _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_database_to_autoscale_request( resource_group_name=resource_group_name, @@ -874,10 +885,10 @@ async def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -885,20 +896,22 @@ async def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if 
response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -942,10 +955,11 @@ async def begin_migrate_sql_database_to_autoscale( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -969,7 +983,7 @@ def get_long_running_output(pipeline_response): async def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -982,7 +996,7 @@ async def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_database_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -993,10 +1007,10 @@ async def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1004,20 +1018,22 @@ async def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1061,10 
+1077,11 @@ async def begin_migrate_sql_database_to_manual_throughput( # pylint: disable=na params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1131,7 +1148,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1147,7 +1163,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1219,7 +1234,6 @@ async def get_sql_container( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1233,7 +1247,7 @@ async def get_sql_container( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlContainerGetResults", pipeline_response) + deserialized = self._deserialize("SqlContainerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1248,7 +1262,7 @@ async def _create_update_sql_container_initial( container_name: str, create_update_sql_container_parameters: Union[_models.SqlContainerCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlContainerGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1262,7 +1276,7 @@ async def _create_update_sql_container_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlContainerGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1285,10 +1299,10 @@ async def _create_update_sql_container_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1296,20 +1310,22 @@ async def _create_update_sql_container_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = 
self._deserialize("SqlContainerGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1439,10 +1455,11 @@ async def begin_create_update_sql_container( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlContainerGetResults", pipeline_response) + deserialized = self._deserialize("SqlContainerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1464,9 +1481,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_container_initial( # pylint: disable=inconsistent-return-statements + async def _delete_sql_container_initial( self, resource_group_name: str, account_name: str, database_name: str, container_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1479,7 +1496,7 @@ async def _delete_sql_container_initial( # pylint: disable=inconsistent-return- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_container_request( resource_group_name=resource_group_name, @@ -1491,10 +1508,10 @@ async def _delete_sql_container_initial( # pylint: disable=inconsistent-return- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1502,6 +1519,10 @@ async def _delete_sql_container_initial( # pylint: disable=inconsistent-return- response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1512,8 +1533,12 @@ async def _delete_sql_container_initial( # pylint: disable=inconsistent-return- ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_container( @@ -1543,7 +1568,7 @@ async def begin_delete_sql_container( lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_sql_container_initial( # type: ignore + raw_result = await self._delete_sql_container_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -1554,6 +1579,7 @@ async def begin_delete_sql_container( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1619,7 +1645,6 @@ async def get_sql_container_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1633,7 +1658,7 @@ async def get_sql_container_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1648,7 +1673,7 @@ async def _update_sql_container_throughput_initial( container_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1662,7 +1687,7 @@ async def _update_sql_container_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1685,10 +1710,10 @@ async def _update_sql_container_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1696,20 +1721,22 @@ async def _update_sql_container_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1843,10 +1870,11 @@ async def begin_update_sql_container_throughput( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1870,7 +1898,7 @@ def get_long_running_output(pipeline_response): async def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, container_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1883,7 +1911,7 @@ async def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-t _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_container_to_autoscale_request( resource_group_name=resource_group_name, @@ -1895,10 +1923,10 @@ async def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-t headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1906,20 +1934,22 @@ async def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-t response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1966,10 +1996,11 @@ async def begin_migrate_sql_container_to_autoscale( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = 
self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1993,7 +2024,7 @@ def get_long_running_output(pipeline_response): async def _migrate_sql_container_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, container_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2006,7 +2037,7 @@ async def _migrate_sql_container_to_manual_throughput_initial( # pylint: disabl _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_container_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2018,10 +2049,10 @@ async def _migrate_sql_container_to_manual_throughput_initial( # pylint: disabl headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2029,20 +2060,22 @@ async def _migrate_sql_container_to_manual_throughput_initial( # pylint: disabl response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2089,10 +2122,11 @@ async def begin_migrate_sql_container_to_manual_throughput( # pylint: disable=n params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2159,7 +2193,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2175,7 +2208,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2252,7 +2284,6 @@ async def get_client_encryption_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2266,7 +2297,7 @@ async def get_client_encryption_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response) + deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2283,7 +2314,7 @@ async def _create_update_client_encryption_key_initial( # pylint: disable=name- _models.ClientEncryptionKeyCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.ClientEncryptionKeyGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2297,7 +2328,7 @@ async def _create_update_client_encryption_key_initial( # pylint: disable=name- api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ClientEncryptionKeyGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2322,10 +2353,10 @@ async def _create_update_client_encryption_key_initial( # pylint: disable=name- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2333,20 +2364,22 @@ async def _create_update_client_encryption_key_initial( # pylint: disable=name- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2485,10 +2518,11 @@ async def begin_create_update_client_encryption_key( # pylint: disable=name-too params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - 
deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response) + deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2558,7 +2592,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2574,7 +2607,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2655,7 +2687,6 @@ async def get_sql_stored_procedure( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2669,7 +2700,7 @@ async def get_sql_stored_procedure( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response) + deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2687,7 +2718,7 @@ async def _create_update_sql_stored_procedure_initial( # pylint: disable=name-t _models.SqlStoredProcedureCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.SqlStoredProcedureGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2701,7 +2732,7 @@ async def _create_update_sql_stored_procedure_initial( # pylint: disable=name-t api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlStoredProcedureGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2727,10 +2758,10 @@ async def _create_update_sql_stored_procedure_initial( # pylint: disable=name-t headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2738,20 +2769,22 @@ async def _create_update_sql_stored_procedure_initial( # pylint: disable=name-t response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", 
response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2897,10 +2930,11 @@ async def begin_create_update_sql_stored_procedure( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response) + deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2922,7 +2956,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return-statements + async def _delete_sql_stored_procedure_initial( self, resource_group_name: str, account_name: str, @@ -2930,7 +2964,7 @@ async def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent- container_name: str, stored_procedure_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2943,7 +2977,7 @@ async def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_stored_procedure_request( resource_group_name=resource_group_name, @@ -2956,10 +2990,10 @@ async def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2967,6 +3001,10 @@ async def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent- response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -2977,8 +3015,12 @@ async def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent- ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_stored_procedure( @@ -3016,7 +3058,7 @@ async def begin_delete_sql_stored_procedure( lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_sql_stored_procedure_initial( # type: ignore + raw_result = await self._delete_sql_stored_procedure_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -3028,6 +3070,7 @@ async def begin_delete_sql_stored_procedure( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -3097,7 +3140,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -3113,7 +3155,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -3194,7 +3235,6 @@ async def get_sql_user_defined_function( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3208,7 +3248,7 @@ async def get_sql_user_defined_function( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response) + deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3226,7 +3266,7 @@ async def _create_update_sql_user_defined_function_initial( # pylint: disable=n _models.SqlUserDefinedFunctionCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.SqlUserDefinedFunctionGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3240,7 +3280,7 @@ async def _create_update_sql_user_defined_function_initial( # pylint: disable=n api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlUserDefinedFunctionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3266,10 +3306,10 @@ async def _create_update_sql_user_defined_function_initial( # pylint: disable=n headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3277,20 +3317,22 @@ async def _create_update_sql_user_defined_function_initial( # pylint: disable=n response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass 
map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3436,10 +3478,11 @@ async def begin_create_update_sql_user_defined_function( # pylint: disable=name params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response) + deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3461,7 +3504,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-return-statements,name-too-long + async def _delete_sql_user_defined_function_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, @@ -3469,7 +3512,7 @@ async def _delete_sql_user_defined_function_initial( # pylint: disable=inconsis container_name: str, user_defined_function_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3482,7 +3525,7 @@ async def _delete_sql_user_defined_function_initial( # pylint: disable=inconsis _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_user_defined_function_request( resource_group_name=resource_group_name, @@ -3495,10 +3538,10 @@ async def _delete_sql_user_defined_function_initial( # pylint: disable=inconsis headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3506,6 +3549,10 @@ async def _delete_sql_user_defined_function_initial( # pylint: disable=inconsis response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -3516,8 +3563,12 @@ async def _delete_sql_user_defined_function_initial( # pylint: disable=inconsis ) 
response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_user_defined_function( @@ -3555,7 +3606,7 @@ async def begin_delete_sql_user_defined_function( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_sql_user_defined_function_initial( # type: ignore + raw_result = await self._delete_sql_user_defined_function_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -3567,6 +3618,7 @@ async def begin_delete_sql_user_defined_function( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -3636,7 +3688,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -3652,7 +3703,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -3733,7 +3783,6 @@ async def get_sql_trigger( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3747,7 +3796,7 @@ async def get_sql_trigger( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response) + deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3763,7 +3812,7 @@ async def _create_update_sql_trigger_initial( trigger_name: str, create_update_sql_trigger_parameters: Union[_models.SqlTriggerCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlTriggerGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3777,7 +3826,7 @@ async def _create_update_sql_trigger_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlTriggerGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3801,10 +3850,10 @@ async def _create_update_sql_trigger_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3812,20 +3861,22 @@ async def _create_update_sql_trigger_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3965,10 +4016,11 @@ async def begin_create_update_sql_trigger( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response) + deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3990,7 +4042,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statements + async def _delete_sql_trigger_initial( self, resource_group_name: str, account_name: str, @@ -3998,7 +4050,7 @@ async def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-st container_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4011,7 +4063,7 @@ async def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-st _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_trigger_request( resource_group_name=resource_group_name, @@ -4024,10 +4076,10 @@ async def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-st headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4035,6 +4087,10 @@ async def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-st response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, 
response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -4045,8 +4101,12 @@ async def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-st ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_trigger( @@ -4084,7 +4144,7 @@ async def begin_delete_sql_trigger( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_sql_trigger_initial( # type: ignore + raw_result = await self._delete_sql_trigger_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -4096,6 +4156,7 @@ async def begin_delete_sql_trigger( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -4157,7 +4218,6 @@ async def get_sql_role_definition( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -4171,7 +4231,7 @@ async def get_sql_role_definition( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4185,7 +4245,7 @@ async def _create_update_sql_role_definition_initial( # pylint: disable=name-to account_name: str, create_update_sql_role_definition_parameters: Union[_models.SqlRoleDefinitionCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlRoleDefinitionGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4199,7 +4259,7 @@ async def _create_update_sql_role_definition_initial( # pylint: disable=name-to api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlRoleDefinitionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -4223,10 +4283,10 @@ async def _create_update_sql_role_definition_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4234,12 +4294,14 @@ async def _create_update_sql_role_definition_initial( # pylint: 
disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4364,10 +4426,11 @@ async def begin_create_update_sql_role_definition( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4389,9 +4452,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-return-statements + async def _delete_sql_role_definition_initial( self, role_definition_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4404,7 +4467,7 @@ async def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-r _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_role_definition_request( role_definition_id=role_definition_id, @@ -4415,10 +4478,10 @@ async def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-r headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4426,11 +4489,19 @@ async def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-r response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_role_definition( @@ -4458,7 +4529,7 @@ async def 
begin_delete_sql_role_definition( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_sql_role_definition_initial( # type: ignore + raw_result = await self._delete_sql_role_definition_initial( role_definition_id=role_definition_id, resource_group_name=resource_group_name, account_name=account_name, @@ -4468,6 +4539,7 @@ async def begin_delete_sql_role_definition( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -4531,7 +4603,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -4547,7 +4618,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -4616,7 +4686,6 @@ async def get_sql_role_assignment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -4630,7 +4699,7 @@ async def get_sql_role_assignment( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4644,7 +4713,7 @@ async def _create_update_sql_role_assignment_initial( # pylint: disable=name-to account_name: str, create_update_sql_role_assignment_parameters: Union[_models.SqlRoleAssignmentCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlRoleAssignmentGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4658,7 +4727,7 @@ async def _create_update_sql_role_assignment_initial( # pylint: disable=name-to api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlRoleAssignmentGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -4682,10 +4751,10 @@ async def _create_update_sql_role_assignment_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4693,12 +4762,14 @@ async def _create_update_sql_role_assignment_initial( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in 
memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4823,10 +4894,11 @@ async def begin_create_update_sql_role_assignment( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4848,9 +4920,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-return-statements + async def _delete_sql_role_assignment_initial( self, role_assignment_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4863,7 +4935,7 @@ async def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-r _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_role_assignment_request( role_assignment_id=role_assignment_id, @@ -4874,10 +4946,10 @@ async def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-r headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4885,11 +4957,19 @@ async def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-r response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_sql_role_assignment( @@ -4917,7 +4997,7 @@ async def begin_delete_sql_role_assignment( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", 
None) if cont_token is None: - raw_result = await self._delete_sql_role_assignment_initial( # type: ignore + raw_result = await self._delete_sql_role_assignment_initial( role_assignment_id=role_assignment_id, resource_group_name=resource_group_name, account_name=account_name, @@ -4927,6 +5007,7 @@ async def begin_delete_sql_role_assignment( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -4990,7 +5071,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -5006,7 +5086,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -5043,7 +5122,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na container_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5057,7 +5136,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -5080,10 +5159,10 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5091,12 +5170,14 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5223,10 +5304,11 @@ async def begin_retrieve_continuous_backup_information( # pylint: disable=name- params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = 
self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_table_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_table_resources_operations.py index 4e5a6af288e9..f1f8f4a797cc 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_table_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_table_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._table_resources_operations import ( build_create_update_table_request, build_delete_table_request, @@ -111,7 +111,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -127,7 +126,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -196,7 +194,6 @@ async def get_table( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -210,7 +207,7 @@ async def get_table( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TableGetResults", pipeline_response) + deserialized = self._deserialize("TableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -224,7 +221,7 @@ async def _create_update_table_initial( table_name: str, create_update_table_parameters: Union[_models.TableCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.TableGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -238,7 +235,7 @@ async def _create_update_table_initial( api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.TableGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -260,10 +257,10 @@ async def _create_update_table_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -271,20 +268,22 @@ async def _create_update_table_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("TableGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -403,10 +402,11 @@ async def begin_create_update_table( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TableGetResults", pipeline_response) + deserialized = self._deserialize("TableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -428,9 +428,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_table_initial( # pylint: disable=inconsistent-return-statements + async def _delete_table_initial( self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -443,7 +443,7 @@ async def _delete_table_initial( # pylint: disable=inconsistent-return-statemen _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_table_request( resource_group_name=resource_group_name, @@ -454,10 +454,10 @@ async def _delete_table_initial( # pylint: disable=inconsistent-return-statemen headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -465,6 +465,10 @@ async def _delete_table_initial( # pylint: disable=inconsistent-return-statemen response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -475,8 +479,12 @@ async def _delete_table_initial( # pylint: disable=inconsistent-return-statemen ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete_table( @@ -504,7 +512,7 @@ async def begin_delete_table( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_table_initial( # type: ignore + raw_result = await self._delete_table_initial( resource_group_name=resource_group_name, account_name=account_name, table_name=table_name, @@ -514,6 +522,7 @@ async def begin_delete_table( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -576,7 +585,6 @@ async def get_table_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -590,7 +598,7 @@ async def get_table_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -604,7 +612,7 @@ async def _update_table_throughput_initial( table_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -618,7 +626,7 @@ async def _update_table_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -640,10 +648,10 @@ async def _update_table_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False 
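Note (illustrative, not part of the generated diff): the switch to `_stream = True` and `response.stream_download(...)` above only touches the private `*_initial` helpers; the public `begin_*` poller surface of the async table operations is unchanged. A minimal sketch of calling it, assuming the `azure-identity` package and placeholder resource names:

```python
# Sketch only: public async LRO usage is unchanged by the internal streaming rework.
# "<subscription-id>", "<resource-group>", "<cosmos-account>" and "<table>" are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential  # assumes azure-identity is installed
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def delete_table() -> None:
    async with DefaultAzureCredential() as credential:
        async with CosmosDBManagementClient(credential, "<subscription-id>") as client:
            # begin_delete_table still returns an AsyncLROPoller; result() waits for completion.
            poller = await client.table_resources.begin_delete_table(
                resource_group_name="<resource-group>",
                account_name="<cosmos-account>",
                table_name="<table>",
            )
            await poller.result()


asyncio.run(delete_table())
```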
+ _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -651,20 +659,22 @@ async def _update_table_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -788,10 +798,11 @@ async def begin_update_table_throughput( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -815,7 +826,7 @@ def get_long_running_output(pipeline_response): async def _migrate_table_to_autoscale_initial( self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -828,7 +839,7 @@ async def _migrate_table_to_autoscale_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_table_to_autoscale_request( resource_group_name=resource_group_name, @@ -839,10 +850,10 @@ async def _migrate_table_to_autoscale_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -850,20 +861,22 @@ async def _migrate_table_to_autoscale_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 
200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -907,10 +920,11 @@ async def begin_migrate_table_to_autoscale( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -934,7 +948,7 @@ def get_long_running_output(pipeline_response): async def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -947,7 +961,7 @@ async def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-t _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_table_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -958,10 +972,10 @@ async def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-t headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -969,20 +983,22 @@ async def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-t response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1026,10 +1042,11 @@ async def 
begin_migrate_table_to_manual_throughput( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1058,7 +1075,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na table_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1072,7 +1089,7 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1094,10 +1111,10 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1105,12 +1122,14 @@ async def _retrieve_continuous_backup_information_initial( # pylint: disable=na response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1227,10 +1246,11 @@ async def begin_retrieve_continuous_backup_information( # pylint: disable=name- params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/__init__.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/__init__.py index 7471e35315ac..5c4187ac41f0 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/__init__.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/__init__.py @@ -81,7 +81,10 @@ from ._models_py3 import DatabaseAccountUpdateParameters from 
._models_py3 import DatabaseAccountsListResult from ._models_py3 import DatabaseRestoreResource +from ._models_py3 import ErrorAdditionalInfo +from ._models_py3 import ErrorDetail from ._models_py3 import ErrorResponse +from ._models_py3 import ErrorResponseAutoGenerated from ._models_py3 import ExcludedPath from ._models_py3 import ExtendedResourceProperties from ._models_py3 import FailoverPolicies @@ -408,7 +411,10 @@ "DatabaseAccountUpdateParameters", "DatabaseAccountsListResult", "DatabaseRestoreResource", + "ErrorAdditionalInfo", + "ErrorDetail", "ErrorResponse", + "ErrorResponseAutoGenerated", "ExcludedPath", "ExtendedResourceProperties", "FailoverPolicies", diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_cosmos_db_management_client_enums.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_cosmos_db_management_client_enums.py index eeaa26ece864..9957c5fd5d73 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_cosmos_db_management_client_enums.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_cosmos_db_management_client_enums.py @@ -324,7 +324,7 @@ class RoleDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class ServerVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Describes the ServerVersion of an a MongoDB account.""" + """Describes the version of the MongoDB account.""" THREE2 = "3.2" THREE6 = "3.6" @@ -332,6 +332,7 @@ class ServerVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): FOUR2 = "4.2" FIVE0 = "5.0" SIX0 = "6.0" + SEVEN0 = "7.0" class ServiceSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_models_py3.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_models_py3.py index cdadb0d91c8f..efbc35393d04 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_models_py3.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/_models_py3.py @@ -69,8 +69,8 @@ def __init__( class ApiProperties(_serialization.Model): """ApiProperties. - :ivar server_version: Describes the ServerVersion of an a MongoDB account. Known values are: - "3.2", "3.6", "4.0", "4.2", "5.0", and "6.0". + :ivar server_version: Describes the version of the MongoDB account. Known values are: "3.2", + "3.6", "4.0", "4.2", "5.0", "6.0", and "7.0". :vartype server_version: str or ~azure.mgmt.cosmosdb.models.ServerVersion """ @@ -80,8 +80,8 @@ class ApiProperties(_serialization.Model): def __init__(self, *, server_version: Optional[Union[str, "_models.ServerVersion"]] = None, **kwargs: Any) -> None: """ - :keyword server_version: Describes the ServerVersion of an a MongoDB account. Known values are: - "3.2", "3.6", "4.0", "4.2", "5.0", and "6.0". + :keyword server_version: Describes the version of the MongoDB account. Known values are: "3.2", + "3.6", "4.0", "4.2", "5.0", "6.0", and "7.0". :paramtype server_version: str or ~azure.mgmt.cosmosdb.models.ServerVersion """ super().__init__(**kwargs) @@ -2955,8 +2955,8 @@ class DatabaseAccountCreateUpdateParameters(ARMResourceProperties): # pylint: d 1.2. Cassandra and Mongo APIs only work with Tls 1.2. Known values are: "Tls", "Tls11", and "Tls12". :vartype minimal_tls_version: str or ~azure.mgmt.cosmosdb.models.MinimalTlsVersion - :ivar enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity Preview - feature on the account. + :ivar enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity feature on + the account. 
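Illustrative sketch of the new `ServerVersion.SEVEN0` member; `ApiProperties` is the existing model that carries it, and the enum is string-valued and case-insensitive, so the literal `"7.0"` is also accepted:

```python
# Illustrative use of the new ServerVersion.SEVEN0 member via the existing ApiProperties model.
from azure.mgmt.cosmosdb.models import ApiProperties, ServerVersion

api_properties = ApiProperties(server_version=ServerVersion.SEVEN0)

# ServerVersion is a string enum, so the member compares (and serializes) as "7.0".
assert api_properties.server_version == "7.0"
```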
:vartype enable_burst_capacity: bool :ivar customer_managed_key_status: Indicates the status of the Customer Managed Key feature on the account. In case there are errors, the property provides troubleshooting guidance. @@ -3153,8 +3153,8 @@ def __init__( # pylint: disable=too-many-locals Tls 1.2. Cassandra and Mongo APIs only work with Tls 1.2. Known values are: "Tls", "Tls11", and "Tls12". :paramtype minimal_tls_version: str or ~azure.mgmt.cosmosdb.models.MinimalTlsVersion - :keyword enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity Preview - feature on the account. + :keyword enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity feature + on the account. :paramtype enable_burst_capacity: bool :keyword customer_managed_key_status: Indicates the status of the Customer Managed Key feature on the account. In case there are errors, the property provides troubleshooting guidance. @@ -3328,8 +3328,8 @@ class DatabaseAccountGetResults(ARMResourceProperties): # pylint: disable=too-m 1.2. Cassandra and Mongo APIs only work with Tls 1.2. Known values are: "Tls", "Tls11", and "Tls12". :vartype minimal_tls_version: str or ~azure.mgmt.cosmosdb.models.MinimalTlsVersion - :ivar enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity Preview - feature on the account. + :ivar enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity feature on + the account. :vartype enable_burst_capacity: bool :ivar customer_managed_key_status: Indicates the status of the Customer Managed Key feature on the account. In case there are errors, the property provides troubleshooting guidance. @@ -3539,8 +3539,8 @@ def __init__( # pylint: disable=too-many-locals Tls 1.2. Cassandra and Mongo APIs only work with Tls 1.2. Known values are: "Tls", "Tls11", and "Tls12". :paramtype minimal_tls_version: str or ~azure.mgmt.cosmosdb.models.MinimalTlsVersion - :keyword enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity Preview - feature on the account. + :keyword enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity feature + on the account. :paramtype enable_burst_capacity: bool :keyword customer_managed_key_status: Indicates the status of the Customer Managed Key feature on the account. In case there are errors, the property provides troubleshooting guidance. @@ -3863,8 +3863,8 @@ class DatabaseAccountUpdateParameters(_serialization.Model): # pylint: disable= 1.2. Cassandra and Mongo APIs only work with Tls 1.2. Known values are: "Tls", "Tls11", and "Tls12". :vartype minimal_tls_version: str or ~azure.mgmt.cosmosdb.models.MinimalTlsVersion - :ivar enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity Preview - feature on the account. + :ivar enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity feature on + the account. :vartype enable_burst_capacity: bool :ivar customer_managed_key_status: Indicates the status of the Customer Managed Key feature on the account. In case there are errors, the property provides troubleshooting guidance. @@ -4036,8 +4036,8 @@ def __init__( # pylint: disable=too-many-locals Tls 1.2. Cassandra and Mongo APIs only work with Tls 1.2. Known values are: "Tls", "Tls11", and "Tls12". :paramtype minimal_tls_version: str or ~azure.mgmt.cosmosdb.models.MinimalTlsVersion - :keyword enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity Preview - feature on the account. 
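For context, a hedged sketch of the `enable_burst_capacity` flag documented in the surrounding hunks (now described without the "Preview" wording): it is an ordinary keyword on the account update model. The `database_accounts.begin_update` call below reflects the existing SDK surface rather than this hunk, and all names are placeholders:

```python
# Hedged sketch: enable_burst_capacity is a plain keyword on the update model.
# The database_accounts.begin_update call is the existing public surface; names are placeholders.
from azure.identity import DefaultAzureCredential  # assumes azure-identity is installed
from azure.mgmt.cosmosdb import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import DatabaseAccountUpdateParameters

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")
account = client.database_accounts.begin_update(
    "<resource-group>",
    "<cosmos-account>",
    DatabaseAccountUpdateParameters(enable_burst_capacity=True),
).result()
print(account.enable_burst_capacity)
```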
+ :keyword enable_burst_capacity: Flag to indicate enabling/disabling of Burst Capacity feature + on the account. :paramtype enable_burst_capacity: bool :keyword customer_managed_key_status: Indicates the status of the Customer Managed Key feature on the account. In case there are errors, the property provides troubleshooting guidance. @@ -4649,6 +4649,77 @@ def __init__( self.locations = None +class ErrorAdditionalInfo(_serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: JSON + """ + + _validation = { + "type": {"readonly": True}, + "info": {"readonly": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "info": {"key": "info", "type": "object"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(_serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.cosmosdb.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.mgmt.cosmosdb.models.ErrorAdditionalInfo] + """ + + _validation = { + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + class ErrorResponse(_serialization.Model): """Error Response. @@ -4675,6 +4746,27 @@ def __init__(self, *, code: Optional[str] = None, message: Optional[str] = None, self.message = message +class ErrorResponseAutoGenerated(_serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed + operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.cosmosdb.models.ErrorDetail + """ + + _attribute_map = { + "error": {"key": "error", "type": "ErrorDetail"}, + } + + def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None: + """ + :keyword error: The error object. + :paramtype error: ~azure.mgmt.cosmosdb.models.ErrorDetail + """ + super().__init__(**kwargs) + self.error = error + + class ExcludedPath(_serialization.Model): """ExcludedPath. @@ -8775,11 +8867,15 @@ class RestoreParametersBase(_serialization.Model): :vartype restore_source: str :ivar restore_timestamp_in_utc: Time to which the account has to be restored (ISO-8601 format). 
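A hedged sketch of the new ARM error models (`ErrorDetail`, `ErrorResponseAutoGenerated`) defined above; the payload is hypothetical, and `deserialize` is the classmethod inherited from the package's vendored `_serialization.Model` base:

```python
# Hypothetical ARM error payload, deserialized into the new models for inspection.
# ErrorDetail fields are server-populated (readonly), so they are read rather than constructed.
from azure.mgmt.cosmosdb.models import ErrorResponseAutoGenerated

payload = {
    "error": {
        "code": "PreconditionFailed",
        "message": "The restore request could not be processed.",
        "target": "restoreParameters",
        "details": [],
    }
}

error_response = ErrorResponseAutoGenerated.deserialize(payload)
print(error_response.error.code, error_response.error.message)
```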
:vartype restore_timestamp_in_utc: ~datetime.datetime + :ivar restore_with_ttl_disabled: Specifies whether the restored account will have Time-To-Live + disabled upon the successful restore. + :vartype restore_with_ttl_disabled: bool """ _attribute_map = { "restore_source": {"key": "restoreSource", "type": "str"}, "restore_timestamp_in_utc": {"key": "restoreTimestampInUtc", "type": "iso-8601"}, + "restore_with_ttl_disabled": {"key": "restoreWithTtlDisabled", "type": "bool"}, } def __init__( @@ -8787,6 +8883,7 @@ def __init__( *, restore_source: Optional[str] = None, restore_timestamp_in_utc: Optional[datetime.datetime] = None, + restore_with_ttl_disabled: Optional[bool] = None, **kwargs: Any ) -> None: """ @@ -8797,10 +8894,14 @@ def __init__( :keyword restore_timestamp_in_utc: Time to which the account has to be restored (ISO-8601 format). :paramtype restore_timestamp_in_utc: ~datetime.datetime + :keyword restore_with_ttl_disabled: Specifies whether the restored account will have + Time-To-Live disabled upon the successful restore. + :paramtype restore_with_ttl_disabled: bool """ super().__init__(**kwargs) self.restore_source = restore_source self.restore_timestamp_in_utc = restore_timestamp_in_utc + self.restore_with_ttl_disabled = restore_with_ttl_disabled class ResourceRestoreParameters(RestoreParametersBase): @@ -8812,6 +8913,9 @@ class ResourceRestoreParameters(RestoreParametersBase): :vartype restore_source: str :ivar restore_timestamp_in_utc: Time to which the account has to be restored (ISO-8601 format). :vartype restore_timestamp_in_utc: ~datetime.datetime + :ivar restore_with_ttl_disabled: Specifies whether the restored account will have Time-To-Live + disabled upon the successful restore. + :vartype restore_with_ttl_disabled: bool """ @@ -10498,6 +10602,9 @@ class RestoreParameters(RestoreParametersBase): :vartype restore_source: str :ivar restore_timestamp_in_utc: Time to which the account has to be restored (ISO-8601 format). :vartype restore_timestamp_in_utc: ~datetime.datetime + :ivar restore_with_ttl_disabled: Specifies whether the restored account will have Time-To-Live + disabled upon the successful restore. + :vartype restore_with_ttl_disabled: bool :ivar restore_mode: Describes the mode of the restore. "PointInTime" :vartype restore_mode: str or ~azure.mgmt.cosmosdb.models.RestoreMode :ivar databases_to_restore: List of specific databases available for restore. @@ -10512,6 +10619,7 @@ class RestoreParameters(RestoreParametersBase): _attribute_map = { "restore_source": {"key": "restoreSource", "type": "str"}, "restore_timestamp_in_utc": {"key": "restoreTimestampInUtc", "type": "iso-8601"}, + "restore_with_ttl_disabled": {"key": "restoreWithTtlDisabled", "type": "bool"}, "restore_mode": {"key": "restoreMode", "type": "str"}, "databases_to_restore": {"key": "databasesToRestore", "type": "[DatabaseRestoreResource]"}, "gremlin_databases_to_restore": { @@ -10526,6 +10634,7 @@ def __init__( *, restore_source: Optional[str] = None, restore_timestamp_in_utc: Optional[datetime.datetime] = None, + restore_with_ttl_disabled: Optional[bool] = None, restore_mode: Optional[Union[str, "_models.RestoreMode"]] = None, databases_to_restore: Optional[List["_models.DatabaseRestoreResource"]] = None, gremlin_databases_to_restore: Optional[List["_models.GremlinDatabaseRestoreResource"]] = None, @@ -10540,6 +10649,9 @@ def __init__( :keyword restore_timestamp_in_utc: Time to which the account has to be restored (ISO-8601 format). 
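A minimal sketch of the new `restore_with_ttl_disabled` flag on `RestoreParameters`; the restore source, timestamp, and database/collection names are placeholders, and in practice the object is attached to a restore-mode account create request:

```python
# Minimal sketch of the new restore_with_ttl_disabled flag; the restore source, timestamp,
# and database/collection names are placeholders.
import datetime

from azure.mgmt.cosmosdb.models import DatabaseRestoreResource, RestoreParameters

restore_parameters = RestoreParameters(
    restore_mode="PointInTime",
    restore_source="<restorable-database-account-instance-id>",
    restore_timestamp_in_utc=datetime.datetime(2024, 9, 1, tzinfo=datetime.timezone.utc),
    restore_with_ttl_disabled=True,  # new: restored account comes up with Time-To-Live disabled
    databases_to_restore=[
        DatabaseRestoreResource(database_name="database1", collection_names=["collection1"])
    ],
)
```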
:paramtype restore_timestamp_in_utc: ~datetime.datetime + :keyword restore_with_ttl_disabled: Specifies whether the restored account will have + Time-To-Live disabled upon the successful restore. + :paramtype restore_with_ttl_disabled: bool :keyword restore_mode: Describes the mode of the restore. "PointInTime" :paramtype restore_mode: str or ~azure.mgmt.cosmosdb.models.RestoreMode :keyword databases_to_restore: List of specific databases available for restore. @@ -10551,7 +10663,12 @@ def __init__( :keyword tables_to_restore: List of specific tables available for restore. :paramtype tables_to_restore: list[str] """ - super().__init__(restore_source=restore_source, restore_timestamp_in_utc=restore_timestamp_in_utc, **kwargs) + super().__init__( + restore_source=restore_source, + restore_timestamp_in_utc=restore_timestamp_in_utc, + restore_with_ttl_disabled=restore_with_ttl_disabled, + **kwargs + ) self.restore_mode = restore_mode self.databases_to_restore = databases_to_restore self.gremlin_databases_to_restore = gremlin_databases_to_restore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_clusters_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_clusters_operations.py index 5061742402cd..efc8557f2c03 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_clusters_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_clusters_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +48,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -74,7 +74,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -104,7 +104,7 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -139,7 +139,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -174,7 +174,7 @@ def build_create_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -212,7 +212,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -250,7 +250,7 @@ def build_invoke_command_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -288,7 
+288,7 @@ def build_deallocate_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -323,7 +323,7 @@ def build_start_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -358,7 +358,7 @@ def build_status_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -437,7 +437,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -453,7 +452,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -517,7 +515,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -533,7 +530,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -597,7 +593,6 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -611,16 +606,14 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = self._deserialize("ClusterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: + def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -633,7 +626,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", 
{}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -643,10 +636,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -654,11 +647,19 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: @@ -682,7 +683,7 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -691,6 +692,7 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -718,7 +720,7 @@ def _create_update_initial( cluster_name: str, body: Union[_models.ClusterResource, IO[bytes]], **kwargs: Any - ) -> _models.ClusterResource: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -732,7 +734,7 @@ def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ClusterResource] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -753,10 +755,10 @@ def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -764,14 +766,14 @@ def _create_update_initial( response = pipeline_response.http_response 
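Illustrative only: the `_delete_initial`/`_create_update_initial` rework for managed Cassandra clusters is likewise internal, and callers keep using the synchronous pollers. A sketch with placeholder names, assuming `azure-identity` for the credential:

```python
# Sketch only: callers keep using the public poller; the streaming change is internal.
# Placeholder names throughout; assumes azure-identity is installed for the credential.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.cassandra_clusters.begin_delete(
    resource_group_name="<resource-group>",
    cluster_name="<managed-cassandra-cluster>",
).result()
```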
if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ClusterResource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -883,10 +885,11 @@ def begin_create_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = self._deserialize("ClusterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -914,7 +917,7 @@ def _update_initial( cluster_name: str, body: Union[_models.ClusterResource, IO[bytes]], **kwargs: Any - ) -> _models.ClusterResource: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -928,7 +931,7 @@ def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.ClusterResource] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -949,10 +952,10 @@ def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -960,14 +963,14 @@ def _update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ClusterResource", pipeline_response) - - if response.status_code == 202: - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1074,10 +1077,11 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ClusterResource", pipeline_response) + deserialized = self._deserialize("ClusterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1105,7 +1109,7 @@ def 
_invoke_command_initial( cluster_name: str, body: Union[_models.CommandPostBody, IO[bytes]], **kwargs: Any - ) -> _models.CommandOutput: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1119,7 +1123,7 @@ def _invoke_command_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.CommandOutput] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1140,10 +1144,10 @@ def _invoke_command_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1151,10 +1155,14 @@ def _invoke_command_initial( response = pipeline_response.http_response if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CommandOutput", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1261,10 +1269,11 @@ def begin_invoke_command( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CommandOutput", pipeline_response) + deserialized = self._deserialize("CommandOutput", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1286,9 +1295,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _deallocate_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: + def _deallocate_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1301,7 +1308,7 @@ def _deallocate_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_deallocate_request( resource_group_name=resource_group_name, @@ -1311,10 +1318,10 @@ def _deallocate_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1322,11 +1329,19 @@ def _deallocate_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_deallocate(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: @@ -1352,7 +1367,7 @@ def begin_deallocate(self, resource_group_name: str, cluster_name: str, **kwargs lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._deallocate_initial( # type: ignore + raw_result = self._deallocate_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -1361,6 +1376,7 @@ def begin_deallocate(self, resource_group_name: str, cluster_name: str, **kwargs params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1382,9 +1398,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _start_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: + def _start_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1397,7 +1411,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1407,10 +1421,10 @@ def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1418,11 +1432,19 @@ def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, 
error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_start(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: @@ -1448,7 +1470,7 @@ def begin_start(self, resource_group_name: str, cluster_name: str, **kwargs: Any lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._start_initial( # type: ignore + raw_result = self._start_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -1457,6 +1479,7 @@ def begin_start(self, resource_group_name: str, cluster_name: str, **kwargs: Any params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1515,7 +1538,6 @@ def status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1529,7 +1551,7 @@ def status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CassandraClusterPublicStatus", pipeline_response) + deserialized = self._deserialize("CassandraClusterPublicStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_data_centers_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_data_centers_operations.py index 1a6529a9cd2b..a23316628a11 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_data_centers_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_data_centers_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +48,7 @@ def build_list_request(resource_group_name: str, cluster_name: str, subscription _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -83,7 +83,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -126,7 +126,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -169,7 +169,7 @@ def build_create_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -215,7 +215,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -314,7 +314,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -330,7 +329,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -399,7 +397,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -413,16 +410,16 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = self._deserialize("DataCenterResource", pipeline_response.http_response) if cls: return 
cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, cluster_name: str, data_center_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -435,7 +432,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -446,10 +443,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -457,11 +454,19 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -489,7 +494,7 @@ def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, data_center_name=data_center_name, @@ -499,6 +504,7 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -527,7 +533,7 @@ def _create_update_initial( data_center_name: str, body: Union[_models.DataCenterResource, IO[bytes]], **kwargs: Any - ) -> _models.DataCenterResource: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -541,7 +547,7 @@ def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataCenterResource] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -563,10 +569,10 @@ def _create_update_initial( headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -574,14 +580,14 @@ def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataCenterResource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -701,10 +707,11 @@ def begin_create_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = self._deserialize("DataCenterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -733,7 +740,7 @@ def _update_initial( data_center_name: str, body: Union[_models.DataCenterResource, IO[bytes]], **kwargs: Any - ) -> _models.DataCenterResource: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -747,7 +754,7 @@ def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataCenterResource] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -769,10 +776,10 @@ def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -780,14 +787,14 @@ def _update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DataCenterResource", pipeline_response) - - if response.status_code == 202: - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -904,10 +911,11 @@ def begin_update( params=_params, **kwargs ) + 
raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataCenterResource", pipeline_response) + deserialized = self._deserialize("DataCenterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_resources_operations.py index 7a55872d205a..9785bfec05c8 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_cassandra_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_cassandra_keyspaces_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +85,7 @@ def build_get_cassandra_keyspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_create_update_cassandra_keyspace_request( # pylint: disable=name-too- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -159,7 +159,7 @@ def build_delete_cassandra_keyspace_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -190,7 +190,7 @@ def build_get_cassandra_keyspace_throughput_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +226,7 @@ def build_update_cassandra_keyspace_throughput_request( # pylint: disable=name- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -265,7 +265,7 @@ def build_migrate_cassandra_keyspace_to_autoscale_request( # pylint: disable=na _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -301,7 +301,7 @@ def 
build_migrate_cassandra_keyspace_to_manual_throughput_request( # pylint: di _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -337,7 +337,7 @@ def build_list_cassandra_tables_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -378,7 +378,7 @@ def build_get_cassandra_table_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -420,7 +420,7 @@ def build_create_update_cassandra_table_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -464,7 +464,7 @@ def build_delete_cassandra_table_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -501,7 +501,7 @@ def build_get_cassandra_table_throughput_request( # pylint: disable=name-too-lo _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -543,7 +543,7 @@ def build_update_cassandra_table_throughput_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -588,7 +588,7 @@ def build_migrate_cassandra_table_to_autoscale_request( # pylint: disable=name- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -630,7 +630,7 @@ def build_migrate_cassandra_table_to_manual_throughput_request( # pylint: disab _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -721,7 +721,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -737,7 +736,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -807,7 +805,6 @@ def get_cassandra_keyspace( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -821,7 +818,7 @@ def get_cassandra_keyspace( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response) + deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -835,7 +832,7 @@ def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too-long keyspace_name: str, create_update_cassandra_keyspace_parameters: Union[_models.CassandraKeyspaceCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CassandraKeyspaceGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -849,7 +846,7 @@ def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too-long api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CassandraKeyspaceGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -873,10 +870,10 @@ def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too-long headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -884,20 +881,22 @@ def _create_update_cassandra_keyspace_initial( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1018,10 +1017,11 @@ def begin_create_update_cassandra_keyspace( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response) + deserialized = self._deserialize("CassandraKeyspaceGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1043,9 +1043,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-return-statements + def _delete_cassandra_keyspace_initial( self, resource_group_name: str, account_name: str, keyspace_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1058,7 +1058,7 @@ def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-return-s _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_cassandra_keyspace_request( resource_group_name=resource_group_name, @@ -1069,10 +1069,10 @@ def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-return-s headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1080,6 +1080,10 @@ def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-return-s response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1090,8 +1094,12 @@ def _delete_cassandra_keyspace_initial( # pylint: disable=inconsistent-return-s ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return 
cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_cassandra_keyspace( @@ -1119,7 +1127,7 @@ def begin_delete_cassandra_keyspace( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_cassandra_keyspace_initial( # type: ignore + raw_result = self._delete_cassandra_keyspace_initial( resource_group_name=resource_group_name, account_name=account_name, keyspace_name=keyspace_name, @@ -1129,6 +1137,7 @@ def begin_delete_cassandra_keyspace( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1191,7 +1200,6 @@ def get_cassandra_keyspace_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1205,7 +1213,7 @@ def get_cassandra_keyspace_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1219,7 +1227,7 @@ def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name-too-l keyspace_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1233,7 +1241,7 @@ def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name-too-l api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1255,10 +1263,10 @@ def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name-too-l headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1266,20 +1274,22 @@ def _update_cassandra_keyspace_throughput_initial( # pylint: disable=name-too-l response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = 
self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1400,10 +1410,11 @@ def begin_update_cassandra_keyspace_throughput( # pylint: disable=name-too-long params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1427,7 +1438,7 @@ def get_long_running_output(pipeline_response): def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1440,7 +1451,7 @@ def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=name-to _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_keyspace_to_autoscale_request( resource_group_name=resource_group_name, @@ -1451,10 +1462,10 @@ def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1462,20 +1473,22 @@ def _migrate_cassandra_keyspace_to_autoscale_initial( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1518,10 +1531,11 @@ def 
begin_migrate_cassandra_keyspace_to_autoscale( # pylint: disable=name-too-l params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1545,7 +1559,7 @@ def get_long_running_output(pipeline_response): def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1558,7 +1572,7 @@ def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: disable _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_keyspace_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -1569,10 +1583,10 @@ def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: disable headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1580,20 +1594,22 @@ def _migrate_cassandra_keyspace_to_manual_throughput_initial( # pylint: disable response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1636,10 +1652,11 @@ def begin_migrate_cassandra_keyspace_to_manual_throughput( # pylint: disable=na params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1705,7 +1722,6 @@ def 
prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1721,7 +1737,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1793,7 +1808,6 @@ def get_cassandra_table( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1807,7 +1821,7 @@ def get_cassandra_table( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CassandraTableGetResults", pipeline_response) + deserialized = self._deserialize("CassandraTableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1822,7 +1836,7 @@ def _create_update_cassandra_table_initial( table_name: str, create_update_cassandra_table_parameters: Union[_models.CassandraTableCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CassandraTableGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1836,7 +1850,7 @@ def _create_update_cassandra_table_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CassandraTableGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1861,10 +1875,10 @@ def _create_update_cassandra_table_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1872,20 +1886,22 @@ def _create_update_cassandra_table_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("CassandraTableGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2016,10 +2032,11 @@ def begin_create_update_cassandra_table( params=_params, **kwargs ) + raw_result.http_response.read() # type: 
ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CassandraTableGetResults", pipeline_response) + deserialized = self._deserialize("CassandraTableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2041,9 +2058,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_cassandra_table_initial( # pylint: disable=inconsistent-return-statements + def _delete_cassandra_table_initial( self, resource_group_name: str, account_name: str, keyspace_name: str, table_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2056,7 +2073,7 @@ def _delete_cassandra_table_initial( # pylint: disable=inconsistent-return-stat _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_cassandra_table_request( resource_group_name=resource_group_name, @@ -2068,10 +2085,10 @@ def _delete_cassandra_table_initial( # pylint: disable=inconsistent-return-stat headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2079,6 +2096,10 @@ def _delete_cassandra_table_initial( # pylint: disable=inconsistent-return-stat response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -2089,8 +2110,12 @@ def _delete_cassandra_table_initial( # pylint: disable=inconsistent-return-stat ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_cassandra_table( @@ -2120,7 +2145,7 @@ def begin_delete_cassandra_table( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_cassandra_table_initial( # type: ignore + raw_result = self._delete_cassandra_table_initial( resource_group_name=resource_group_name, account_name=account_name, keyspace_name=keyspace_name, @@ -2131,6 +2156,7 @@ def begin_delete_cassandra_table( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2196,7 +2222,6 @@ def 
get_cassandra_table_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2210,7 +2235,7 @@ def get_cassandra_table_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2225,7 +2250,7 @@ def _update_cassandra_table_throughput_initial( # pylint: disable=name-too-long table_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2239,7 +2264,7 @@ def _update_cassandra_table_throughput_initial( # pylint: disable=name-too-long api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2262,10 +2287,10 @@ def _update_cassandra_table_throughput_initial( # pylint: disable=name-too-long headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2273,20 +2298,22 @@ def _update_cassandra_table_throughput_initial( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2417,10 +2444,11 @@ def begin_update_cassandra_table_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # 
type: ignore return deserialized @@ -2444,7 +2472,7 @@ def get_long_running_output(pipeline_response): def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2457,7 +2485,7 @@ def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name-too-l _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_table_to_autoscale_request( resource_group_name=resource_group_name, @@ -2469,10 +2497,10 @@ def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name-too-l headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2480,20 +2508,22 @@ def _migrate_cassandra_table_to_autoscale_initial( # pylint: disable=name-too-l response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2539,10 +2569,11 @@ def begin_migrate_cassandra_table_to_autoscale( # pylint: disable=name-too-long params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2566,7 +2597,7 @@ def get_long_running_output(pipeline_response): def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, keyspace_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2579,7 +2610,7 @@ def 
_migrate_cassandra_table_to_manual_throughput_initial( # pylint: disable=na _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_cassandra_table_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2591,10 +2622,10 @@ def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disable=na headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2602,20 +2633,22 @@ def _migrate_cassandra_table_to_manual_throughput_initial( # pylint: disable=na response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2661,10 +2694,11 @@ def begin_migrate_cassandra_table_to_manual_throughput( # pylint: disable=name- params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_operations.py index 2305916c12f0..8dcb3927c210 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -54,7 +52,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -99,7 +97,7 @@ def build_list_usages_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -143,7 +141,7 @@ def build_list_metric_definitions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -251,7 +249,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -267,7 +264,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -353,7 +349,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -369,7 +364,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -444,7 +438,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -460,7 +453,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_operations.py index 0de82212ec2b..16d710439e36 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from 
azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -54,7 +52,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -99,7 +97,7 @@ def build_list_usages_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -209,7 +207,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -225,7 +222,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -311,7 +307,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -327,7 +322,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_region_operations.py index 9b2ac9bef821..2b5df3c80452 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_partition_region_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -55,7 +53,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -169,7 +167,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -185,7 +182,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_region_operations.py index 4e86fbea6133..c1ae9995aa44 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_collection_region_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -55,7 +53,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -169,7 +167,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -185,7 +182,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_account_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_account_region_operations.py index f3ee3023a838..829d1060e3db 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_account_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_account_region_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -47,7 +45,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -145,7 +143,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -161,7 +158,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_accounts_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_accounts_operations.py index 3bd1da8625c2..751ed241e34e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_accounts_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_accounts_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +48,7 @@ def build_get_request(resource_group_name: str, account_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -83,7 +83,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -121,7 +121,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -158,7 +158,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -188,7 +188,7 @@ def build_failover_priority_change_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -221,7 +221,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -245,7 +245,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -277,7 +277,7 @@ def build_list_keys_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -312,7 +312,7 @@ def build_list_connection_strings_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -347,7 +347,7 @@ def build_offline_region_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -385,7 +385,7 @@ def build_online_region_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -423,7 +423,7 @@ def build_get_read_only_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -458,7 +458,7 @@ def build_list_read_only_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -493,7 +493,7 @@ def build_regenerate_key_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -525,7 +525,7 @@ def build_regenerate_key_request( def build_check_name_exists_request(account_name: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2024-08-15")) # Construct URL _url = kwargs.pop("template_url", "/providers/Microsoft.DocumentDB/databaseAccountNames/{accountName}") path_format_arguments = { @@ -548,7 +548,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -584,7 +584,7 @@ def build_list_usages_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -621,7 +621,7 @@ def build_list_metric_definitions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -704,7 +704,6 @@ def get(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _mo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -718,7 +717,7 @@ def get(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _mo map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -731,7 +730,7 @@ def _update_initial( account_name: str, update_parameters: Union[_models.DatabaseAccountUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseAccountGetResults: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -745,7 +744,7 @@ def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseAccountGetResults] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -766,10 +765,10 @@ def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -777,10 +776,14 @@ def _update_initial( response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close 
the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -888,10 +891,11 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -919,7 +923,7 @@ def _create_or_update_initial( account_name: str, create_update_parameters: Union[_models.DatabaseAccountCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseAccountGetResults: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -933,7 +937,7 @@ def _create_or_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseAccountGetResults] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -954,10 +958,10 @@ def _create_or_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -965,10 +969,14 @@ def _create_or_update_initial( response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1082,10 +1090,11 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response) + deserialized = self._deserialize("DatabaseAccountGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1107,9 +1116,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, account_name: 
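The hunks above only rework the private `_update_initial` / `_create_or_update_initial` / `_delete_initial` helpers: they now stream the first response (`_stream = True`) and hand an `Iterator[bytes]` to the poller, while the `begin_*` wrappers read the body up front and deserialize from `pipeline_response.http_response`. The public poller surface is untouched, so existing callers keep working. A minimal caller-side sketch, with placeholder resource names and a subscription id you would substitute:

```python
# Sketch of the unchanged public LRO surface; "my-rg" / "my-account" are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import DatabaseAccountCreateUpdateParameters, Location

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")

# begin_create_or_update still returns an LROPoller; result() blocks until the
# operation finishes and yields the deserialized DatabaseAccountGetResults.
poller = client.database_accounts.begin_create_or_update(
    resource_group_name="my-rg",
    account_name="my-account",
    create_update_parameters=DatabaseAccountCreateUpdateParameters(
        location="westus",
        locations=[Location(location_name="westus", failover_priority=0)],
    ),
)
account = poller.result()
print(account.document_endpoint)

# begin_delete returns LROPoller[None]; wait() just blocks until completion.
client.database_accounts.begin_delete("my-rg", "my-account").wait()
```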
str, **kwargs: Any - ) -> None: + def _delete_initial(self, resource_group_name: str, account_name: str, **kwargs: Any) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1122,7 +1129,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -1132,10 +1139,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1143,6 +1150,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1153,8 +1164,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, account_name: str, **kwargs: Any) -> LROPoller[None]: @@ -1178,7 +1193,7 @@ def begin_delete(self, resource_group_name: str, account_name: str, **kwargs: An lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, api_version=api_version, @@ -1187,6 +1202,7 @@ def begin_delete(self, resource_group_name: str, account_name: str, **kwargs: An params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1208,13 +1224,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _failover_priority_change_initial( # pylint: disable=inconsistent-return-statements + def _failover_priority_change_initial( self, resource_group_name: str, account_name: str, failover_parameters: Union[_models.FailoverPolicies, IO[bytes]], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 
404: ResourceNotFoundError, @@ -1228,7 +1244,7 @@ def _failover_priority_change_initial( # pylint: disable=inconsistent-return-st api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1249,10 +1265,10 @@ def _failover_priority_change_initial( # pylint: disable=inconsistent-return-st headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1260,6 +1276,10 @@ def _failover_priority_change_initial( # pylint: disable=inconsistent-return-st response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1270,8 +1290,12 @@ def _failover_priority_change_initial( # pylint: disable=inconsistent-return-st ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload def begin_failover_priority_change( @@ -1368,7 +1392,7 @@ def begin_failover_priority_change( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._failover_priority_change_initial( # type: ignore + raw_result = self._failover_priority_change_initial( resource_group_name=resource_group_name, account_name=account_name, failover_parameters=failover_parameters, @@ -1379,6 +1403,7 @@ def begin_failover_priority_change( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1432,7 +1457,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1448,7 +1472,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1515,7 +1538,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1531,7 +1553,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1597,7 +1618,6 @@ def list_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1611,7 +1631,7 @@ def list_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListKeysResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListKeysResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1655,7 +1675,6 @@ def list_connection_strings( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1669,20 +1688,20 @@ def list_connection_strings( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListConnectionStringsResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListConnectionStringsResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _offline_region_initial( # pylint: disable=inconsistent-return-statements + def _offline_region_initial( self, resource_group_name: str, account_name: str, region_parameter_for_offline: Union[_models.RegionForOnlineOffline, IO[bytes]], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1696,7 +1715,7 @@ def _offline_region_initial( # pylint: disable=inconsistent-return-statements api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1717,10 +1736,10 @@ def _offline_region_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1728,6 +1747,10 @@ def _offline_region_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -1739,8 +1762,12 @@ def _offline_region_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", 
response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload def begin_offline_region( @@ -1831,7 +1858,7 @@ def begin_offline_region( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._offline_region_initial( # type: ignore + raw_result = self._offline_region_initial( resource_group_name=resource_group_name, account_name=account_name, region_parameter_for_offline=region_parameter_for_offline, @@ -1842,6 +1869,7 @@ def begin_offline_region( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1863,13 +1891,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _online_region_initial( # pylint: disable=inconsistent-return-statements + def _online_region_initial( self, resource_group_name: str, account_name: str, region_parameter_for_online: Union[_models.RegionForOnlineOffline, IO[bytes]], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1883,7 +1911,7 @@ def _online_region_initial( # pylint: disable=inconsistent-return-statements api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1904,10 +1932,10 @@ def _online_region_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1915,6 +1943,10 @@ def _online_region_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -1926,8 +1958,12 @@ def _online_region_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, 
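Because the initial call is now streamed, the error path first drains the body with `response.read()` before `map_error` and `HttpResponseError` fire, so the exception raised to the caller can still be built from the service payload. Caller-side error handling is unchanged; a sketch with placeholder names and region:

```python
# Sketch: error handling around the region online/offline pollers.
from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import RegionForOnlineOffline

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")

try:
    client.database_accounts.begin_online_region(
        "my-rg", "my-account", RegionForOnlineOffline(region="East US")
    ).result()
except HttpResponseError as exc:
    # exc.model holds the failsafe-deserialized ErrorResponse when the service
    # returned one; status_code and message come from the drained response.
    print(exc.status_code, exc.message)
```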
deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload def begin_online_region( @@ -2018,7 +2054,7 @@ def begin_online_region( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._online_region_initial( # type: ignore + raw_result = self._online_region_initial( resource_group_name=resource_group_name, account_name=account_name, region_parameter_for_online=region_parameter_for_online, @@ -2029,6 +2065,7 @@ def begin_online_region( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2087,7 +2124,6 @@ def get_read_only_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2101,7 +2137,7 @@ def get_read_only_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2145,7 +2181,6 @@ def list_read_only_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2159,20 +2194,20 @@ def list_read_only_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response) + deserialized = self._deserialize("DatabaseAccountListReadOnlyKeysResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _regenerate_key_initial( # pylint: disable=inconsistent-return-statements + def _regenerate_key_initial( self, resource_group_name: str, account_name: str, key_to_regenerate: Union[_models.DatabaseAccountRegenerateKeyParameters, IO[bytes]], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2186,7 +2221,7 @@ def _regenerate_key_initial( # pylint: disable=inconsistent-return-statements api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2207,10 +2242,10 @@ def _regenerate_key_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2218,6 +2253,10 @@ def 
_regenerate_key_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -2228,8 +2267,12 @@ def _regenerate_key_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @overload def begin_regenerate_key( @@ -2318,7 +2361,7 @@ def begin_regenerate_key( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._regenerate_key_initial( # type: ignore + raw_result = self._regenerate_key_initial( resource_group_name=resource_group_name, account_name=account_name, key_to_regenerate=key_to_regenerate, @@ -2329,6 +2372,7 @@ def begin_regenerate_key( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2382,7 +2426,6 @@ def check_name_exists(self, account_name: str, **kwargs: Any) -> bool: headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2445,7 +2488,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2461,7 +2503,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2535,7 +2576,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2551,7 +2591,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2620,7 +2659,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2636,7 +2674,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_operations.py 
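In every paged operation the `prepare_request` helper simply drops the `_convert_request` shim, since the request builders already produce `azure.core.rest.HttpRequest` objects; iterating the returned `ItemPaged` is unaffected. A short sketch of consuming one of the listings touched above (names are placeholders):

```python
# Sketch: paging is lazy; further pages are fetched as you iterate.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")

for account in client.database_accounts.list_by_resource_group("my-rg"):
    print(account.name, account.location)
```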
b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_operations.py index 5ac9a39666c8..b8a13340d45f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_database_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -47,7 +45,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -90,7 +88,7 @@ def build_list_usages_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -128,7 +126,7 @@ def build_list_metric_definitions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +224,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -242,7 +239,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -324,7 +320,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -340,7 +335,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -412,7 +406,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -428,7 +421,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py index 0c35ce44e32d..2e7682c8c028 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_gremlin_databases_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +85,7 @@ def build_get_gremlin_database_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_create_update_gremlin_database_request( # pylint: disable=name-too-lo _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -159,7 +159,7 @@ def build_delete_gremlin_database_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -190,7 +190,7 @@ def build_get_gremlin_database_throughput_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +226,7 @@ def build_update_gremlin_database_throughput_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -265,7 +265,7 @@ def build_migrate_gremlin_database_to_autoscale_request( # pylint: disable=name _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -301,7 +301,7 @@ def build_migrate_gremlin_database_to_manual_throughput_request( # pylint: disa _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -337,7 +337,7 @@ def build_list_gremlin_graphs_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -378,7 +378,7 @@ def build_get_gremlin_graph_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -420,7 +420,7 @@ def build_create_update_gremlin_graph_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -464,7 +464,7 @@ def build_delete_gremlin_graph_request( 
) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -501,7 +501,7 @@ def build_get_gremlin_graph_throughput_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -543,7 +543,7 @@ def build_update_gremlin_graph_throughput_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -588,7 +588,7 @@ def build_migrate_gremlin_graph_to_autoscale_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -630,7 +630,7 @@ def build_migrate_gremlin_graph_to_manual_throughput_request( # pylint: disable _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -672,7 +672,7 @@ def build_retrieve_continuous_backup_information_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -766,7 +766,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -782,7 +781,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -852,7 +850,6 @@ def get_gremlin_database( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -866,7 +863,7 
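Aside from the streaming rework, the bulk of this diff is the mechanical `api-version` bump from `2024-05-15` to `2024-08-15` in every request builder. The literal in each builder is only a fallback; the operations pass `self._config.api_version`, so setting `api_version` when constructing the client pins every call, assuming the service still accepts the older version. A sketch with placeholder names:

```python
# Sketch: pinning an explicit api-version for the whole client.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient

client = CosmosDBManagementClient(
    DefaultAzureCredential(),
    "<subscription-id>",
    api_version="2024-05-15",
)
db = client.gremlin_resources.get_gremlin_database("my-rg", "my-account", "my-gremlin-db")
print(db.name)
```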
@@ def get_gremlin_database( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -880,7 +877,7 @@ def _create_update_gremlin_database_initial( database_name: str, create_update_gremlin_database_parameters: Union[_models.GremlinDatabaseCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.GremlinDatabaseGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -894,7 +891,7 @@ def _create_update_gremlin_database_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.GremlinDatabaseGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -918,10 +915,10 @@ def _create_update_gremlin_database_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -929,20 +926,22 @@ def _create_update_gremlin_database_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1063,10 +1062,11 @@ def begin_create_update_gremlin_database( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("GremlinDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1088,9 +1088,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_gremlin_database_initial( # pylint: disable=inconsistent-return-statements + def _delete_gremlin_database_initial( self, resource_group_name: str, account_name: 
str, database_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1103,7 +1103,7 @@ def _delete_gremlin_database_initial( # pylint: disable=inconsistent-return-sta _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_gremlin_database_request( resource_group_name=resource_group_name, @@ -1114,10 +1114,10 @@ def _delete_gremlin_database_initial( # pylint: disable=inconsistent-return-sta headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1125,6 +1125,10 @@ def _delete_gremlin_database_initial( # pylint: disable=inconsistent-return-sta response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1135,8 +1139,12 @@ def _delete_gremlin_database_initial( # pylint: disable=inconsistent-return-sta ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_gremlin_database( @@ -1164,7 +1172,7 @@ def begin_delete_gremlin_database( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_gremlin_database_initial( # type: ignore + raw_result = self._delete_gremlin_database_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -1174,6 +1182,7 @@ def begin_delete_gremlin_database( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1236,7 +1245,6 @@ def get_gremlin_database_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1250,7 +1258,7 @@ def get_gremlin_database_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1264,7 +1272,7 @@ def 
_update_gremlin_database_throughput_initial( # pylint: disable=name-too-lon database_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1278,7 +1286,7 @@ def _update_gremlin_database_throughput_initial( # pylint: disable=name-too-lon api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1300,10 +1308,10 @@ def _update_gremlin_database_throughput_initial( # pylint: disable=name-too-lon headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1311,20 +1319,22 @@ def _update_gremlin_database_throughput_initial( # pylint: disable=name-too-lon response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1445,10 +1455,11 @@ def begin_update_gremlin_database_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1472,7 +1483,7 @@ def get_long_running_output(pipeline_response): def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1485,7 +1496,7 @@ def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=name-too- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_database_to_autoscale_request( resource_group_name=resource_group_name, @@ -1496,10 +1507,10 @@ def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=name-too- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1507,20 +1518,22 @@ def _migrate_gremlin_database_to_autoscale_initial( # pylint: disable=name-too- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1563,10 +1576,11 @@ def begin_migrate_gremlin_database_to_autoscale( # pylint: disable=name-too-lon params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1590,7 +1604,7 @@ def get_long_running_output(pipeline_response): def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1603,7 +1617,7 @@ def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: disable=n _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_database_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -1614,10 +1628,10 @@ def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: disable=n headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1625,20 +1639,22 @@ def _migrate_gremlin_database_to_manual_throughput_initial( # pylint: disable=n response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1681,10 +1697,11 @@ def begin_migrate_gremlin_database_to_manual_throughput( # pylint: disable=name params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1750,7 +1767,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1766,7 +1782,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1838,7 +1853,6 @@ def get_gremlin_graph( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1852,7 +1866,7 @@ def get_gremlin_graph( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response) + deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1867,7 +1881,7 @@ def _create_update_gremlin_graph_initial( graph_name: str, create_update_gremlin_graph_parameters: Union[_models.GremlinGraphCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.GremlinGraphGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1881,7 +1895,7 @@ def _create_update_gremlin_graph_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.GremlinGraphGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1904,10 +1918,10 @@ def _create_update_gremlin_graph_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1915,20 +1929,22 @@ def _create_update_gremlin_graph_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2059,10 +2075,11 @@ def begin_create_update_gremlin_graph( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response) + deserialized = self._deserialize("GremlinGraphGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2084,9 +2101,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return-statements + def _delete_gremlin_graph_initial( self, resource_group_name: str, account_name: str, database_name: str, graph_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2099,7 +2116,7 @@ def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return-statem _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_gremlin_graph_request( resource_group_name=resource_group_name, @@ -2111,10 +2128,10 @@ def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return-statem headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2122,6 +2139,10 @@ def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return-statem response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -2132,8 +2153,12 @@ def _delete_gremlin_graph_initial( # pylint: disable=inconsistent-return-statem ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_gremlin_graph( @@ -2163,7 +2188,7 @@ def begin_delete_gremlin_graph( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_gremlin_graph_initial( # type: ignore + raw_result = self._delete_gremlin_graph_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -2174,6 +2199,7 @@ def begin_delete_gremlin_graph( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2239,7 +2265,6 @@ def get_gremlin_graph_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2253,7 +2278,7 @@ def get_gremlin_graph_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2268,7 +2293,7 @@ def _update_gremlin_graph_throughput_initial( graph_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2282,7 +2307,7 @@ def _update_gremlin_graph_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2305,10 +2330,10 @@ def _update_gremlin_graph_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + 
_decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2316,20 +2341,22 @@ def _update_gremlin_graph_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2460,10 +2487,11 @@ def begin_update_gremlin_graph_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2487,7 +2515,7 @@ def get_long_running_output(pipeline_response): def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, graph_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2500,7 +2528,7 @@ def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-too-lon _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_graph_to_autoscale_request( resource_group_name=resource_group_name, @@ -2512,10 +2540,10 @@ def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-too-lon headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2523,20 +2551,22 @@ def _migrate_gremlin_graph_to_autoscale_initial( # pylint: disable=name-too-lon response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2582,10 +2612,11 @@ def begin_migrate_gremlin_graph_to_autoscale( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2609,7 +2640,7 @@ def get_long_running_output(pipeline_response): def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, graph_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2622,7 +2653,7 @@ def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disable=name _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_gremlin_graph_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2634,10 +2665,10 @@ def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disable=name headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2645,20 +2676,22 @@ def _migrate_gremlin_graph_to_manual_throughput_initial( # pylint: disable=name response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + 
if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2704,10 +2737,11 @@ def begin_migrate_gremlin_graph_to_manual_throughput( # pylint: disable=name-to params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2737,7 +2771,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too graph_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2751,7 +2785,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2774,10 +2808,10 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2785,12 +2819,14 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2917,10 +2953,11 @@ def begin_retrieve_continuous_backup_information( # pylint: disable=name-too-lo params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_locations_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_locations_operations.py index a704c5d714b6..18d9564090af 100644 --- 
a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_locations_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_locations_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -45,7 +43,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +67,7 @@ def build_get_request(location: str, subscription_id: str, **kwargs: Any) -> Htt _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -142,7 +140,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -158,7 +155,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -219,7 +215,6 @@ def get(self, location: str, **kwargs: Any) -> _models.LocationGetResult: headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -233,7 +228,7 @@ def get(self, location: str, **kwargs: Any) -> _models.LocationGetResult: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LocationGetResult", pipeline_response) + deserialized = self._deserialize("LocationGetResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py index 4b87abe9132c..28a675a2ae6d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_mongo_db_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing 
import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_mongo_db_databases_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +85,7 @@ def build_get_mongo_db_database_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_create_update_mongo_db_database_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -159,7 +159,7 @@ def build_delete_mongo_db_database_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -190,7 +190,7 @@ def build_get_mongo_db_database_throughput_request( # pylint: disable=name-too- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +226,7 @@ def build_update_mongo_db_database_throughput_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -265,7 +265,7 @@ def build_migrate_mongo_db_database_to_autoscale_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -301,7 +301,7 @@ def build_migrate_mongo_db_database_to_manual_throughput_request( # pylint: dis _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -337,7 +337,7 @@ def build_list_mongo_db_collections_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -378,7 +378,7 @@ def build_get_mongo_db_collection_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -420,7 +420,7 @@ def build_create_update_mongo_db_collection_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -464,7 +464,7 @@ def build_delete_mongo_db_collection_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -501,7 +501,7 @@ def build_get_mongo_db_collection_throughput_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept 
= _headers.pop("Accept", "application/json") # Construct URL @@ -543,7 +543,7 @@ def build_update_mongo_db_collection_throughput_request( # pylint: disable=name _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -588,7 +588,7 @@ def build_migrate_mongo_db_collection_to_autoscale_request( # pylint: disable=n _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -630,7 +630,7 @@ def build_migrate_mongo_db_collection_to_manual_throughput_request( # pylint: d _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -667,7 +667,7 @@ def build_get_mongo_role_definition_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -703,7 +703,7 @@ def build_create_update_mongo_role_definition_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -742,7 +742,7 @@ def build_delete_mongo_role_definition_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -778,7 +778,7 @@ def build_list_mongo_role_definitions_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -813,7 +813,7 @@ def 
build_get_mongo_user_definition_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -849,7 +849,7 @@ def build_create_update_mongo_user_definition_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -888,7 +888,7 @@ def build_delete_mongo_user_definition_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -924,7 +924,7 @@ def build_list_mongo_user_definitions_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -964,7 +964,7 @@ def build_retrieve_continuous_backup_information_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -1058,7 +1058,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1074,7 +1073,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1144,7 +1142,6 @@ def get_mongo_db_database( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1158,7 +1155,7 @@ def get_mongo_db_database( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBDatabaseGetResults", 
pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1172,7 +1169,7 @@ def _create_update_mongo_db_database_initial( database_name: str, create_update_mongo_db_database_parameters: Union[_models.MongoDBDatabaseCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.MongoDBDatabaseGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1186,7 +1183,7 @@ def _create_update_mongo_db_database_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoDBDatabaseGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1210,10 +1207,10 @@ def _create_update_mongo_db_database_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1221,20 +1218,22 @@ def _create_update_mongo_db_database_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1355,10 +1354,11 @@ def begin_create_update_mongo_db_database( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1380,9 +1380,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-return-statements + def _delete_mongo_db_database_initial( self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1395,7 +1395,7 @@ def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-return-st _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_db_database_request( resource_group_name=resource_group_name, @@ -1406,10 +1406,10 @@ def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-return-st headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1417,6 +1417,10 @@ def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-return-st response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -1427,8 +1431,12 @@ def _delete_mongo_db_database_initial( # pylint: disable=inconsistent-return-st ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_mongo_db_database( @@ -1456,7 +1464,7 @@ def begin_delete_mongo_db_database( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_mongo_db_database_initial( # type: ignore + raw_result = self._delete_mongo_db_database_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -1466,6 +1474,7 @@ def begin_delete_mongo_db_database( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1528,7 +1537,6 @@ def get_mongo_db_database_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1542,7 +1550,7 @@ def get_mongo_db_database_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1556,7 +1564,7 @@ def _update_mongo_db_database_throughput_initial( # pylint: disable=name-too-lo database_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, 
Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1570,7 +1578,7 @@ def _update_mongo_db_database_throughput_initial( # pylint: disable=name-too-lo api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1592,10 +1600,10 @@ def _update_mongo_db_database_throughput_initial( # pylint: disable=name-too-lo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1603,20 +1611,22 @@ def _update_mongo_db_database_throughput_initial( # pylint: disable=name-too-lo response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1737,10 +1747,11 @@ def begin_update_mongo_db_database_throughput( # pylint: disable=name-too-long params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1764,7 +1775,7 @@ def get_long_running_output(pipeline_response): def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1777,7 +1788,7 @@ def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=name-too _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_database_to_autoscale_request( 
resource_group_name=resource_group_name, @@ -1788,10 +1799,10 @@ def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=name-too headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1799,20 +1810,22 @@ def _migrate_mongo_db_database_to_autoscale_initial( # pylint: disable=name-too response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1855,10 +1868,11 @@ def begin_migrate_mongo_db_database_to_autoscale( # pylint: disable=name-too-lo params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1882,7 +1896,7 @@ def get_long_running_output(pipeline_response): def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1895,7 +1909,7 @@ def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: disable= _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_database_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -1906,10 +1920,10 @@ def _migrate_mongo_db_database_to_manual_throughput_initial( # pylint: disable= headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1917,20 +1931,22 @@ def 
_migrate_mongo_db_database_to_manual_throughput_initial( # pylint: disable= response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1973,10 +1989,11 @@ def begin_migrate_mongo_db_database_to_manual_throughput( # pylint: disable=nam params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2042,7 +2059,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2058,7 +2074,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2130,7 +2145,6 @@ def get_mongo_db_collection( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2144,7 +2158,7 @@ def get_mongo_db_collection( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2159,7 +2173,7 @@ def _create_update_mongo_db_collection_initial( # pylint: disable=name-too-long collection_name: str, create_update_mongo_db_collection_parameters: Union[_models.MongoDBCollectionCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.MongoDBCollectionGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2173,7 +2187,7 @@ def _create_update_mongo_db_collection_initial( # pylint: disable=name-too-long api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoDBCollectionGetResults]] = kwargs.pop("cls", 
None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2198,10 +2212,10 @@ def _create_update_mongo_db_collection_initial( # pylint: disable=name-too-long headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2209,20 +2223,22 @@ def _create_update_mongo_db_collection_initial( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2353,10 +2369,11 @@ def begin_create_update_mongo_db_collection( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response) + deserialized = self._deserialize("MongoDBCollectionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2378,9 +2395,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-return-statements + def _delete_mongo_db_collection_initial( self, resource_group_name: str, account_name: str, database_name: str, collection_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2393,7 +2410,7 @@ def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-return- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_db_collection_request( resource_group_name=resource_group_name, @@ -2405,10 +2422,10 @@ def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-return- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access _request, stream=_stream, **kwargs ) @@ -2416,6 +2433,10 @@ def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-return- response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -2426,8 +2447,12 @@ def _delete_mongo_db_collection_initial( # pylint: disable=inconsistent-return- ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_mongo_db_collection( @@ -2457,7 +2482,7 @@ def begin_delete_mongo_db_collection( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_mongo_db_collection_initial( # type: ignore + raw_result = self._delete_mongo_db_collection_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -2468,6 +2493,7 @@ def begin_delete_mongo_db_collection( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2533,7 +2559,6 @@ def get_mongo_db_collection_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2547,7 +2572,7 @@ def get_mongo_db_collection_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2562,7 +2587,7 @@ def _update_mongo_db_collection_throughput_initial( # pylint: disable=name-too- collection_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2576,7 +2601,7 @@ def _update_mongo_db_collection_throughput_initial( # pylint: disable=name-too- api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2599,10 +2624,10 @@ def _update_mongo_db_collection_throughput_initial( # pylint: disable=name-too- headers=_headers, params=_params, ) - 
_request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2610,20 +2635,22 @@ def _update_mongo_db_collection_throughput_initial( # pylint: disable=name-too- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2754,10 +2781,11 @@ def begin_update_mongo_db_collection_throughput( # pylint: disable=name-too-lon params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2781,7 +2809,7 @@ def get_long_running_output(pipeline_response): def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, collection_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2794,7 +2822,7 @@ def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable=name-t _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_collection_to_autoscale_request( resource_group_name=resource_group_name, @@ -2806,10 +2834,10 @@ def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable=name-t headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2817,20 +2845,22 @@ def _migrate_mongo_db_collection_to_autoscale_initial( # pylint: disable=name-t response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() 
# Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2876,10 +2906,11 @@ def begin_migrate_mongo_db_collection_to_autoscale( # pylint: disable=name-too- params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2903,7 +2934,7 @@ def get_long_running_output(pipeline_response): def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, collection_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2916,7 +2947,7 @@ def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: disabl _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_mongo_db_collection_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2928,10 +2959,10 @@ def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: disabl headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2939,20 +2970,22 @@ def _migrate_mongo_db_collection_to_manual_throughput_initial( # pylint: disabl response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = 
self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2998,10 +3031,11 @@ def begin_migrate_mongo_db_collection_to_manual_throughput( # pylint: disable=n params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3064,7 +3098,6 @@ def get_mongo_role_definition( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3078,7 +3111,7 @@ def get_mongo_role_definition( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3094,7 +3127,7 @@ def _create_update_mongo_role_definition_initial( # pylint: disable=name-too-lo _models.MongoRoleDefinitionCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.MongoRoleDefinitionGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3108,7 +3141,7 @@ def _create_update_mongo_role_definition_initial( # pylint: disable=name-too-lo api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoRoleDefinitionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3132,10 +3165,10 @@ def _create_update_mongo_role_definition_initial( # pylint: disable=name-too-lo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3143,12 +3176,14 @@ def _create_update_mongo_role_definition_initial( # pylint: disable=name-too-lo response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3275,10 +3310,11 @@ def begin_create_update_mongo_role_definition( # pylint: disable=name-too-long params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3300,9 +3336,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent-return-statements + def _delete_mongo_role_definition_initial( self, mongo_role_definition_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3315,7 +3351,7 @@ def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent-retur _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_role_definition_request( mongo_role_definition_id=mongo_role_definition_id, @@ -3326,10 +3362,10 @@ def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent-retur headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3337,11 +3373,19 @@ def _delete_mongo_role_definition_initial( # pylint: disable=inconsistent-retur response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_mongo_role_definition( @@ -3369,7 +3413,7 @@ def begin_delete_mongo_role_definition( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_mongo_role_definition_initial( # type: ignore + raw_result = self._delete_mongo_role_definition_initial( mongo_role_definition_id=mongo_role_definition_id, resource_group_name=resource_group_name, account_name=account_name, @@ -3379,6 +3423,7 @@ def begin_delete_mongo_role_definition( params=_params, **kwargs ) + 
raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -3441,7 +3486,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -3457,7 +3501,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -3527,7 +3570,6 @@ def get_mongo_user_definition( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3541,7 +3583,7 @@ def get_mongo_user_definition( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3557,7 +3599,7 @@ def _create_update_mongo_user_definition_initial( # pylint: disable=name-too-lo _models.MongoUserDefinitionCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.MongoUserDefinitionGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3571,7 +3613,7 @@ def _create_update_mongo_user_definition_initial( # pylint: disable=name-too-lo api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.MongoUserDefinitionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3595,10 +3637,10 @@ def _create_update_mongo_user_definition_initial( # pylint: disable=name-too-lo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3606,12 +3648,14 @@ def _create_update_mongo_user_definition_initial( # pylint: disable=name-too-lo response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3738,10 +3782,11 @@ def begin_create_update_mongo_user_definition( # pylint: 
disable=name-too-long params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("MongoUserDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3763,9 +3808,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent-return-statements + def _delete_mongo_user_definition_initial( self, mongo_user_definition_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3778,7 +3823,7 @@ def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent-retur _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_mongo_user_definition_request( mongo_user_definition_id=mongo_user_definition_id, @@ -3789,10 +3834,10 @@ def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent-retur headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3800,11 +3845,19 @@ def _delete_mongo_user_definition_initial( # pylint: disable=inconsistent-retur response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_mongo_user_definition( @@ -3832,7 +3885,7 @@ def begin_delete_mongo_user_definition( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_mongo_user_definition_initial( # type: ignore + raw_result = self._delete_mongo_user_definition_initial( mongo_user_definition_id=mongo_user_definition_id, resource_group_name=resource_group_name, account_name=account_name, @@ -3842,6 +3895,7 @@ def begin_delete_mongo_user_definition( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -3904,7 +3958,6 @@ def prepare_request(next_link=None): headers=_headers, 
params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -3920,7 +3973,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -3957,7 +4009,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too collection_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3971,7 +4023,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3994,10 +4046,10 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4005,12 +4057,14 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4137,10 +4191,11 @@ def begin_retrieve_continuous_backup_information( # pylint: disable=name-too-lo params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_notebook_workspaces_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_notebook_workspaces_operations.py index e806ac3360eb..66a5157170fb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_notebook_workspaces_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_notebook_workspaces_operations.py @@ -8,7 +8,7 @@ # 
-------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_by_database_account_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -89,7 +89,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -129,7 +129,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -172,7 +172,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -212,7 +212,7 @@ def build_list_connection_info_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -252,7 +252,7 @@ def build_regenerate_auth_token_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -292,7 +292,7 @@ def build_start_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -381,7 +381,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -397,7 +396,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -472,7 +470,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -487,7 +484,7 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookWorkspace", pipeline_response) + deserialized = self._deserialize("NotebookWorkspace", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -501,7 +498,7 @@ def _create_or_update_initial( notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], notebook_create_update_parameters: Union[_models.NotebookWorkspaceCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.NotebookWorkspace: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -515,7 +512,7 @@ def _create_or_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.NotebookWorkspace] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -537,10 +534,10 @@ def _create_or_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -548,11 +545,15 @@ def _create_or_update_initial( response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookWorkspace", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -677,10 +678,11 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("NotebookWorkspace", pipeline_response) + deserialized = self._deserialize("NotebookWorkspace", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -702,13 +704,13 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, account_name: str, notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -721,7 +723,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -732,10 +734,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -743,12 +745,20 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -781,7 +791,7 @@ def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, notebook_workspace_name=notebook_workspace_name, @@ -791,6 +801,7 @@ def begin_delete( params=_params, **kwargs ) + 
raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -857,7 +868,6 @@ def list_connection_info( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -872,20 +882,20 @@ def list_connection_info( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookWorkspaceConnectionInfoResult", pipeline_response) + deserialized = self._deserialize("NotebookWorkspaceConnectionInfoResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return-statements + def _regenerate_auth_token_initial( self, resource_group_name: str, account_name: str, notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -898,7 +908,7 @@ def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return-state _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_regenerate_auth_token_request( resource_group_name=resource_group_name, @@ -909,10 +919,10 @@ def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return-state headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -920,12 +930,20 @@ def _regenerate_auth_token_initial( # pylint: disable=inconsistent-return-state response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_regenerate_auth_token( @@ -958,7 +976,7 @@ def begin_regenerate_auth_token( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._regenerate_auth_token_initial( # type: ignore + raw_result = self._regenerate_auth_token_initial( resource_group_name=resource_group_name, account_name=account_name, 
notebook_workspace_name=notebook_workspace_name, @@ -968,6 +986,7 @@ def begin_regenerate_auth_token( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -989,13 +1008,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _start_initial( # pylint: disable=inconsistent-return-statements + def _start_initial( self, resource_group_name: str, account_name: str, notebook_workspace_name: Union[str, _models.NotebookWorkspaceName], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1008,7 +1027,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1019,10 +1038,10 @@ def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1030,12 +1049,20 @@ def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_start( @@ -1068,7 +1095,7 @@ def begin_start( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._start_initial( # type: ignore + raw_result = self._start_initial( resource_group_name=resource_group_name, account_name=account_name, notebook_workspace_name=notebook_workspace_name, @@ -1078,6 +1105,7 @@ def begin_start( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_operations.py index 300b0fdb646f..d66f30208822 100644 --- 
a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -45,7 +43,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +107,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -125,7 +122,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_operations.py index b061e1bfc900..bb31f8cebc45 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -55,7 +53,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -168,7 +166,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -184,7 +181,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_region_operations.py index 79e584de3aa3..9548be9c3a2b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_region_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_partition_key_range_id_region_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -56,7 +54,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -175,7 +173,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -191,7 +188,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_operations.py index a8557627c62c..347587af16df 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -47,7 +45,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -142,7 +140,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -158,7 +155,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_source_target_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_source_target_operations.py index ee8702c68cb1..767125c2a9c1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_source_target_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_source_target_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -54,7 +52,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -165,7 +163,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -181,7 +178,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_target_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_target_operations.py index f64e0de09ee6..c92c6dfea5f7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_target_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_percentile_target_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -47,7 +45,7 @@ def build_list_metrics_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -147,7 +145,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -163,7 +160,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_endpoint_connections_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_endpoint_connections_operations.py index f861cead1c1e..d0477388345b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_endpoint_connections_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_endpoint_connections_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_by_database_account_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -89,7 +89,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -131,7 +131,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -176,7 +176,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -268,7 +268,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -284,7 +283,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -353,7 +351,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -367,7 +364,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -381,7 +378,7 @@ def _create_or_update_initial( private_endpoint_connection_name: str, parameters: Union[_models.PrivateEndpointConnection, IO[bytes]], **kwargs: Any - ) -> Optional[_models.PrivateEndpointConnection]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -395,7 +392,7 @@ def _create_or_update_initial( api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -417,10 +414,10 @@ def _create_or_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -428,13 +425,15 @@ def _create_or_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponseAutoGenerated, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -550,10 +549,11 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -575,9 +575,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -590,7 +590,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -601,10 +601,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -612,12 
+612,20 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponseAutoGenerated, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -645,7 +653,7 @@ def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, private_endpoint_connection_name=private_endpoint_connection_name, @@ -655,6 +663,7 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_link_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_link_resources_operations.py index 3249f4c9ff67..3609926a7b24 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_link_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_link_resources_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -47,7 +45,7 @@ def build_list_by_database_account_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -82,7 +80,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -171,7 +169,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -187,7 +184,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -256,7 +252,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -270,7 +265,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateLinkResource", pipeline_response) + deserialized = self._deserialize("PrivateLinkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_database_accounts_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_database_accounts_operations.py index 8cd051b8ed0d..bd41d28ba253 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_database_accounts_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_database_accounts_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -45,7 +43,7 @@ def build_list_by_location_request(location: str, subscription_id: str, **kwargs _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -73,7 +71,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -99,7 +97,7 @@ def build_get_by_location_request(location: str, instance_id: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -182,7 +180,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -198,7 +195,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -261,7 +257,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -277,7 +272,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -344,7 +338,6 @@ def get_by_location( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -358,7 +351,7 @@ def get_by_location( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("RestorableDatabaseAccountGetResult", pipeline_response) + deserialized = self._deserialize("RestorableDatabaseAccountGetResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_databases_operations.py index ef0590a8ade6..fa72e6a2b402 100644 --- 
a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_databases_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_databases_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -45,7 +43,7 @@ def build_list_request(location: str, instance_id: str, subscription_id: str, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -134,7 +132,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -150,7 +147,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_graphs_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_graphs_operations.py index 2c4f835fc010..fc9bb5bcdeba 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_graphs_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_graphs_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -54,7 +52,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -166,7 +164,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -182,7 +179,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_resources_operations.py index c9877dad1b33..58997b340881 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_gremlin_resources_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -53,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -161,7 +159,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -177,7 +174,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_collections_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_collections_operations.py index 57b39e7efc9f..04bd3e408fe3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_collections_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_collections_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -54,7 +52,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -166,7 +164,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -182,7 +179,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_databases_operations.py index ae59bd932d2d..413da88e3555 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_databases_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_databases_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request
if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
@@ -45,7 +43,7 @@ def build_list_request(location: str, instance_id: str, subscription_id: str, **
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
-    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15"))
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
@@ -134,7 +132,6 @@ def prepare_request(next_link=None):
                    headers=_headers,
                    params=_params,
                )
-                _request = _convert_request(_request)
                _request.url = self._client.format_url(_request.url)
            else:
@@ -150,7 +147,6 @@ def prepare_request(next_link=None):
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
-                _request = _convert_request(_request)
                _request.url = self._client.format_url(_request.url)
                _request.method = "GET"
            return _request
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_resources_operations.py
index 92209f4fdc88..eb552e716c35 100644
--- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_resources_operations.py
+++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_mongodb_resources_operations.py
@@ -20,15 +20,13 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ..
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -53,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -161,7 +159,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -177,7 +174,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_containers_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_containers_operations.py index a03339e2eebe..fc7922e44c1a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_containers_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_containers_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -54,7 +52,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -165,7 +163,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -181,7 +178,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_databases_operations.py index 476ffc0656e4..04cbe6e7b6e5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_databases_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_databases_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -45,7 +43,7 @@ def build_list_request(location: str, instance_id: str, subscription_id: str, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -134,7 +132,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -150,7 +147,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_resources_operations.py index e92ea922bea7..073e092e64d1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_sql_resources_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -53,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -161,7 +159,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -177,7 +174,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_table_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_table_resources_operations.py index 5400a09def55..8bb7de833e27 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_table_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_table_resources_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -53,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -160,7 +158,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -176,7 +173,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_tables_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_tables_operations.py index 69d0e15c2d09..a219965b5c33 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_tables_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_restorable_tables_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request
if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
@@ -53,7 +51,7 @@ def build_list_request(
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
-    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15"))
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
@@ -155,7 +153,6 @@ def prepare_request(next_link=None):
                    headers=_headers,
                    params=_params,
                )
-                _request = _convert_request(_request)
                _request.url = self._client.format_url(_request.url)
            else:
@@ -171,7 +168,6 @@ def prepare_request(next_link=None):
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
-                _request = _convert_request(_request)
                _request.url = self._client.format_url(_request.url)
                _request.method = "GET"
            return _request
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_service_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_service_operations.py
index 5aa131a3adb8..38a0459cc8ed 100644
--- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_service_operations.py
+++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_service_operations.py
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -17,13 +17,14 @@
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
+    StreamClosedError,
+    StreamConsumedError,
    map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -31,7 +32,6 @@
from ..
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +48,7 @@ def build_list_request(resource_group_name: str, account_name: str, subscription _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -83,7 +83,7 @@ def build_create_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -122,7 +122,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -158,7 +158,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -245,7 +245,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -261,7 +260,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -297,7 +295,7 @@ def _create_initial( service_name: str, create_update_parameters: Union[_models.ServiceResourceCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ServiceResource]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -311,7 +309,7 @@ def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ServiceResource]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -333,10 +331,10 @@ def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -344,20 +342,22 @@ def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ServiceResource", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -475,10 +475,11 @@ def begin_create( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ServiceResource", pipeline_response) + deserialized = self._deserialize("ServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -540,7 +541,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -554,16 +554,16 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ServiceResource", pipeline_response) + deserialized = self._deserialize("ServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, account_name: str, service_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -576,7 +576,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -587,10 +587,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -598,6 +598,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 
202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -608,8 +612,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -637,7 +645,7 @@ def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, account_name=account_name, service_name=service_name, @@ -647,6 +655,7 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_sql_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_sql_resources_operations.py index 5eb7aad6dd6a..6ac69c082d8f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_sql_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_sql_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_sql_databases_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +85,7 @@ def build_get_sql_database_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_create_update_sql_database_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -159,7 +159,7 @@ def build_delete_sql_database_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -190,7 +190,7 @@ def build_get_sql_database_throughput_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +226,7 @@ def build_update_sql_database_throughput_request( # pylint: disable=name-too-lo _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -265,7 +265,7 @@ def build_migrate_sql_database_to_autoscale_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -301,7 +301,7 @@ def build_migrate_sql_database_to_manual_throughput_request( # pylint: disable= _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -337,7 +337,7 @@ def build_list_sql_containers_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -378,7 +378,7 @@ def build_get_sql_container_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -420,7 +420,7 @@ def build_create_update_sql_container_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -464,7 +464,7 @@ def build_delete_sql_container_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -501,7 +501,7 @@ def build_get_sql_container_throughput_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -543,7 +543,7 @@ def build_update_sql_container_throughput_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -588,7 +588,7 @@ def build_migrate_sql_container_to_autoscale_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -630,7 +630,7 @@ def build_migrate_sql_container_to_manual_throughput_request( # pylint: disable _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -667,7 +667,7 @@ def build_list_client_encryption_keys_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -708,7 +708,7 @@ def build_get_client_encryption_key_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -750,7 +750,7 @@ def build_create_update_client_encryption_key_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -795,7 +795,7 @@ def build_list_sql_stored_procedures_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -838,7 +838,7 @@ def build_get_sql_stored_procedure_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -882,7 +882,7 @@ def build_create_update_sql_stored_procedure_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -928,7 +928,7 @@ def 
build_delete_sql_stored_procedure_request( # pylint: disable=name-too-long ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -966,7 +966,7 @@ def build_list_sql_user_defined_functions_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1009,7 +1009,7 @@ def build_get_sql_user_defined_function_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1053,7 +1053,7 @@ def build_create_update_sql_user_defined_function_request( # pylint: disable=na _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -1099,7 +1099,7 @@ def build_delete_sql_user_defined_function_request( # pylint: disable=name-too- ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -1137,7 +1137,7 @@ def build_list_sql_triggers_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1180,7 +1180,7 @@ def build_get_sql_trigger_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1224,7 +1224,7 @@ def build_create_update_sql_trigger_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -1270,7 +1270,7 @@ def build_delete_sql_trigger_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -1303,7 +1303,7 @@ def build_get_sql_role_definition_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1339,7 +1339,7 @@ def build_create_update_sql_role_definition_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -1378,7 +1378,7 @@ def build_delete_sql_role_definition_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1414,7 +1414,7 @@ def build_list_sql_role_definitions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1449,7 +1449,7 @@ def build_get_sql_role_assignment_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1485,7 +1485,7 @@ def build_create_update_sql_role_assignment_request( # pylint: disable=name-too _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -1524,7 +1524,7 @@ def build_delete_sql_role_assignment_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1560,7 +1560,7 @@ def build_list_sql_role_assignments_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1600,7 +1600,7 @@ def build_retrieve_continuous_backup_information_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -1694,7 +1694,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1710,7 +1709,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1780,7 +1778,6 @@ def get_sql_database( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1794,7 +1791,7 @@ def get_sql_database( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1808,7 +1805,7 @@ def _create_update_sql_database_initial( database_name: str, create_update_sql_database_parameters: Union[_models.SqlDatabaseCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlDatabaseGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1822,7 +1819,7 @@ def _create_update_sql_database_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlDatabaseGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1844,10 +1841,10 @@ def _create_update_sql_database_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = 
True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1855,20 +1852,22 @@ def _create_update_sql_database_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1988,10 +1987,11 @@ def begin_create_update_sql_database( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response) + deserialized = self._deserialize("SqlDatabaseGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2013,9 +2013,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_sql_database_initial( # pylint: disable=inconsistent-return-statements + def _delete_sql_database_initial( self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2028,7 +2028,7 @@ def _delete_sql_database_initial( # pylint: disable=inconsistent-return-stateme _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_database_request( resource_group_name=resource_group_name, @@ -2039,10 +2039,10 @@ def _delete_sql_database_initial( # pylint: disable=inconsistent-return-stateme headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2050,6 +2050,10 @@ def _delete_sql_database_initial( # pylint: disable=inconsistent-return-stateme response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -2060,8 +2064,12 
@@ def _delete_sql_database_initial( # pylint: disable=inconsistent-return-stateme ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_database( @@ -2089,7 +2097,7 @@ def begin_delete_sql_database( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_database_initial( # type: ignore + raw_result = self._delete_sql_database_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -2099,6 +2107,7 @@ def begin_delete_sql_database( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2161,7 +2170,6 @@ def get_sql_database_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2175,7 +2183,7 @@ def get_sql_database_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2189,7 +2197,7 @@ def _update_sql_database_throughput_initial( database_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2203,7 +2211,7 @@ def _update_sql_database_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2225,10 +2233,10 @@ def _update_sql_database_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2236,20 +2244,22 @@ def _update_sql_database_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2370,10 +2380,11 @@ def begin_update_sql_database_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2397,7 +2408,7 @@ def get_long_running_output(pipeline_response): def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2410,7 +2421,7 @@ def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-too-long _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_database_to_autoscale_request( resource_group_name=resource_group_name, @@ -2421,10 +2432,10 @@ def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-too-long headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2432,20 +2443,22 @@ def _migrate_sql_database_to_autoscale_initial( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, 
deserialized, response_headers) # type: ignore @@ -2488,10 +2501,11 @@ def begin_migrate_sql_database_to_autoscale( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2515,7 +2529,7 @@ def get_long_running_output(pipeline_response): def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2528,7 +2542,7 @@ def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable=name- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_database_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -2539,10 +2553,10 @@ def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable=name- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2550,20 +2564,22 @@ def _migrate_sql_database_to_manual_throughput_initial( # pylint: disable=name- response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2606,10 +2622,11 @@ def begin_migrate_sql_database_to_manual_throughput( # pylint: disable=name-too params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized 
@@ -2675,7 +2692,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -2691,7 +2707,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -2763,7 +2778,6 @@ def get_sql_container( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2777,7 +2791,7 @@ def get_sql_container( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlContainerGetResults", pipeline_response) + deserialized = self._deserialize("SqlContainerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2792,7 +2806,7 @@ def _create_update_sql_container_initial( container_name: str, create_update_sql_container_parameters: Union[_models.SqlContainerCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlContainerGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2806,7 +2820,7 @@ def _create_update_sql_container_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlContainerGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2829,10 +2843,10 @@ def _create_update_sql_container_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2840,20 +2854,22 @@ def _create_update_sql_container_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlContainerGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2983,10 +2999,11 @@ def begin_create_update_sql_container( params=_params, **kwargs ) + raw_result.http_response.read() # type: 
ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlContainerGetResults", pipeline_response) + deserialized = self._deserialize("SqlContainerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3008,9 +3025,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_sql_container_initial( # pylint: disable=inconsistent-return-statements + def _delete_sql_container_initial( self, resource_group_name: str, account_name: str, database_name: str, container_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3023,7 +3040,7 @@ def _delete_sql_container_initial( # pylint: disable=inconsistent-return-statem _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_container_request( resource_group_name=resource_group_name, @@ -3035,10 +3052,10 @@ def _delete_sql_container_initial( # pylint: disable=inconsistent-return-statem headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3046,6 +3063,10 @@ def _delete_sql_container_initial( # pylint: disable=inconsistent-return-statem response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -3056,8 +3077,12 @@ def _delete_sql_container_initial( # pylint: disable=inconsistent-return-statem ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_container( @@ -3087,7 +3112,7 @@ def begin_delete_sql_container( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_container_initial( # type: ignore + raw_result = self._delete_sql_container_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -3098,6 +3123,7 @@ def begin_delete_sql_container( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -3163,7 +3189,6 @@ def get_sql_container_throughput( 
headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3177,7 +3202,7 @@ def get_sql_container_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3192,7 +3217,7 @@ def _update_sql_container_throughput_initial( container_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3206,7 +3231,7 @@ def _update_sql_container_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3229,10 +3254,10 @@ def _update_sql_container_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3240,20 +3265,22 @@ def _update_sql_container_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3384,10 +3411,11 @@ def begin_update_sql_container_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3411,7 +3439,7 @@ def get_long_running_output(pipeline_response): def _migrate_sql_container_to_autoscale_initial( # pylint: 
disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, container_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3424,7 +3452,7 @@ def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-too-lon _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_container_to_autoscale_request( resource_group_name=resource_group_name, @@ -3436,10 +3464,10 @@ def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-too-lon headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3447,20 +3475,22 @@ def _migrate_sql_container_to_autoscale_initial( # pylint: disable=name-too-lon response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3506,10 +3536,11 @@ def begin_migrate_sql_container_to_autoscale( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3533,7 +3564,7 @@ def get_long_running_output(pipeline_response): def _migrate_sql_container_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, database_name: str, container_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3546,7 +3577,7 @@ def _migrate_sql_container_to_manual_throughput_initial( # pylint: disable=name _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_sql_container_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -3558,10 +3589,10 @@ def _migrate_sql_container_to_manual_throughput_initial( # pylint: disable=name headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3569,20 +3600,22 @@ def _migrate_sql_container_to_manual_throughput_initial( # pylint: disable=name response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3628,10 +3661,11 @@ def begin_migrate_sql_container_to_manual_throughput( # pylint: disable=name-to params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3697,7 +3731,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -3713,7 +3746,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -3790,7 +3822,6 @@ def get_client_encryption_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -3804,7 +3835,7 @@ def get_client_encryption_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response) + deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3821,7 +3852,7 @@ def 
_create_update_client_encryption_key_initial( # pylint: disable=name-too-lo _models.ClientEncryptionKeyCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.ClientEncryptionKeyGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -3835,7 +3866,7 @@ def _create_update_client_encryption_key_initial( # pylint: disable=name-too-lo api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ClientEncryptionKeyGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -3860,10 +3891,10 @@ def _create_update_client_encryption_key_initial( # pylint: disable=name-too-lo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3871,20 +3902,22 @@ def _create_update_client_encryption_key_initial( # pylint: disable=name-too-lo response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4023,10 +4056,11 @@ def begin_create_update_client_encryption_key( # pylint: disable=name-too-long params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response) + deserialized = self._deserialize("ClientEncryptionKeyGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4095,7 +4129,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -4111,7 +4144,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -4192,7 +4224,6 @@ def get_sql_stored_procedure( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) _stream = False @@ -4206,7 +4237,7 @@ def get_sql_stored_procedure( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response) + deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4224,7 +4255,7 @@ def _create_update_sql_stored_procedure_initial( # pylint: disable=name-too-lon _models.SqlStoredProcedureCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.SqlStoredProcedureGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4238,7 +4269,7 @@ def _create_update_sql_stored_procedure_initial( # pylint: disable=name-too-lon api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlStoredProcedureGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -4264,10 +4295,10 @@ def _create_update_sql_stored_procedure_initial( # pylint: disable=name-too-lon headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4275,20 +4306,22 @@ def _create_update_sql_stored_procedure_initial( # pylint: disable=name-too-lon response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4431,10 +4464,11 @@ def begin_create_update_sql_stored_procedure( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response) + deserialized = self._deserialize("SqlStoredProcedureGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4456,7 +4490,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # 
type: ignore ) - def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return-statements + def _delete_sql_stored_procedure_initial( self, resource_group_name: str, account_name: str, @@ -4464,7 +4498,7 @@ def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return container_name: str, stored_procedure_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4477,7 +4511,7 @@ def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_stored_procedure_request( resource_group_name=resource_group_name, @@ -4490,10 +4524,10 @@ def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4501,6 +4535,10 @@ def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -4511,8 +4549,12 @@ def _delete_sql_stored_procedure_initial( # pylint: disable=inconsistent-return ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_stored_procedure( @@ -4550,7 +4592,7 @@ def begin_delete_sql_stored_procedure( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_stored_procedure_initial( # type: ignore + raw_result = self._delete_sql_stored_procedure_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -4562,6 +4604,7 @@ def begin_delete_sql_stored_procedure( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -4631,7 +4674,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -4647,7 +4689,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - 
_request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -4728,7 +4769,6 @@ def get_sql_user_defined_function( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -4742,7 +4782,7 @@ def get_sql_user_defined_function( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response) + deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4760,7 +4800,7 @@ def _create_update_sql_user_defined_function_initial( # pylint: disable=name-to _models.SqlUserDefinedFunctionCreateUpdateParameters, IO[bytes] ], **kwargs: Any - ) -> Optional[_models.SqlUserDefinedFunctionGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4774,7 +4814,7 @@ def _create_update_sql_user_defined_function_initial( # pylint: disable=name-to api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlUserDefinedFunctionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -4800,10 +4840,10 @@ def _create_update_sql_user_defined_function_initial( # pylint: disable=name-to headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -4811,20 +4851,22 @@ def _create_update_sql_user_defined_function_initial( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4970,10 +5012,11 @@ def begin_create_update_sql_user_defined_function( # pylint: disable=name-too-l params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", 
pipeline_response) + deserialized = self._deserialize("SqlUserDefinedFunctionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4995,7 +5038,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-return-statements,name-too-long + def _delete_sql_user_defined_function_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, @@ -5003,7 +5046,7 @@ def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-r container_name: str, user_defined_function_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5016,7 +5059,7 @@ def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-r _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_user_defined_function_request( resource_group_name=resource_group_name, @@ -5029,10 +5072,10 @@ def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-r headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5040,6 +5083,10 @@ def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-r response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -5050,8 +5097,12 @@ def _delete_sql_user_defined_function_initial( # pylint: disable=inconsistent-r ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_user_defined_function( @@ -5089,7 +5140,7 @@ def begin_delete_sql_user_defined_function( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_user_defined_function_initial( # type: ignore + raw_result = self._delete_sql_user_defined_function_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -5101,6 +5152,7 @@ def begin_delete_sql_user_defined_function( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def 
get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -5169,7 +5221,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -5185,7 +5236,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -5266,7 +5316,6 @@ def get_sql_trigger( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -5280,7 +5329,7 @@ def get_sql_trigger( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response) + deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5296,7 +5345,7 @@ def _create_update_sql_trigger_initial( trigger_name: str, create_update_sql_trigger_parameters: Union[_models.SqlTriggerCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlTriggerGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5310,7 +5359,7 @@ def _create_update_sql_trigger_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlTriggerGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -5334,10 +5383,10 @@ def _create_update_sql_trigger_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5345,20 +5394,22 @@ def _create_update_sql_trigger_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -5498,10 +5549,11 @@ def begin_create_update_sql_trigger( 
params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response) + deserialized = self._deserialize("SqlTriggerGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -5523,7 +5575,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statements + def _delete_sql_trigger_initial( self, resource_group_name: str, account_name: str, @@ -5531,7 +5583,7 @@ def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statemen container_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5544,7 +5596,7 @@ def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statemen _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_trigger_request( resource_group_name=resource_group_name, @@ -5557,10 +5609,10 @@ def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statemen headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5568,6 +5620,10 @@ def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statemen response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -5578,8 +5634,12 @@ def _delete_sql_trigger_initial( # pylint: disable=inconsistent-return-statemen ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_trigger( @@ -5617,7 +5677,7 @@ def begin_delete_sql_trigger( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_trigger_initial( # type: ignore + raw_result = self._delete_sql_trigger_initial( resource_group_name=resource_group_name, account_name=account_name, database_name=database_name, @@ -5629,6 +5689,7 @@ def begin_delete_sql_trigger( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) 
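
The delete and create/update pollers regenerated above change only their internals: the private _initial helpers now return Iterator[bytes] and the begin_* wrappers read the initial response body before handing it to the poller. The public call shape is unchanged. A minimal caller-side sketch (assuming the sql_resources operation group; the subscription ID and resource names are placeholders):

    from azure.core.exceptions import HttpResponseError
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.cosmosdb import CosmosDBManagementClient

    # Placeholder subscription and resource names, for illustration only.
    client = CosmosDBManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
    )

    try:
        # Same signature as before the regeneration; the streaming change is internal.
        client.sql_resources.begin_delete_sql_trigger(
            resource_group_name="rg1",
            account_name="ddb1",
            database_name="databaseName",
            container_name="containerName",
            trigger_name="triggerName",
        ).result()
    except HttpResponseError as err:
        # Failures still surface as HttpResponseError; the regenerated helpers
        # simply buffer the streamed body before raising.
        print(err.status_code, err.message)
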
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -5690,7 +5751,6 @@ def get_sql_role_definition( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -5704,7 +5764,7 @@ def get_sql_role_definition( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5718,7 +5778,7 @@ def _create_update_sql_role_definition_initial( # pylint: disable=name-too-long account_name: str, create_update_sql_role_definition_parameters: Union[_models.SqlRoleDefinitionCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlRoleDefinitionGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5732,7 +5792,7 @@ def _create_update_sql_role_definition_initial( # pylint: disable=name-too-long api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlRoleDefinitionGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -5756,10 +5816,10 @@ def _create_update_sql_role_definition_initial( # pylint: disable=name-too-long headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5767,12 +5827,14 @@ def _create_update_sql_role_definition_initial( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5894,10 +5956,11 @@ def begin_create_update_sql_role_definition( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleDefinitionGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -5919,9 +5982,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, 
polling_method # type: ignore ) - def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-return-statements + def _delete_sql_role_definition_initial( self, role_definition_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -5934,7 +5997,7 @@ def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-return- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_role_definition_request( role_definition_id=role_definition_id, @@ -5945,10 +6008,10 @@ def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-return- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5956,11 +6019,19 @@ def _delete_sql_role_definition_initial( # pylint: disable=inconsistent-return- response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_role_definition( @@ -5988,7 +6059,7 @@ def begin_delete_sql_role_definition( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_role_definition_initial( # type: ignore + raw_result = self._delete_sql_role_definition_initial( role_definition_id=role_definition_id, resource_group_name=resource_group_name, account_name=account_name, @@ -5998,6 +6069,7 @@ def begin_delete_sql_role_definition( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -6060,7 +6132,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -6076,7 +6147,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -6145,7 +6215,6 @@ def get_sql_role_assignment( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ 
-6159,7 +6228,7 @@ def get_sql_role_assignment( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -6173,7 +6242,7 @@ def _create_update_sql_role_assignment_initial( # pylint: disable=name-too-long account_name: str, create_update_sql_role_assignment_parameters: Union[_models.SqlRoleAssignmentCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.SqlRoleAssignmentGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -6187,7 +6256,7 @@ def _create_update_sql_role_assignment_initial( # pylint: disable=name-too-long api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.SqlRoleAssignmentGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -6211,10 +6280,10 @@ def _create_update_sql_role_assignment_initial( # pylint: disable=name-too-long headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -6222,12 +6291,14 @@ def _create_update_sql_role_assignment_initial( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -6349,10 +6420,11 @@ def begin_create_update_sql_role_assignment( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response) + deserialized = self._deserialize("SqlRoleAssignmentGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -6374,9 +6446,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-return-statements + def _delete_sql_role_assignment_initial( self, role_assignment_id: str, resource_group_name: str, account_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: 
error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -6389,7 +6461,7 @@ def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-return- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_sql_role_assignment_request( role_assignment_id=role_assignment_id, @@ -6400,10 +6472,10 @@ def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-return- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -6411,11 +6483,19 @@ def _delete_sql_role_assignment_initial( # pylint: disable=inconsistent-return- response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_sql_role_assignment( @@ -6443,7 +6523,7 @@ def begin_delete_sql_role_assignment( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_sql_role_assignment_initial( # type: ignore + raw_result = self._delete_sql_role_assignment_initial( role_assignment_id=role_assignment_id, resource_group_name=resource_group_name, account_name=account_name, @@ -6453,6 +6533,7 @@ def begin_delete_sql_role_assignment( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -6515,7 +6596,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -6531,7 +6611,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -6568,7 +6647,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too container_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -6582,7 +6661,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: 
disable=name-too api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -6605,10 +6684,10 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -6616,12 +6695,14 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -6748,10 +6829,11 @@ def begin_retrieve_continuous_backup_information( # pylint: disable=name-too-lo params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_table_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_table_resources_operations.py index b2f03dd009ae..1d546b87320c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_table_resources_operations.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_table_resources_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from 
azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_list_tables_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +85,7 @@ def build_get_table_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_create_update_table_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -159,7 +159,7 @@ def build_delete_table_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) # Construct URL _url = kwargs.pop( "template_url", @@ -190,7 +190,7 @@ def build_get_table_throughput_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +226,7 @@ def build_update_table_throughput_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -265,7 +265,7 @@ def build_migrate_table_to_autoscale_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -301,7 +301,7 @@ def build_migrate_table_to_manual_throughput_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -337,7 +337,7 @@ def build_retrieve_continuous_backup_information_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-05-15")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-08-15")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -429,7 +429,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -445,7 +444,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -514,7 +512,6 @@ def get_table( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -528,7 +525,7 @@ def get_table( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TableGetResults", pipeline_response) + deserialized = self._deserialize("TableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -542,7 +539,7 @@ def _create_update_table_initial( table_name: str, create_update_table_parameters: Union[_models.TableCreateUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.TableGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -556,7 +553,7 @@ def _create_update_table_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.TableGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -578,10 +575,10 @@ def _create_update_table_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -589,20 +586,22 @@ def _create_update_table_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("TableGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -721,10 +720,11 @@ def begin_create_update_table( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TableGetResults", pipeline_response) + deserialized = self._deserialize("TableGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -746,9 +746,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_table_initial( # pylint: disable=inconsistent-return-statements + def _delete_table_initial( self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -761,7 +761,7 @@ def _delete_table_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_table_request( resource_group_name=resource_group_name, @@ -772,10 +772,10 @@ def _delete_table_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -783,6 +783,10 @@ def _delete_table_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) @@ -793,8 +797,12 @@ def _delete_table_initial( # pylint: disable=inconsistent-return-statements ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete_table( @@ -822,7 +830,7 @@ def begin_delete_table( lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_table_initial( # type: ignore + raw_result = self._delete_table_initial( resource_group_name=resource_group_name, account_name=account_name, table_name=table_name, @@ -832,6 +840,7 @@ def begin_delete_table( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -894,7 +903,6 @@ def get_table_throughput( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -908,7 +916,7 @@ def get_table_throughput( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -922,7 +930,7 @@ def _update_table_throughput_initial( table_name: str, update_throughput_parameters: Union[_models.ThroughputSettingsUpdateParameters, IO[bytes]], **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -936,7 +944,7 @@ def _update_table_throughput_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -958,10 +966,10 @@ def _update_table_throughput_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -969,20 +977,22 @@ def _update_table_throughput_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1103,10 +1113,11 @@ def 
begin_update_table_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1130,7 +1141,7 @@ def get_long_running_output(pipeline_response): def _migrate_table_to_autoscale_initial( self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1143,7 +1154,7 @@ def _migrate_table_to_autoscale_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_table_to_autoscale_request( resource_group_name=resource_group_name, @@ -1154,10 +1165,10 @@ def _migrate_table_to_autoscale_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1165,20 +1176,22 @@ def _migrate_table_to_autoscale_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1221,10 +1234,11 @@ def begin_migrate_table_to_autoscale( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1248,7 +1262,7 @@ def get_long_running_output(pipeline_response): def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-too-long self, resource_group_name: str, account_name: str, table_name: str, **kwargs: Any - ) -> Optional[_models.ThroughputSettingsGetResults]: + ) -> 
Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1261,7 +1275,7 @@ def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-too-lon _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.ThroughputSettingsGetResults]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_migrate_table_to_manual_throughput_request( resource_group_name=resource_group_name, @@ -1272,10 +1286,10 @@ def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-too-lon headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1283,20 +1297,22 @@ def _migrate_table_to_manual_throughput_initial( # pylint: disable=name-too-lon response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) - if response.status_code == 202: response_headers["azure-AsyncOperation"] = self._deserialize( "str", response.headers.get("azure-AsyncOperation") ) response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1339,10 +1355,11 @@ def begin_migrate_table_to_manual_throughput( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response) + deserialized = self._deserialize("ThroughputSettingsGetResults", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1371,7 +1388,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too table_name: str, location: Union[_models.ContinuousBackupRestoreLocation, IO[bytes]], **kwargs: Any - ) -> Optional[_models.BackupInformation]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1385,7 +1402,7 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.BackupInformation]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ 
-1407,10 +1424,10 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1418,12 +1435,14 @@ def _retrieve_continuous_backup_information_initial( # pylint: disable=name-too response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1540,10 +1559,11 @@ def begin_retrieve_continuous_backup_information( # pylint: disable=name-too-lo params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("BackupInformation", pipeline_response) + deserialized = self._deserialize("BackupInformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/dev_requirements.txt b/sdk/cosmos/azure-mgmt-cosmosdb/dev_requirements.txt index f6457a93d5e8..6195bb36ac8e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/dev_requirements.txt +++ b/sdk/cosmos/azure-mgmt-cosmosdb/dev_requirements.txt @@ -1 +1,2 @@ --e ../../../tools/azure-sdk-tools \ No newline at end of file +-e ../../../tools/azure-sdk-tools +aiohttp \ No newline at end of file diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_create_update.py index 200bb9f43810..d0d47282c41a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_delete.py index d1b2fbe307f0..34a27716c774 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_get.py index 27dcb62f76ef..b681200555a5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_list.py index a1326d46e236..87facd9d5a86 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_autoscale.py index 837cfb5b5a97..d2e66c3d08f1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_autoscale.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_manual_throughput.py index 78ae20785aeb..5b3d3d03817e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_migrate_to_manual_throughput.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_get.py index 530823fa98e1..8ab3ecb8a6aa 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_update.py index 5f33145b739a..3f0dafdde918 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_keyspace_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraKeyspaceThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraKeyspaceThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_create_update.py index b42cac4704da..35dfa23c1af0 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -57,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_delete.py index 6ca5d7ec1dc4..32b49df3d655 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_delete.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_get.py index 32d9abb3d92e..4b275f44a625 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_list.py index 6ef7d35dd927..bdc53562a0ca 100644 --- 
a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_list.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_autoscale.py index 8226c73d974a..1479bdfcde87 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_autoscale.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_manual_throughput.py index 86834b8fccff..99f386509bd8 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_migrate_to_manual_throughput.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_get.py index fa3df823e4ea..11c17da25f83 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_update.py index e3d42c2f29c9..cdc2f3642045 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_update.py +++ 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_cassandra_table_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -46,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCassandraTableThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCassandraTableThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metric_definitions.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metric_definitions.py index 5e5a1d7586fb..8391dd734816 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metric_definitions.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metric_definitions.py @@ -40,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCollectionGetMetricDefinitions.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCollectionGetMetricDefinitions.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metrics.py similarity index 71% rename from sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_create_update.py rename to sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metrics.py index 5099f6faee93..00e1592e2daa 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_metrics.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -17,7 +15,7 @@ pip install azure-identity pip install azure-mgmt-cosmosdb # USAGE - python cosmos_db_mongo_db_database_create_update.py + python cosmos_db_collection_get_metrics.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -32,19 +30,17 @@ def main(): subscription_id="subid", ) - response = client.mongo_db_resources.begin_create_update_mongo_db_database( + response = client.collection.list_metrics( resource_group_name="rg1", account_name="ddb1", - database_name="databaseName", - create_update_mongo_db_database_parameters={ - "location": "West US", - "properties": {"options": {}, "resource": {"id": "databaseName"}}, - "tags": {}, - }, - ).result() - print(response) + database_rid="databaseRid", + collection_rid="collectionRid", + filter="$filter=(name.value eq 'Total Requests') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCollectionGetMetrics.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_usages.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_usages.py index c08b97e01219..1040c774cc3e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_usages.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_get_usages.py @@ -40,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCollectionGetUsages.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCollectionGetUsages.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_metrics.py similarity index 58% rename from sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_create_update.py rename to sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_metrics.py index 526a629ef7c4..930d99cdbbc7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_metrics.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -17,7 +15,7 @@ pip install azure-identity pip install azure-mgmt-cosmosdb # USAGE - python cosmos_db_mongo_db_collection_create_update.py + python cosmos_db_collection_partition_get_metrics.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -32,30 +30,17 @@ def main(): subscription_id="subid", ) - response = client.mongo_db_resources.begin_create_update_mongo_db_collection( + response = client.collection_partition.list_metrics( resource_group_name="rg1", account_name="ddb1", - database_name="databaseName", - collection_name="collectionName", - create_update_mongo_db_collection_parameters={ - "location": "West US", - "properties": { - "options": {}, - "resource": { - "id": "collectionName", - "indexes": [ - {"key": {"keys": ["_ts"]}, "options": {"expireAfterSeconds": 100, "unique": True}}, - {"key": {"keys": ["_id"]}}, - ], - "shardKey": {"testKey": "Hash"}, - }, - }, - "tags": {}, - }, - ).result() - print(response) - - -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionCreateUpdate.json + database_rid="databaseRid", + collection_rid="collectionRid", + filter="$filter=(name.value eq 'Max RUs Per Second') and timeGrain eq duration'PT1M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T23:58:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCollectionPartitionGetMetrics.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_usages.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_usages.py index a412c4f7288f..36916b71410f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_usages.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_get_usages.py @@ -40,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBCollectionPartitionGetUsages.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCollectionPartitionGetUsages.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_region_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_region_get_metrics.py new file mode 100644 index 000000000000..926a839123bd --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_collection_partition_region_get_metrics.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_collection_partition_region_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.collection_partition_region.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + region="North Europe", + database_rid="databaseRid", + collection_rid="collectionRid", + filter="$filter=(name.value eq 'Max RUs Per Second') and timeGrain eq duration'PT1M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T23:58:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBCollectionPartitionRegionGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_create.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_create.py index c4607653577c..541cd47f9166 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_create.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -43,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDataTransferServiceCreate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDataTransferServiceCreate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_delete.py index a644c65c9a85..738ed7730fe5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDataTransferServiceDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDataTransferServiceDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_get.py index e9ebfd9c2fb5..16f9f942ecba 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_data_transfer_service_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDataTransferServiceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDataTransferServiceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_check_name_exists.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_check_name_exists.py index 2b01f4817816..d0dbb1be5950 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_check_name_exists.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_check_name_exists.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountCheckNameExists.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountCheckNameExists.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_max.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_max.py index 8c3b56db4db2..e0167b8456e7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_max.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_max.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if 
the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -94,6 +92,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountCreateMax.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountCreateMax.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_min.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_min.py index 84b0f44c2f3a..671f089e0ce3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_min.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_create_min.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -47,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountCreateMin.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountCreateMin.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_delete.py index 1988297483b9..9fcf29ff8330 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_failover_priority_change.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_failover_priority_change.py index dc8c9a23ced6..b3bad8dcfae5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_failover_priority_change.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_failover_priority_change.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -44,6 +42,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountFailoverPriorityChange.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountFailoverPriorityChange.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get.py index 3127ec5b6091..e5b2f52eacd5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metric_definitions.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metric_definitions.py index beb62cf9b8e8..5e21e104709f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metric_definitions.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metric_definitions.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountGetMetricDefinitions.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountGetMetricDefinitions.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metrics.py new file mode 100644 index 000000000000..9de4c6b601c8 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_metrics.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_database_account_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.database_accounts.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + filter="$filter=(name.value eq 'Total Requests') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_usages.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_usages.py index e45a5707ee0e..9285dade5bdd 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_usages.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_get_usages.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountGetUsages.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountGetUsages.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list.py index a2a98a8af1be..8abc0de60dbb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_by_resource_group.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_by_resource_group.py index f27a05d7d27e..86dc9750f5eb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_by_resource_group.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_by_resource_group.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountListByResourceGroup.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings.py index 798f9710bfe3..cf198f855f4b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountListConnectionStrings.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountListConnectionStrings.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings_mongo.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings_mongo.py index 71d2bed8390c..8a8f1c912973 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings_mongo.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_connection_strings_mongo.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountListConnectionStringsMongo.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountListConnectionStringsMongo.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_keys.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_keys.py index 30d553720a04..53bca77168e5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_keys.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_keys.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountListKeys.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountListKeys.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_read_only_keys.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_read_only_keys.py index 4088f76c079b..c4791242cdc7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_read_only_keys.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_list_read_only_keys.py @@ -30,13 +30,13 @@ def main(): subscription_id="subid", ) - response = client.database_accounts.get_read_only_keys( + response = 
client.database_accounts.list_read_only_keys( resource_group_name="rg1", account_name="ddb1", ) print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountListReadOnlyKeys.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountListReadOnlyKeys.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_offline_region.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_offline_region.py index f7600c1f020e..8a98a18dc5a8 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_offline_region.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_offline_region.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -35,10 +33,10 @@ def main(): client.database_accounts.begin_offline_region( resource_group_name="rg1", account_name="ddb1", - region_parameter_for_offline=[{"region": "North Europe"}], + region_parameter_for_offline={"region": "North Europe"}, ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountOfflineRegion.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountOfflineRegion.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_online_region.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_online_region.py index 7e23e6c90051..085790fd4abb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_online_region.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_online_region.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -35,10 +33,10 @@ def main(): client.database_accounts.begin_online_region( resource_group_name="rg1", account_name="ddb1", - region_parameter_for_online=[{"region": "North Europe"}], + region_parameter_for_online={"region": "North Europe"}, ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountOnlineRegion.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountOnlineRegion.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_patch.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_patch.py index 3655914fddea..61a83a74408c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_patch.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_patch.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -84,6 +82,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountPatch.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountPatch.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_regenerate_key.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_regenerate_key.py index c796a80c6aa8..34924249be38 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_regenerate_key.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_regenerate_key.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -39,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseAccountRegenerateKey.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountRegenerateKey.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_region_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_region_get_metrics.py new file mode 100644 index 000000000000..054aecb0ec58 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_account_region_get_metrics.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_database_account_region_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.database_account_region.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + region="North Europe", + filter="$filter=(name.value eq 'Total Requests') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseAccountRegionGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metric_definitions.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metric_definitions.py index 395e9a14bf64..f3c9a3497e76 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metric_definitions.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metric_definitions.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseGetMetricDefinitions.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseGetMetricDefinitions.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metrics.py new file mode 100644 index 000000000000..e1070ed86247 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_metrics.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_database_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.database.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + database_rid="rid", + filter="$filter=(name.value eq 'Total Requests') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_usages.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_usages.py index 221fd9205af0..fd159685e895 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_usages.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_database_get_usages.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBDatabaseGetUsages.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBDatabaseGetUsages.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_create.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_create.py index 41e56163a937..4af060dbe4e3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_create.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -43,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGraphAPIComputeServiceCreate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGraphAPIComputeServiceCreate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_delete.py index 62544413c6ab..c709abf4f30b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGraphAPIComputeServiceDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGraphAPIComputeServiceDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_get.py index 0d1d47bfcc7f..f873dace3e87 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_graph_api_compute_service_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGraphAPIComputeServiceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGraphAPIComputeServiceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_create_update.py index ea2676712fad..b06f7a36869c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_delete.py index fbc9381aae16..769051026994 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_get.py index 53d2b64ec2a5..6241f0a7ab67 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_list.py index 144d200b6531..a7dab25ca15e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_autoscale.py index ff03dbe0b470..2b2e6bac60b1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_autoscale.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_manual_throughput.py index ed7e103493eb..3eb8453ccf35 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_migrate_to_manual_throughput.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_get.py index 4ec93bce498c..4ff0e9d3b497 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_update.py index 5da2909db85b..c081e017b142 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_database_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinDatabaseThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinDatabaseThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_backup_information.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_backup_information.py index 29b521f08b40..20c0f5d633f4 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_backup_information.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_backup_information.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -42,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphBackupInformation.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphBackupInformation.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_create_update.py index 5e9916aed38b..e4fbdaf49e9f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -69,6 +67,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_delete.py index 5e12ad408e2c..ea79e9174a2d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_delete.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_get.py index 87194001da7c..2cc72736436c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_list.py index 761462181ecb..15b10d9102ad 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_list.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_autoscale.py index cf180a2bd40f..9b7bbca6fc3d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_autoscale.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphMigrateToAutoscale.json +# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_manual_throughput.py index 1e3c2738abfe..665105bae451 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_migrate_to_manual_throughput.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_get.py index 3b7bde78627a..29d842fcf385 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_update.py index fab75e60bcd6..21a6e4411820 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_gremlin_graph_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -46,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBGremlinGraphThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBGremlinGraphThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_get.py index 67ef4c971b29..4af6563dead6 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_get.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBLocationGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBLocationGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_list.py index 2a3b121b1777..433662fcc1e3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_location_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBLocationList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBLocationList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_create.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_create.py index 67cea973fc5b..855788c0630f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_create.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -62,6 +60,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterCreate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterCreate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_deallocate.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_deallocate.py index 5f87388ff5ec..b4bfc71b998a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_deallocate.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_deallocate.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterDeallocate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterDeallocate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_delete.py index 4083942ca49b..43227f8ce07f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_get.py index 26482ecd7c80..0c998e2e284d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_resource_group.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_resource_group.py index daa757680df0..70b7d264f028 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_resource_group.py +++ 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_resource_group.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterListByResourceGroup.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_subscription.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_subscription.py index 99c9e186ff92..631a55530389 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_subscription.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_list_by_subscription.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterListBySubscription.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterListBySubscription.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_patch.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_patch.py index b70d6d04d6fe..d62c6bb8ccb1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_patch.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_patch.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -54,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterPatch.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterPatch.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_start.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_start.py index 73f5c146e8fc..f4d08283e02d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_start.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_cluster_start.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraClusterStart.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraClusterStart.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_command.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_command.py index edfe5a291bd4..fe721c134c0c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_command.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_command.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -40,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraCommand.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraCommand.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_create.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_create.py index b20aba0acf4c..0c5e57af5ed5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_create.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -48,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraDataCenterCreate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraDataCenterCreate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_delete.py index 0ea59881670e..bccb96b5ddb9 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraDataCenterDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraDataCenterDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_get.py index cb9dd2be0a62..5e629861c5a5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraDataCenterGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraDataCenterGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_list.py index e846da1ba0a2..dbc7d061f3ba 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraDataCenterList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraDataCenterList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_patch.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_patch.py index 3e4831ad22fd..0e255a3b61ff 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_patch.py +++ 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_data_center_patch.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -48,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraDataCenterPatch.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraDataCenterPatch.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_status.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_status.py index c8ae9ae79c8b..ca4cf730369a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_status.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_managed_cassandra_status.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBManagedCassandraStatus.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBManagedCassandraStatus.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_create.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_create.py index 1d983ba130f9..a596b8180ff3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_create.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -43,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMaterializedViewsBuilderServiceCreate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMaterializedViewsBuilderServiceCreate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_delete.py index 2781fd60955f..0613a3166644 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMaterializedViewsBuilderServiceDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMaterializedViewsBuilderServiceDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_get.py index 7aded464ab89..ef338c9c5bd4 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_materialized_views_builder_service_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMaterializedViewsBuilderServiceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMaterializedViewsBuilderServiceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_backup_information.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_backup_information.py index e8611cf2b905..2191f4411753 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_backup_information.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_backup_information.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -42,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionBackupInformation.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionBackupInformation.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_delete.py index 5fb499bc226d..e4940cbfc873 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_delete.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_get.py index ddd3c0014ebd..de76b14b90f8 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_list.py index a760e7cef2da..1c08ddbc6548 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_list.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_autoscale.py index e9a101a9f658..194de6323a24 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_autoscale.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_manual_throughput.py index 8e5132be0be9..96424baf73ba 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_migrate_to_manual_throughput.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_get.py index 056db73179dc..72f4aa1f7ef1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_update.py index 2c0f0679bde9..2805ffda18d3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_collection_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -46,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBCollectionThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBCollectionThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_delete.py index 55c09b571efe..bdd15b31d2af 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_get.py index 737ab265a2f5..f3c9556d5625 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_list.py index deab6db0715e..554466f628af 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_autoscale.py index 54df5877835b..288e48173fdf 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_autoscale.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_manual_throughput.py index f8d3e5645b5a..738f84656a1f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_migrate_to_manual_throughput.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_get.py index 11c6f7b35de0..f152859261d0 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_update.py index ce2bf8d04a52..a48a4ef3aae8 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_database_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBDatabaseThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBDatabaseThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_create_update.py index 22dc9a15485a..5a49c22f22fe 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -48,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBRoleDefinitionCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBRoleDefinitionCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_delete.py index 5aac813279af..0edb4caf7d6f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBRoleDefinitionDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBRoleDefinitionDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_get.py index e868661e7f32..d76706e7407d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBRoleDefinitionGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBRoleDefinitionGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_list.py 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_list.py index f4232483892f..a75d07059513 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_role_definition_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBRoleDefinitionList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBRoleDefinitionList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_create_update.py index 717217702ca7..3e5086d3b046 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -50,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBUserDefinitionCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBUserDefinitionCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_delete.py index 96b3cb4163d3..555fa61ee5c3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBUserDefinitionDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBUserDefinitionDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_get.py index 581476dd0f0b..b480429a6b65 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBUserDefinitionGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBUserDefinitionGet.json if __name__ == "__main__": main() diff --git 
a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_list.py index 9ae0a0ce833e..b449d7457da5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_mongo_db_user_definition_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBMongoDBUserDefinitionList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBMongoDBUserDefinitionList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_delete.py index 3681e1363acc..b3735e47396d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_delete.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. import models as _models """ # PREREQUISITES pip install azure-identity @@ -42,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBNotebookWorkspaceDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBNotebookWorkspaceDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_get.py index f6f3e4f86123..7e1aff6a3dfb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_get.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. 
import models as _models """ # PREREQUISITES pip install azure-identity @@ -43,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBNotebookWorkspaceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBNotebookWorkspaceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list.py index d75ffa06f72f..ed91445294c5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBNotebookWorkspaceList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBNotebookWorkspaceList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list_connection_info.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list_connection_info.py index db4ce3e246d4..cc0bdd8a65b3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list_connection_info.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_list_connection_info.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. import models as _models """ # PREREQUISITES pip install azure-identity @@ -43,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBNotebookWorkspaceListConnectionInfo.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBNotebookWorkspaceListConnectionInfo.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_regenerate_auth_token.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_regenerate_auth_token.py index 0d2b1fb352ac..43e76fd7f5a0 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_regenerate_auth_token.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_regenerate_auth_token.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. 
import models as _models """ # PREREQUISITES pip install azure-identity @@ -42,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBNotebookWorkspaceRegenerateAuthToken.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBNotebookWorkspaceRegenerateAuthToken.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_start.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_start.py index 86f62496550e..f2999bfcda45 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_start.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_notebook_workspace_start.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. import models as _models """ # PREREQUISITES pip install azure-identity @@ -42,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBNotebookWorkspaceStart.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBNotebookWorkspaceStart.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_operations_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_operations_list.py index 9e6b09a0312d..716a3a781e71 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_operations_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_operations_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBOperationsList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBOperationsList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_get_metrics.py new file mode 100644 index 000000000000..869cc5071b14 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_get_metrics.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_percentile_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.percentile.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + filter="$filter=(name.value eq 'Probabilistic Bounded Staleness') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPercentileGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_source_target_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_source_target_get_metrics.py new file mode 100644 index 000000000000..58382d29937a --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_source_target_get_metrics.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_percentile_source_target_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.percentile_source_target.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + source_region="West Central US", + target_region="East US", + filter="$filter=(name.value eq 'Probabilistic Bounded Staleness') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPercentileSourceTargetGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_target_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_target_get_metrics.py new file mode 100644 index 000000000000..7c8eee86694b --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_percentile_target_get_metrics.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_percentile_target_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.percentile_target.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + target_region="East US", + filter="$filter=(name.value eq 'Probabilistic Bounded Staleness') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPercentileTargetGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_delete.py index 5d1659ee575c..752c0e68c5c4 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBPrivateEndpointConnectionDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPrivateEndpointConnectionDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_get.py index 139e1c76b4f8..ebaf856abae6 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBPrivateEndpointConnectionGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPrivateEndpointConnectionGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_list_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_list_get.py index 7a6b641fde31..124396447b42 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_list_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_list_get.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBPrivateEndpointConnectionListGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPrivateEndpointConnectionListGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_update.py 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_update.py index 54dc86e477dc..b926d3d76840 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_endpoint_connection_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -48,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBPrivateEndpointConnectionUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPrivateEndpointConnectionUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_get.py index 5fb2c821b0ec..10ed98d90265 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBPrivateLinkResourceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPrivateLinkResourceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_list_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_list_get.py index 215168d56ab6..a6fd705e1e23 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_list_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_private_link_resource_list_get.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBPrivateLinkResourceListGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPrivateLinkResourceListGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_region_collection_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_region_collection_get_metrics.py new file mode 100644 index 000000000000..ca955c3e219c --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_region_collection_get_metrics.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_db_region_collection_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.collection_region.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + region="North Europe", + database_rid="databaseRid", + collection_rid="collectionRid", + filter="$filter=(name.value eq 'Total Requests') and timeGrain eq duration'PT5M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T00:13:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRegionCollectionGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_get.py index 26818653124f..dc5528116e1a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableDatabaseAccountGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableDatabaseAccountGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_list.py index 2b1e05767995..746f47f94c00 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableDatabaseAccountList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableDatabaseAccountList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_no_location_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_no_location_list.py index c60095852d22..edd799393bed 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_no_location_list.py +++ 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_database_account_no_location_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableDatabaseAccountNoLocationList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableDatabaseAccountNoLocationList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_database_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_database_list.py index 87f259b15dba..5b53fb912610 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_database_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_database_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableGremlinDatabaseList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableGremlinDatabaseList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_graph_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_graph_list.py index c1951672555c..bc9e7330028b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_graph_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_graph_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableGremlinGraphList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableGremlinGraphList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_resource_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_resource_list.py index 22871f03529f..1f2333ac292d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_resource_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_gremlin_resource_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableGremlinResourceList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableGremlinResourceList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_collection_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_collection_list.py index b0eb71a5d66b..a0539c3228f5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_collection_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_collection_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableMongodbCollectionList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableMongodbCollectionList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_database_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_database_list.py index 163a5e0c8034..c24f0b9e9cbd 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_database_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_database_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableMongodbDatabaseList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableMongodbDatabaseList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_resource_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_resource_list.py index 0a64a568976a..ce1bb81f82a9 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_resource_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_mongodb_resource_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableMongodbResourceList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableMongodbResourceList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_container_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_container_list.py index bfc5b4f1e121..916c547af30e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_container_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_container_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableSqlContainerList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableSqlContainerList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_database_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_database_list.py index d54c3af6a8c4..8f22037ec936 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_database_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_database_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableSqlDatabaseList.json +# x-ms-original-file: 
specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableSqlDatabaseList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_resource_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_resource_list.py index 8d268c914f5d..0cc1162b12a9 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_resource_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_sql_resource_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableSqlResourceList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableSqlResourceList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_list.py index f41aaf5175f6..87fe42d37a87 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableTableList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableTableList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_resource_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_resource_list.py index 99ff563e2c2d..c8015d103a02 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_resource_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restorable_table_resource_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestorableTableResourceList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestorableTableResourceList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restore_database_account_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restore_database_account_create_update.py index 7871292ca6ca..8a71713e0df6 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restore_database_account_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_restore_database_account_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -61,6 +59,7 @@ def main(): "restoreMode": "PointInTime", "restoreSource": "/subscriptions/subid/providers/Microsoft.DocumentDB/locations/westus/restorableDatabaseAccounts/1a97b4bb-f6a0-430e-ade1-638d781830cc", "restoreTimestampInUtc": "2021-03-11T22:05:09Z", + "restoreWithTtlDisabled": False, }, }, "tags": {}, @@ -69,6 +68,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBRestoreDatabaseAccountCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBRestoreDatabaseAccountCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_services_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_services_list.py index 7d32450cb658..1df6daa23037 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_services_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_services_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBServicesList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBServicesList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_create_update.py index cd01d556098f..e9796b4f409c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -56,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlClientEncryptionKeyCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlClientEncryptionKeyCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_get.py index 3169ab1d6e85..126099a92f35 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_key_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlClientEncryptionKeyGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlClientEncryptionKeyGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_keys_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_keys_list.py index a82314d4ab05..067f63eca791 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_keys_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_client_encryption_keys_list.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlClientEncryptionKeysList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlClientEncryptionKeysList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_backup_information.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_backup_information.py index 9517e88d4202..03c12a9ee75d 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_backup_information.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_backup_information.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -42,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerBackupInformation.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerBackupInformation.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_create_update.py index 074e5e4186fb..278a3af1e18b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -81,6 +79,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_delete.py index eaa2aadcea13..e91d0ccfbd82 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_delete.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_get.py index e8f1e8da4357..4285fed22e0b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_list.py index 9b90da9be4d8..746c40215ec4 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_list.py 
+++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_list.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_autoscale.py index 02e1c9f12623..3a69f8b58805 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_autoscale.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_manual_throughput.py index e731b8e3da49..23e3e06a20db 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_migrate_to_manual_throughput.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_get.py index b6ba4b97b036..6e309952cc92 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_update.py index aef6d3f10d5f..85f75a834c8e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_container_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -46,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlContainerThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlContainerThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_create_update.py index 8c009bd50211..de196bad91f3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_delete.py index d5171e6c8f07..9f379eb1693c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_get.py index 7547633e948d..6e66bc4d29e9 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_list.py index 990bdd43a4e5..26f629271aec 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_list.py +++ 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_autoscale.py index b2fa08346945..18686cbed118 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_autoscale.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseMigrateToAutoscale.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_manual_throughput.py index 13e1d7f33b29..a1d3cd64bdfc 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_migrate_to_manual_throughput.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_get.py index b1e3921abd8b..b7677b0998a2 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_update.py index a10df37d9cff..5455c735110e 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_database_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlDatabaseThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlDatabaseThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_create_update.py index daa51d0e1efe..929fdfd09cf5 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -47,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleAssignmentCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleAssignmentCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_delete.py index e7585d4ed22e..1d7fa12bdc3a 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleAssignmentDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleAssignmentDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_get.py index 0b0de5acc6ae..4a237607316f 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleAssignmentGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleAssignmentGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_list.py index 
9d165ec81345..d1beb0a7fed3 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_assignment_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleAssignmentList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleAssignmentList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_create_update.py index f2bcf93369ca..c5be8aff9537 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -59,6 +57,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleDefinitionCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleDefinitionCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_delete.py index 36d82d24231c..6f5c2133e5fe 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleDefinitionDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleDefinitionDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_get.py index e2b4bc45fdaa..3d1ccbbf9928 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleDefinitionGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleDefinitionGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_list.py index 8037520ffcaf..e4fdded00703 
100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_role_definition_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlRoleDefinitionList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlRoleDefinitionList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_create_update.py index bd378dc11b25..e7510ec754dc 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlStoredProcedureCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlStoredProcedureCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_delete.py index 8f63398b687e..a80b1e91d3c7 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_delete.py @@ -39,6 +39,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlStoredProcedureDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlStoredProcedureDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_get.py index 8c7216ab3acd..b353f00abc1b 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_get.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlStoredProcedureGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlStoredProcedureGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_list.py index 35acf3925603..70140145b281 100644 
--- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_stored_procedure_list.py @@ -40,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlStoredProcedureList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlStoredProcedureList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_create_update.py index 5c3cbbc8a75d..fa963d272495 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -53,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlTriggerCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlTriggerCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_delete.py index 4a5c9f7bcc79..f28acbf7a977 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_delete.py @@ -39,6 +39,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlTriggerDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlTriggerDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_get.py index c0903877e097..75955381d7db 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_get.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlTriggerGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlTriggerGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_list.py index c0b51af78d87..0a7a4aa1e709 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_trigger_list.py @@ -40,6 
+40,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlTriggerList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlTriggerList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_create_update.py index 88bb3e114ec5..0e3a9290bf06 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlUserDefinedFunctionCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlUserDefinedFunctionCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_delete.py index 05fe318ace8f..129d127e7180 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_delete.py @@ -39,6 +39,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlUserDefinedFunctionDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlUserDefinedFunctionDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_get.py index f5fb0f4e86e5..7074249c5d8c 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_get.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlUserDefinedFunctionGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlUserDefinedFunctionGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_list.py index b4b0e9a064ef..51f6bb045cdb 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_list.py +++ 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_sql_user_defined_function_list.py @@ -40,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBSqlUserDefinedFunctionList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBSqlUserDefinedFunctionList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_backup_information.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_backup_information.py index 2a2368f94bf0..b1620a5ee843 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_backup_information.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_backup_information.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -41,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableBackupInformation.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableBackupInformation.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_create_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_create_update.py index a6725d62e7b9..52f16de61bb0 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_create_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableCreateUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_delete.py index 020f3fdc9589..06a400f2a7a1 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_get.py index c03b03be7ba8..901ae4c96b55 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_list.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_list.py index c4390a5d2bc1..bb4221fdf201 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_list.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableList.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableList.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_autoscale.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_autoscale.py index 93a3e4c93cb9..ae97bf025cfa 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_autoscale.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_autoscale.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableMigrateToAutoscale.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableMigrateToAutoscale.json if __name__ == "__main__": main() diff --git 
a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_manual_throughput.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_manual_throughput.py index 98726063d6db..be127cfac6d2 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_manual_throughput.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_migrate_to_manual_throughput.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableMigrateToManualThroughput.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableMigrateToManualThroughput.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_get.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_get.py index 97ec0b97f495..def51d51e769 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableThroughputGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableThroughputGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_update.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_update.py index 41ad2235d4a0..bf3e427d5b10 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_update.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_db_table_throughput_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -45,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/CosmosDBTableThroughputUpdate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBTableThroughputUpdate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_dbp_key_range_id_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_dbp_key_range_id_get_metrics.py new file mode 100644 index 000000000000..ac158a963b7f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_dbp_key_range_id_get_metrics.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_dbp_key_range_id_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.partition_key_range_id.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + database_rid="databaseRid", + collection_rid="collectionRid", + partition_key_range_id="0", + filter="$filter=(name.value eq 'Max RUs Per Second') and timeGrain eq duration'PT1M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T23:58:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPKeyRangeIdGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_dbp_key_range_id_region_get_metrics.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_dbp_key_range_id_region_get_metrics.py new file mode 100644 index 000000000000..5d6fa67d98f6 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/cosmos_dbp_key_range_id_region_get_metrics.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-cosmosdb +# USAGE + python cosmos_dbp_key_range_id_region_get_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = CosmosDBManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.partition_key_range_id_region.list_metrics( + resource_group_name="rg1", + account_name="ddb1", + region="West US", + database_rid="databaseRid", + collection_rid="collectionRid", + partition_key_range_id="0", + filter="$filter=(name.value eq 'Max RUs Per Second') and timeGrain eq duration'PT1M' and startTime eq '2017-11-19T23:53:55.2780000Z' and endTime eq '2017-11-20T23:58:55.2780000Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/CosmosDBPKeyRangeIdRegionGetMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_create.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_create.py index c118175b00a6..b189ad90ab22 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_create.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.cosmosdb import CosmosDBManagementClient @@ -48,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/services/sqldedicatedgateway/CosmosDBSqlDedicatedGatewayServiceCreate.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/services/sqldedicatedgateway/CosmosDBSqlDedicatedGatewayServiceCreate.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_delete.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_delete.py index f95fc7e28414..e4fa8b451783 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_delete.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/services/sqldedicatedgateway/CosmosDBSqlDedicatedGatewayServiceDelete.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/services/sqldedicatedgateway/CosmosDBSqlDedicatedGatewayServiceDelete.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_get.py 
b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_get.py index 21f9f28deefa..1898e56cf9bf 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_get.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_samples/services/sqldedicatedgateway/cosmos_db_sql_dedicated_gateway_service_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-05-15/examples/services/sqldedicatedgateway/CosmosDBSqlDedicatedGatewayServiceGet.json +# x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2024-08-15/examples/services/sqldedicatedgateway/CosmosDBSqlDedicatedGatewayServiceGet.json if __name__ == "__main__": main() diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/conftest.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/conftest.py new file mode 100644 index 000000000000..c6d1ee70d05f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# aovid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + cosmosdbmanagement_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + cosmosdbmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + cosmosdbmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + cosmosdbmanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_clusters_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_clusters_operations.py new file mode 100644 index 000000000000..949bc725aeb8 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_clusters_operations.py @@ -0,0 +1,203 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCassandraClustersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_subscription(self, resource_group): + response = self.client.cassandra_clusters.list_by_subscription( + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.cassandra_clusters.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.cassandra_clusters.get( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.cassandra_clusters.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
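+    # The begin_* methods of the sync client return an azure.core.polling.LROPoller;
+    # .result() blocks until the long-running operation reaches a terminal state. The
+    # "str" values in these generated calls are placeholders and need to be replaced with
+    # real resource names before the test is recorded.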
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update(self, resource_group): + response = self.client.cassandra_clusters.begin_create_update( + resource_group_name=resource_group.name, + cluster_name="str", + body={ + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "location": "str", + "name": "str", + "properties": { + "authenticationMethod": "str", + "azureConnectionMethod": "str", + "cassandraAuditLoggingEnabled": bool, + "cassandraVersion": "str", + "clientCertificates": [{"pem": "str"}], + "clusterNameOverride": "str", + "deallocated": bool, + "delegatedManagementSubnetId": "str", + "externalGossipCertificates": [{"pem": "str"}], + "externalSeedNodes": [{"ipAddress": "str"}], + "gossipCertificates": [{"pem": "str"}], + "hoursBetweenBackups": 0, + "initialCassandraAdminPassword": "str", + "privateLinkResourceId": "str", + "prometheusEndpoint": {"ipAddress": "str"}, + "provisionError": {"additionalErrorInfo": "str", "code": "str", "message": "str", "target": "str"}, + "provisioningState": "str", + "repairEnabled": bool, + "restoreFromBackupId": "str", + "seedNodes": [{"ipAddress": "str"}], + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.cassandra_clusters.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + body={ + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "location": "str", + "name": "str", + "properties": { + "authenticationMethod": "str", + "azureConnectionMethod": "str", + "cassandraAuditLoggingEnabled": bool, + "cassandraVersion": "str", + "clientCertificates": [{"pem": "str"}], + "clusterNameOverride": "str", + "deallocated": bool, + "delegatedManagementSubnetId": "str", + "externalGossipCertificates": [{"pem": "str"}], + "externalSeedNodes": [{"ipAddress": "str"}], + "gossipCertificates": [{"pem": "str"}], + "hoursBetweenBackups": 0, + "initialCassandraAdminPassword": "str", + "privateLinkResourceId": "str", + "prometheusEndpoint": {"ipAddress": "str"}, + "provisionError": {"additionalErrorInfo": "str", "code": "str", "message": "str", "target": "str"}, + "provisioningState": "str", + "repairEnabled": bool, + "restoreFromBackupId": "str", + "seedNodes": [{"ipAddress": "str"}], + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_invoke_command(self, resource_group): + response = self.client.cassandra_clusters.begin_invoke_command( + resource_group_name=resource_group.name, + cluster_name="str", + body={ + "command": "str", + "host": "str", + "arguments": {"str": "str"}, + "cassandra-stop-start": bool, + "readwrite": bool, + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
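+    # As in the request bodies above, dict keys use the REST wire names of the properties
+    # (for example "delegatedManagementSubnetId" or "cassandra-stop-start"), not the Python
+    # attribute names of the corresponding model classes.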
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_deallocate(self, resource_group): + response = self.client.cassandra_clusters.begin_deallocate( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_start(self, resource_group): + response = self.client.cassandra_clusters.begin_start( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_status(self, resource_group): + response = self.client.cassandra_clusters.status( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_clusters_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_clusters_operations_async.py new file mode 100644 index 000000000000..867916e7057f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_clusters_operations_async.py @@ -0,0 +1,226 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCassandraClustersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_subscription(self, resource_group): + response = self.client.cassandra_clusters.list_by_subscription( + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.cassandra_clusters.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
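+    # The list_* operations on the async client are not awaited: they return an AsyncItemPaged
+    # immediately, and pages are fetched lazily while iterating with "async for".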
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.cassandra_clusters.get( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.cassandra_clusters.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update(self, resource_group): + response = await ( + await self.client.cassandra_clusters.begin_create_update( + resource_group_name=resource_group.name, + cluster_name="str", + body={ + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "location": "str", + "name": "str", + "properties": { + "authenticationMethod": "str", + "azureConnectionMethod": "str", + "cassandraAuditLoggingEnabled": bool, + "cassandraVersion": "str", + "clientCertificates": [{"pem": "str"}], + "clusterNameOverride": "str", + "deallocated": bool, + "delegatedManagementSubnetId": "str", + "externalGossipCertificates": [{"pem": "str"}], + "externalSeedNodes": [{"ipAddress": "str"}], + "gossipCertificates": [{"pem": "str"}], + "hoursBetweenBackups": 0, + "initialCassandraAdminPassword": "str", + "privateLinkResourceId": "str", + "prometheusEndpoint": {"ipAddress": "str"}, + "provisionError": { + "additionalErrorInfo": "str", + "code": "str", + "message": "str", + "target": "str", + }, + "provisioningState": "str", + "repairEnabled": bool, + "restoreFromBackupId": "str", + "seedNodes": [{"ipAddress": "str"}], + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.cassandra_clusters.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + body={ + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "location": "str", + "name": "str", + "properties": { + "authenticationMethod": "str", + "azureConnectionMethod": "str", + "cassandraAuditLoggingEnabled": bool, + "cassandraVersion": "str", + "clientCertificates": [{"pem": "str"}], + "clusterNameOverride": "str", + "deallocated": bool, + "delegatedManagementSubnetId": "str", + "externalGossipCertificates": [{"pem": "str"}], + "externalSeedNodes": [{"ipAddress": "str"}], + "gossipCertificates": [{"pem": "str"}], + "hoursBetweenBackups": 0, + "initialCassandraAdminPassword": "str", + "privateLinkResourceId": "str", + "prometheusEndpoint": {"ipAddress": "str"}, + "provisionError": { + "additionalErrorInfo": "str", + "code": "str", + "message": "str", + "target": "str", + }, + "provisioningState": "str", + "repairEnabled": bool, + "restoreFromBackupId": "str", + "seedNodes": [{"ipAddress": "str"}], + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_invoke_command(self, resource_group): + response = await ( + await self.client.cassandra_clusters.begin_invoke_command( + resource_group_name=resource_group.name, + cluster_name="str", + body={ + "command": "str", + "host": "str", + "arguments": {"str": "str"}, + "cassandra-stop-start": bool, + "readwrite": bool, + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_deallocate(self, resource_group): + response = await ( + await self.client.cassandra_clusters.begin_deallocate( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_start(self, resource_group): + response = await ( + await self.client.cassandra_clusters.begin_start( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_status(self, resource_group): + response = await self.client.cassandra_clusters.status( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
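Outside the recorded-test harness, the async begin_*/poller pattern exercised above can be driven directly. A minimal sketch, assuming a hypothetical resource group "rg1", a hypothetical cluster name "cluster1", and a placeholder subscription id, and using the async credential from azure.identity.aio:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def main():
    # DefaultAzureCredential picks up AZURE_CLIENT_ID / AZURE_TENANT_ID / AZURE_CLIENT_SECRET,
    # the same prerequisites listed in the generated samples.
    async with DefaultAzureCredential() as credential:
        async with CosmosDBManagementClient(credential, "00000000-0000-0000-0000-000000000000") as client:
            # begin_deallocate is a coroutine that resolves to an AsyncLROPoller;
            # awaiting poller.result() polls until the operation completes.
            poller = await client.cassandra_clusters.begin_deallocate(
                resource_group_name="rg1",  # hypothetical resource group
                cluster_name="cluster1",  # hypothetical cluster
            )
            await poller.result()


if __name__ == "__main__":
    asyncio.run(main())

The synchronous client follows the same shape, except that begin_deallocate returns the LROPoller without await and poller.result() blocks until completion.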
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_data_centers_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_data_centers_operations.py new file mode 100644 index 000000000000..2aa988b82d3e --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_data_centers_operations.py @@ -0,0 +1,147 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCassandraDataCentersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.cassandra_data_centers.list( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.cassandra_data_centers.get( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.cassandra_data_centers.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update(self, resource_group): + response = self.client.cassandra_data_centers.begin_create_update( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + body={ + "id": "str", + "name": "str", + "properties": { + "authenticationMethodLdapProperties": { + "connectionTimeoutInMs": 0, + "searchBaseDistinguishedName": "str", + "searchFilterTemplate": "str", + "serverCertificates": [{"pem": "str"}], + "serverHostname": "str", + "serverPort": 0, + "serviceUserDistinguishedName": "str", + "serviceUserPassword": "str", + }, + "availabilityZone": bool, + "backupStorageCustomerKeyUri": "str", + "base64EncodedCassandraYamlFragment": "str", + "dataCenterLocation": "str", + "deallocated": bool, + "delegatedSubnetId": "str", + "diskCapacity": 0, + "diskSku": "str", + "managedDiskCustomerKeyUri": "str", + "nodeCount": 0, + "privateEndpointIpAddress": "str", + "provisionError": {"additionalErrorInfo": "str", "code": "str", "message": "str", "target": "str"}, + "provisioningState": "str", + "seedNodes": [{"ipAddress": "str"}], + "sku": "str", + }, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.cassandra_data_centers.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + body={ + "id": "str", + "name": "str", + "properties": { + "authenticationMethodLdapProperties": { + "connectionTimeoutInMs": 0, + "searchBaseDistinguishedName": "str", + "searchFilterTemplate": "str", + "serverCertificates": [{"pem": "str"}], + "serverHostname": "str", + "serverPort": 0, + "serviceUserDistinguishedName": "str", + "serviceUserPassword": "str", + }, + "availabilityZone": bool, + "backupStorageCustomerKeyUri": "str", + "base64EncodedCassandraYamlFragment": "str", + "dataCenterLocation": "str", + "deallocated": bool, + "delegatedSubnetId": "str", + "diskCapacity": 0, + "diskSku": "str", + "managedDiskCustomerKeyUri": "str", + "nodeCount": 0, + "privateEndpointIpAddress": "str", + "provisionError": {"additionalErrorInfo": "str", "code": "str", "message": "str", "target": "str"}, + "provisioningState": "str", + "seedNodes": [{"ipAddress": "str"}], + "sku": "str", + }, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_data_centers_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_data_centers_operations_async.py new file mode 100644 index 000000000000..96bea692e541 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_data_centers_operations_async.py @@ -0,0 +1,164 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCassandraDataCentersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.cassandra_data_centers.list( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.cassandra_data_centers.get( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.cassandra_data_centers.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update(self, resource_group): + response = await ( + await self.client.cassandra_data_centers.begin_create_update( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + body={ + "id": "str", + "name": "str", + "properties": { + "authenticationMethodLdapProperties": { + "connectionTimeoutInMs": 0, + "searchBaseDistinguishedName": "str", + "searchFilterTemplate": "str", + "serverCertificates": [{"pem": "str"}], + "serverHostname": "str", + "serverPort": 0, + "serviceUserDistinguishedName": "str", + "serviceUserPassword": "str", + }, + "availabilityZone": bool, + "backupStorageCustomerKeyUri": "str", + "base64EncodedCassandraYamlFragment": "str", + "dataCenterLocation": "str", + "deallocated": bool, + "delegatedSubnetId": "str", + "diskCapacity": 0, + "diskSku": "str", + "managedDiskCustomerKeyUri": "str", + "nodeCount": 0, + "privateEndpointIpAddress": "str", + "provisionError": { + "additionalErrorInfo": "str", + "code": "str", + "message": "str", + "target": "str", + }, + "provisioningState": "str", + "seedNodes": [{"ipAddress": "str"}], + "sku": "str", + }, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.cassandra_data_centers.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + data_center_name="str", + body={ + "id": "str", + "name": "str", + "properties": { + "authenticationMethodLdapProperties": { + "connectionTimeoutInMs": 0, + "searchBaseDistinguishedName": "str", + "searchFilterTemplate": "str", + "serverCertificates": [{"pem": "str"}], + "serverHostname": "str", + "serverPort": 0, + "serviceUserDistinguishedName": "str", + "serviceUserPassword": "str", + }, + "availabilityZone": bool, + "backupStorageCustomerKeyUri": "str", + "base64EncodedCassandraYamlFragment": "str", + "dataCenterLocation": "str", + "deallocated": bool, + "delegatedSubnetId": "str", + "diskCapacity": 0, + "diskSku": "str", + "managedDiskCustomerKeyUri": "str", + "nodeCount": 0, + "privateEndpointIpAddress": "str", + "provisionError": { + "additionalErrorInfo": "str", + "code": "str", + "message": "str", + "target": "str", + }, + "provisioningState": "str", + "seedNodes": [{"ipAddress": "str"}], + "sku": "str", + }, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_resources_operations.py new file mode 100644 index 000000000000..3f5970be1f66 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_resources_operations.py @@ -0,0 +1,298 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCassandraResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_cassandra_keyspaces(self, resource_group): + response = self.client.cassandra_resources.list_cassandra_keyspaces( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_cassandra_keyspace(self, resource_group): + response = self.client.cassandra_resources.get_cassandra_keyspace( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_cassandra_keyspace(self, resource_group): + response = self.client.cassandra_resources.begin_create_update_cassandra_keyspace( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + create_update_cassandra_keyspace_parameters={ + "resource": {"id": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_cassandra_keyspace(self, resource_group): + response = self.client.cassandra_resources.begin_delete_cassandra_keyspace( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_cassandra_keyspace_throughput(self, resource_group): + response = self.client.cassandra_resources.get_cassandra_keyspace_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_cassandra_keyspace_throughput(self, resource_group): + response = self.client.cassandra_resources.begin_update_cassandra_keyspace_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_cassandra_keyspace_to_autoscale(self, resource_group): + response = self.client.cassandra_resources.begin_migrate_cassandra_keyspace_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
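+    # The create_update_*_parameters and update_throughput_parameters bodies above are plain
+    # dicts in the request's JSON shape; equivalent model instances from
+    # azure.mgmt.cosmosdb.models (e.g. CassandraKeyspaceCreateUpdateParameters,
+    # ThroughputSettingsUpdateParameters) can be passed instead.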
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_cassandra_keyspace_to_manual_throughput(self, resource_group): + response = self.client.cassandra_resources.begin_migrate_cassandra_keyspace_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_cassandra_tables(self, resource_group): + response = self.client.cassandra_resources.list_cassandra_tables( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_cassandra_table(self, resource_group): + response = self.client.cassandra_resources.get_cassandra_table( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_cassandra_table(self, resource_group): + response = self.client.cassandra_resources.begin_create_update_cassandra_table( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + create_update_cassandra_table_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "defaultTtl": 0, + "schema": { + "clusterKeys": [{"name": "str", "orderBy": "str"}], + "columns": [{"name": "str", "type": "str"}], + "partitionKeys": [{"name": "str"}], + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_cassandra_table(self, resource_group): + response = self.client.cassandra_resources.begin_delete_cassandra_table( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_cassandra_table_throughput(self, resource_group): + response = self.client.cassandra_resources.get_cassandra_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
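+    # The *_throughput get operations return the current settings for the resource, i.e. either a
+    # manual "throughput" value or "autoscaleSettings", plus service-computed fields such as
+    # "minimumThroughput", mirroring the fields used in the update bodies above.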
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_cassandra_table_throughput(self, resource_group): + response = self.client.cassandra_resources.begin_update_cassandra_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_cassandra_table_to_autoscale(self, resource_group): + response = self.client.cassandra_resources.begin_migrate_cassandra_table_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_cassandra_table_to_manual_throughput(self, resource_group): + response = self.client.cassandra_resources.begin_migrate_cassandra_table_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_resources_operations_async.py new file mode 100644 index 000000000000..1013dcf94734 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_cassandra_resources_operations_async.py @@ -0,0 +1,319 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCassandraResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_cassandra_keyspaces(self, resource_group): + response = self.client.cassandra_resources.list_cassandra_keyspaces( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_cassandra_keyspace(self, resource_group): + response = await self.client.cassandra_resources.get_cassandra_keyspace( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_cassandra_keyspace(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_create_update_cassandra_keyspace( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + create_update_cassandra_keyspace_parameters={ + "resource": {"id": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_cassandra_keyspace(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_delete_cassandra_keyspace( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_cassandra_keyspace_throughput(self, resource_group): + response = await self.client.cassandra_resources.get_cassandra_keyspace_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
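In the async variants the long-running operations appear as `await (await self.client....begin_...(...)).result()`, which is simply two awaits written on one line: the aio `begin_*` coroutine resolves to an `AsyncLROPoller`, and awaiting the poller's `result()` then polls until the operation reaches a terminal state. The same call split into explicit steps (resource names are hypothetical):

# The async begin_* call returns an AsyncLROPoller once awaited.
poller = await self.client.cassandra_resources.begin_delete_cassandra_keyspace(
    resource_group_name=resource_group.name,
    account_name="my-cosmos-account",  # hypothetical
    keyspace_name="my-keyspace",       # hypothetical
    api_version="2024-08-15",
)
# Awaiting result() polls the service until the delete reports a terminal state.
result = await poller.result()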
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_cassandra_keyspace_throughput(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_update_cassandra_keyspace_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_cassandra_keyspace_to_autoscale(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_migrate_cassandra_keyspace_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_cassandra_keyspace_to_manual_throughput(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_migrate_cassandra_keyspace_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_cassandra_tables(self, resource_group): + response = self.client.cassandra_resources.list_cassandra_tables( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_cassandra_table(self, resource_group): + response = await self.client.cassandra_resources.get_cassandra_table( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_cassandra_table(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_create_update_cassandra_table( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + create_update_cassandra_table_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "defaultTtl": 0, + "schema": { + "clusterKeys": [{"name": "str", "orderBy": "str"}], + "columns": [{"name": "str", "type": "str"}], + "partitionKeys": [{"name": "str"}], + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_cassandra_table(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_delete_cassandra_table( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_cassandra_table_throughput(self, resource_group): + response = await self.client.cassandra_resources.get_cassandra_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_cassandra_table_throughput(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_update_cassandra_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_cassandra_table_to_autoscale(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_migrate_cassandra_table_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_cassandra_table_to_manual_throughput(self, resource_group): + response = await ( + await self.client.cassandra_resources.begin_migrate_cassandra_table_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + keyspace_name="str", + table_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_operations.py new file mode 100644 index 000000000000..e59bd76e9abd --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_operations.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.collection.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_usages(self, resource_group): + response = self.client.collection.list_usages( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metric_definitions(self, resource_group): + response = self.client.collection.list_metric_definitions( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
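The metrics listings above take an OData-style `filter` string; the Cosmos DB metrics endpoints document filtering on `name.value`, `startTime`, `endTime` and `timeGrain` with the `eq` operator. A sketch with illustrative values only (the metric name, RIDs and timestamps are made up):

metric_filter = (
    "(name.value eq 'Total Requests') and "
    "timeGrain eq duration'PT5M' and "
    "startTime eq '2024-08-01T00:00:00.000Z' and "
    "endTime eq '2024-08-01T01:00:00.000Z'"
)
metrics = self.client.collection.list_metrics(
    resource_group_name=resource_group.name,
    account_name="my-cosmos-account",    # hypothetical
    database_rid="my-database-rid",      # hypothetical
    collection_rid="my-collection-rid",  # hypothetical
    filter=metric_filter,
    api_version="2024-08-15",
)
# Each item is a Metric model; materialize the pager and check something meaningful.
assert all(m.name is not None for m in metrics)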
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_operations_async.py new file mode 100644 index 000000000000..e63af0204e2d --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_operations_async.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.collection.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_usages(self, resource_group): + response = self.client.collection.list_usages( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metric_definitions(self, resource_group): + response = self.client.collection.list_metric_definitions( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_operations.py new file mode 100644 index 000000000000..c3e78965d932 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_operations.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionPartitionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.collection_partition.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_usages(self, resource_group): + response = self.client.collection_partition.list_usages( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_operations_async.py new file mode 100644 index 000000000000..f887ec685a08 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_operations_async.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionPartitionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.collection_partition.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_usages(self, resource_group): + response = self.client.collection_partition.list_usages( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_region_operations.py new file mode 100644 index 000000000000..3c9879f143c2 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_region_operations.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionPartitionRegionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.collection_partition_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_region_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_region_operations_async.py new file mode 100644 index 000000000000..7f4aa4590641 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_partition_region_operations_async.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionPartitionRegionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.collection_partition_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_region_operations.py new file mode 100644 index 000000000000..ba24b84db2be --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_region_operations.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionRegionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.collection_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_region_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_region_operations_async.py new file mode 100644 index 000000000000..c6084c38ea97 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_collection_region_operations_async.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementCollectionRegionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.collection_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + database_rid="str", + collection_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_account_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_account_region_operations.py new file mode 100644 index 000000000000..dceaa34d4c57 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_account_region_operations.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementDatabaseAccountRegionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.database_account_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_account_region_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_account_region_operations_async.py new file mode 100644 index 000000000000..b244a424639f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_account_region_operations_async.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementDatabaseAccountRegionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.database_account_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_accounts_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_accounts_operations.py new file mode 100644 index 000000000000..a455e4dbb294 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_accounts_operations.py @@ -0,0 +1,381 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementDatabaseAccountsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.database_accounts.get( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.database_accounts.begin_update( + resource_group_name=resource_group.name, + account_name="str", + update_parameters={ + "analyticalStorageConfiguration": {"schemaType": "str"}, + "apiProperties": {"serverVersion": "str"}, + "backupPolicy": "backup_policy", + "capabilities": [{"name": "str"}], + "capacity": {"totalThroughputLimit": 0}, + "connectorOffer": "str", + "consistencyPolicy": { + "defaultConsistencyLevel": "str", + "maxIntervalInSeconds": 0, + "maxStalenessPrefix": 0, + }, + "cors": [ + { + "allowedOrigins": "str", + "allowedHeaders": "str", + "allowedMethods": "str", + "exposedHeaders": "str", + "maxAgeInSeconds": 0, + } + ], + "customerManagedKeyStatus": "str", + "defaultIdentity": "str", + "disableKeyBasedMetadataWriteAccess": bool, + "disableLocalAuth": bool, + "enableAnalyticalStorage": bool, + "enableAutomaticFailover": bool, + "enableBurstCapacity": bool, + "enableCassandraConnector": bool, + "enableFreeTier": bool, + "enableMultipleWriteLocations": bool, + "enablePartitionMerge": bool, + "identity": { + "principalId": "str", + "tenantId": "str", + "type": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "ipRules": [{"ipAddressOrRange": "str"}], + "isVirtualNetworkFilterEnabled": bool, + "keyVaultKeyUri": "str", + "keysMetadata": { + "primaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "primaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + }, + "location": "str", + "locations": [ + { + "documentEndpoint": "str", + "failoverPriority": 0, + "id": "str", + "isZoneRedundant": bool, + "locationName": "str", + "provisioningState": "str", + } + ], + "minimalTlsVersion": "str", + "networkAclBypass": "str", + "networkAclBypassResourceIds": ["str"], + "publicNetworkAccess": "str", + "tags": {"str": "str"}, + "virtualNetworkRules": [{"id": "str", "ignoreMissingVNetServiceEndpoint": bool}], + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
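The generated `begin_update` test passes a dict that echoes every updatable property, with `bool` left as a bare type placeholder. In hand-written tests the same call is usually clearer with the typed models from `azure.mgmt.cosmosdb.models` and only the fields actually being changed; a sketch under that assumption (values are illustrative):

from azure.mgmt.cosmosdb.models import ConsistencyPolicy, DatabaseAccountUpdateParameters

# Only the consistency policy and tags are sent; the service keeps the remaining
# account settings unchanged.
update = DatabaseAccountUpdateParameters(
    tags={"env": "test"},
    consistency_policy=ConsistencyPolicy(default_consistency_level="Session"),
)
account = self.client.database_accounts.begin_update(
    resource_group_name=resource_group.name,
    account_name="my-cosmos-account",  # hypothetical
    update_parameters=update,
    api_version="2024-08-15",
).result()
assert account.consistency_policy.default_consistency_level == "Session"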
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.database_accounts.begin_create_or_update( + resource_group_name=resource_group.name, + account_name="str", + create_update_parameters={ + "databaseAccountOfferType": "Standard", + "locations": [ + { + "documentEndpoint": "str", + "failoverPriority": 0, + "id": "str", + "isZoneRedundant": bool, + "locationName": "str", + "provisioningState": "str", + } + ], + "analyticalStorageConfiguration": {"schemaType": "str"}, + "apiProperties": {"serverVersion": "str"}, + "backupPolicy": "backup_policy", + "capabilities": [{"name": "str"}], + "capacity": {"totalThroughputLimit": 0}, + "connectorOffer": "str", + "consistencyPolicy": { + "defaultConsistencyLevel": "str", + "maxIntervalInSeconds": 0, + "maxStalenessPrefix": 0, + }, + "cors": [ + { + "allowedOrigins": "str", + "allowedHeaders": "str", + "allowedMethods": "str", + "exposedHeaders": "str", + "maxAgeInSeconds": 0, + } + ], + "createMode": "Default", + "customerManagedKeyStatus": "str", + "defaultIdentity": "str", + "disableKeyBasedMetadataWriteAccess": bool, + "disableLocalAuth": bool, + "enableAnalyticalStorage": bool, + "enableAutomaticFailover": bool, + "enableBurstCapacity": bool, + "enableCassandraConnector": bool, + "enableFreeTier": bool, + "enableMultipleWriteLocations": bool, + "enablePartitionMerge": bool, + "id": "str", + "identity": { + "principalId": "str", + "tenantId": "str", + "type": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "ipRules": [{"ipAddressOrRange": "str"}], + "isVirtualNetworkFilterEnabled": bool, + "keyVaultKeyUri": "str", + "keysMetadata": { + "primaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "primaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + }, + "kind": "str", + "location": "str", + "minimalTlsVersion": "str", + "name": "str", + "networkAclBypass": "str", + "networkAclBypassResourceIds": ["str"], + "publicNetworkAccess": "str", + "restoreParameters": { + "databasesToRestore": [{"collectionNames": ["str"], "databaseName": "str"}], + "gremlinDatabasesToRestore": [{"databaseName": "str", "graphNames": ["str"]}], + "restoreMode": "str", + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + "tablesToRestore": ["str"], + }, + "tags": {"str": "str"}, + "type": "str", + "virtualNetworkRules": [{"id": "str", "ignoreMissingVNetServiceEndpoint": bool}], + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.database_accounts.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_failover_priority_change(self, resource_group): + response = self.client.database_accounts.begin_failover_priority_change( + resource_group_name=resource_group.name, + account_name="str", + failover_parameters={"failoverPolicies": [{"failoverPriority": 0, "id": "str", "locationName": "str"}]}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.database_accounts.list( + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.database_accounts.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_keys(self, resource_group): + response = self.client.database_accounts.list_keys( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_connection_strings(self, resource_group): + response = self.client.database_accounts.list_connection_strings( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_offline_region(self, resource_group): + response = self.client.database_accounts.begin_offline_region( + resource_group_name=resource_group.name, + account_name="str", + region_parameter_for_offline={"region": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_online_region(self, resource_group): + response = self.client.database_accounts.begin_online_region( + resource_group_name=resource_group.name, + account_name="str", + region_parameter_for_online={"region": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_read_only_keys(self, resource_group): + response = self.client.database_accounts.get_read_only_keys( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
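For the key-listing operations the placeholder check logic could simply assert that the result carries key material. A sketch, assuming the attribute names of the generated `DatabaseAccountListKeysResult` (the account name is hypothetical):

keys = self.client.database_accounts.list_keys(
    resource_group_name=resource_group.name,
    account_name="my-cosmos-account",  # hypothetical
    api_version="2024-08-15",
)
# The read-write result also carries the read-only keys via its base type.
assert keys.primary_master_key
assert keys.secondary_master_key
assert keys.primary_readonly_master_key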
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_read_only_keys(self, resource_group): + response = self.client.database_accounts.list_read_only_keys( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_regenerate_key(self, resource_group): + response = self.client.database_accounts.begin_regenerate_key( + resource_group_name=resource_group.name, + account_name="str", + key_to_regenerate={"keyKind": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_check_name_exists(self, resource_group): + response = self.client.database_accounts.check_name_exists( + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.database_accounts.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_usages(self, resource_group): + response = self.client.database_accounts.list_usages( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metric_definitions(self, resource_group): + response = self.client.database_accounts.list_metric_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_accounts_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_accounts_operations_async.py new file mode 100644 index 000000000000..db9872a38c77 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_accounts_operations_async.py @@ -0,0 +1,396 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementDatabaseAccountsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.database_accounts.get( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.database_accounts.begin_update( + resource_group_name=resource_group.name, + account_name="str", + update_parameters={ + "analyticalStorageConfiguration": {"schemaType": "str"}, + "apiProperties": {"serverVersion": "str"}, + "backupPolicy": "backup_policy", + "capabilities": [{"name": "str"}], + "capacity": {"totalThroughputLimit": 0}, + "connectorOffer": "str", + "consistencyPolicy": { + "defaultConsistencyLevel": "str", + "maxIntervalInSeconds": 0, + "maxStalenessPrefix": 0, + }, + "cors": [ + { + "allowedOrigins": "str", + "allowedHeaders": "str", + "allowedMethods": "str", + "exposedHeaders": "str", + "maxAgeInSeconds": 0, + } + ], + "customerManagedKeyStatus": "str", + "defaultIdentity": "str", + "disableKeyBasedMetadataWriteAccess": bool, + "disableLocalAuth": bool, + "enableAnalyticalStorage": bool, + "enableAutomaticFailover": bool, + "enableBurstCapacity": bool, + "enableCassandraConnector": bool, + "enableFreeTier": bool, + "enableMultipleWriteLocations": bool, + "enablePartitionMerge": bool, + "identity": { + "principalId": "str", + "tenantId": "str", + "type": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "ipRules": [{"ipAddressOrRange": "str"}], + "isVirtualNetworkFilterEnabled": bool, + "keyVaultKeyUri": "str", + "keysMetadata": { + "primaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "primaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + }, + "location": "str", + "locations": [ + { + "documentEndpoint": "str", + "failoverPriority": 0, + "id": "str", + "isZoneRedundant": bool, + "locationName": "str", + "provisioningState": "str", + } + ], + "minimalTlsVersion": "str", + "networkAclBypass": "str", + "networkAclBypassResourceIds": ["str"], + "publicNetworkAccess": "str", + "tags": {"str": "str"}, + "virtualNetworkRules": [{"id": "str", "ignoreMissingVNetServiceEndpoint": bool}], + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.database_accounts.begin_create_or_update( + resource_group_name=resource_group.name, + account_name="str", + create_update_parameters={ + "databaseAccountOfferType": "Standard", + "locations": [ + { + "documentEndpoint": "str", + "failoverPriority": 0, + "id": "str", + "isZoneRedundant": bool, + "locationName": "str", + "provisioningState": "str", + } + ], + "analyticalStorageConfiguration": {"schemaType": "str"}, + "apiProperties": {"serverVersion": "str"}, + "backupPolicy": "backup_policy", + "capabilities": [{"name": "str"}], + "capacity": {"totalThroughputLimit": 0}, + "connectorOffer": "str", + "consistencyPolicy": { + "defaultConsistencyLevel": "str", + "maxIntervalInSeconds": 0, + "maxStalenessPrefix": 0, + }, + "cors": [ + { + "allowedOrigins": "str", + "allowedHeaders": "str", + "allowedMethods": "str", + "exposedHeaders": "str", + "maxAgeInSeconds": 0, + } + ], + "createMode": "Default", + "customerManagedKeyStatus": "str", + "defaultIdentity": "str", + "disableKeyBasedMetadataWriteAccess": bool, + "disableLocalAuth": bool, + "enableAnalyticalStorage": bool, + "enableAutomaticFailover": bool, + "enableBurstCapacity": bool, + "enableCassandraConnector": bool, + "enableFreeTier": bool, + "enableMultipleWriteLocations": bool, + "enablePartitionMerge": bool, + "id": "str", + "identity": { + "principalId": "str", + "tenantId": "str", + "type": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "ipRules": [{"ipAddressOrRange": "str"}], + "isVirtualNetworkFilterEnabled": bool, + "keyVaultKeyUri": "str", + "keysMetadata": { + "primaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "primaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + "secondaryReadonlyMasterKey": {"generationTime": "2020-02-20 00:00:00"}, + }, + "kind": "str", + "location": "str", + "minimalTlsVersion": "str", + "name": "str", + "networkAclBypass": "str", + "networkAclBypassResourceIds": ["str"], + "publicNetworkAccess": "str", + "restoreParameters": { + "databasesToRestore": [{"collectionNames": ["str"], "databaseName": "str"}], + "gremlinDatabasesToRestore": [{"databaseName": "str", "graphNames": ["str"]}], + "restoreMode": "str", + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + "tablesToRestore": ["str"], + }, + "tags": {"str": "str"}, + "type": "str", + "virtualNetworkRules": [{"id": "str", "ignoreMissingVNetServiceEndpoint": bool}], + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.database_accounts.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_failover_priority_change(self, resource_group): + response = await ( + await self.client.database_accounts.begin_failover_priority_change( + resource_group_name=resource_group.name, + account_name="str", + failover_parameters={"failoverPolicies": [{"failoverPriority": 0, "id": "str", "locationName": "str"}]}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.database_accounts.list( + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.database_accounts.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_keys(self, resource_group): + response = await self.client.database_accounts.list_keys( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_connection_strings(self, resource_group): + response = await self.client.database_accounts.list_connection_strings( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_offline_region(self, resource_group): + response = await ( + await self.client.database_accounts.begin_offline_region( + resource_group_name=resource_group.name, + account_name="str", + region_parameter_for_offline={"region": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_online_region(self, resource_group): + response = await ( + await self.client.database_accounts.begin_online_region( + resource_group_name=resource_group.name, + account_name="str", + region_parameter_for_online={"region": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_read_only_keys(self, resource_group): + response = await self.client.database_accounts.get_read_only_keys( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
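Note the asymmetry in the async tests above: single-resource calls (`get`, `list_keys`, `get_read_only_keys`) are awaited directly, while the list-style operations are not awaited because they return an `AsyncItemPaged` that is consumed with `async for`. A sketch showing both shapes together (the account name is hypothetical):

# Paged: do not await the call itself; iterate the AsyncItemPaged it returns.
accounts = [a async for a in self.client.database_accounts.list(api_version="2024-08-15")]

# Single resource: await the call directly.
account = await self.client.database_accounts.get(
    resource_group_name=resource_group.name,
    account_name="my-cosmos-account",  # hypothetical
    api_version="2024-08-15",
)
assert any(a.name == account.name for a in accounts)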
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_read_only_keys(self, resource_group): + response = await self.client.database_accounts.list_read_only_keys( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_regenerate_key(self, resource_group): + response = await ( + await self.client.database_accounts.begin_regenerate_key( + resource_group_name=resource_group.name, + account_name="str", + key_to_regenerate={"keyKind": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_check_name_exists(self, resource_group): + response = await self.client.database_accounts.check_name_exists( + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.database_accounts.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_usages(self, resource_group): + response = self.client.database_accounts.list_usages( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metric_definitions(self, resource_group): + response = self.client.database_accounts.list_metric_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_operations.py new file mode 100644 index 000000000000..a653a14fc0b8 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_operations.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementDatabaseOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.database.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_usages(self, resource_group): + response = self.client.database.list_usages( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metric_definitions(self, resource_group): + response = self.client.database.list_metric_definitions( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_operations_async.py new file mode 100644 index 000000000000..4827a58d3a6a --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_database_operations_async.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementDatabaseOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.database.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_usages(self, resource_group): + response = self.client.database.list_usages( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metric_definitions(self, resource_group): + response = self.client.database.list_metric_definitions( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_gremlin_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_gremlin_resources_operations.py new file mode 100644 index 000000000000..32146e56e17f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_gremlin_resources_operations.py @@ -0,0 +1,339 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementGremlinResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_gremlin_databases(self, resource_group): + response = self.client.gremlin_resources.list_gremlin_databases( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_gremlin_database(self, resource_group): + response = self.client.gremlin_resources.get_gremlin_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_gremlin_database(self, resource_group): + response = self.client.gremlin_resources.begin_create_update_gremlin_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + create_update_gremlin_database_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_gremlin_database(self, resource_group): + response = self.client.gremlin_resources.begin_delete_gremlin_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_gremlin_database_throughput(self, resource_group): + response = self.client.gremlin_resources.get_gremlin_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
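+
+        # Hedged sketch of check logic for the throughput lookup above; the
+        # ThroughputSettingsGetResults attribute names are assumptions to confirm
+        # against the installed models:
+        #     assert response.resource is not None
+        #     assert response.resource.throughput or response.resource.autoscale_settings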
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_gremlin_database_throughput(self, resource_group): + response = self.client.gremlin_resources.begin_update_gremlin_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_gremlin_database_to_autoscale(self, resource_group): + response = self.client.gremlin_resources.begin_migrate_gremlin_database_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_gremlin_database_to_manual_throughput(self, resource_group): + response = self.client.gremlin_resources.begin_migrate_gremlin_database_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_gremlin_graphs(self, resource_group): + response = self.client.gremlin_resources.list_gremlin_graphs( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_gremlin_graph(self, resource_group): + response = self.client.gremlin_resources.get_gremlin_graph( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_gremlin_graph(self, resource_group): + response = self.client.gremlin_resources.begin_create_update_gremlin_graph( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + create_update_gremlin_graph_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "conflictResolutionPolicy": { + "conflictResolutionPath": "str", + "conflictResolutionProcedure": "str", + "mode": "LastWriterWins", + }, + "createMode": "Default", + "defaultTtl": 0, + "indexingPolicy": { + "automatic": bool, + "compositeIndexes": [[{"order": "str", "path": "str"}]], + "excludedPaths": [{"path": "str"}], + "includedPaths": [ + {"indexes": [{"dataType": "String", "kind": "Hash", "precision": 0}], "path": "str"} + ], + "indexingMode": "consistent", + "spatialIndexes": [{"path": "str", "types": ["str"]}], + }, + "partitionKey": {"kind": "Hash", "paths": ["str"], "systemKey": bool, "version": 0}, + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + "uniqueKeyPolicy": {"uniqueKeys": [{"paths": ["str"]}]}, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_gremlin_graph(self, resource_group): + response = self.client.gremlin_resources.begin_delete_gremlin_graph( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_gremlin_graph_throughput(self, resource_group): + response = self.client.gremlin_resources.get_gremlin_graph_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_gremlin_graph_throughput(self, resource_group): + response = self.client.gremlin_resources.begin_update_gremlin_graph_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_gremlin_graph_to_autoscale(self, resource_group): + response = self.client.gremlin_resources.begin_migrate_gremlin_graph_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_gremlin_graph_to_manual_throughput(self, resource_group): + response = self.client.gremlin_resources.begin_migrate_gremlin_graph_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = self.client.gremlin_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_gremlin_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_gremlin_resources_operations_async.py new file mode 100644 index 000000000000..4015eb3dfe59 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_gremlin_resources_operations_async.py @@ -0,0 +1,362 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementGremlinResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_gremlin_databases(self, resource_group): + response = self.client.gremlin_resources.list_gremlin_databases( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
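+
+        # Hedged sketch: the call above returns an async pageable, so a minimal
+        # check could simply confirm that iteration succeeded and that each item
+        # (a GremlinDatabaseGetResults) carries a name:
+        #     assert all(db.name for db in result)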
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_gremlin_database(self, resource_group): + response = await self.client.gremlin_resources.get_gremlin_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_gremlin_database(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_create_update_gremlin_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + create_update_gremlin_database_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_gremlin_database(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_delete_gremlin_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_gremlin_database_throughput(self, resource_group): + response = await self.client.gremlin_resources.get_gremlin_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_gremlin_database_throughput(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_update_gremlin_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_gremlin_database_to_autoscale(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_migrate_gremlin_database_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_gremlin_database_to_manual_throughput(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_migrate_gremlin_database_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_gremlin_graphs(self, resource_group): + response = self.client.gremlin_resources.list_gremlin_graphs( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_gremlin_graph(self, resource_group): + response = await self.client.gremlin_resources.get_gremlin_graph( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_gremlin_graph(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_create_update_gremlin_graph( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + create_update_gremlin_graph_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "conflictResolutionPolicy": { + "conflictResolutionPath": "str", + "conflictResolutionProcedure": "str", + "mode": "LastWriterWins", + }, + "createMode": "Default", + "defaultTtl": 0, + "indexingPolicy": { + "automatic": bool, + "compositeIndexes": [[{"order": "str", "path": "str"}]], + "excludedPaths": [{"path": "str"}], + "includedPaths": [ + {"indexes": [{"dataType": "String", "kind": "Hash", "precision": 0}], "path": "str"} + ], + "indexingMode": "consistent", + "spatialIndexes": [{"path": "str", "types": ["str"]}], + }, + "partitionKey": {"kind": "Hash", "paths": ["str"], "systemKey": bool, "version": 0}, + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + "uniqueKeyPolicy": {"uniqueKeys": [{"paths": ["str"]}]}, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
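+
+        # Hedged sketch of check logic for the graph upsert above; the
+        # GremlinGraphGetResults attribute names are assumptions to verify locally:
+        #     assert response.resource is not None
+        #     assert response.resource.partition_key.kind == "Hash"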
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_gremlin_graph(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_delete_gremlin_graph( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_gremlin_graph_throughput(self, resource_group): + response = await self.client.gremlin_resources.get_gremlin_graph_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_gremlin_graph_throughput(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_update_gremlin_graph_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_gremlin_graph_to_autoscale(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_migrate_gremlin_graph_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_gremlin_graph_to_manual_throughput(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_migrate_gremlin_graph_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = await ( + await self.client.gremlin_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + graph_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_locations_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_locations_operations.py new file mode 100644 index 000000000000..553979647f95 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_locations_operations.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementLocationsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.locations.list( + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.locations.get( + location="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_locations_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_locations_operations_async.py new file mode 100644 index 000000000000..07fe77be07f5 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_locations_operations_async.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementLocationsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.locations.list( + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.locations.get( + location="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_mongo_db_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_mongo_db_resources_operations.py new file mode 100644 index 000000000000..1a1ce74f949b --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_mongo_db_resources_operations.py @@ -0,0 +1,440 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementMongoDBResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_mongo_db_databases(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_db_databases( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_mongo_db_database(self, resource_group): + response = self.client.mongo_db_resources.get_mongo_db_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
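+
+        # Hedged sketch of check logic for the database lookup above; the
+        # MongoDBDatabaseGetResults attribute names are assumptions to confirm
+        # against the installed models:
+        #     assert response.resource is not None
+        #     assert response.resource.id  # the database name echoed back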
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_mongo_db_database(self, resource_group): + response = self.client.mongo_db_resources.begin_create_update_mongo_db_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + create_update_mongo_db_database_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_mongo_db_database(self, resource_group): + response = self.client.mongo_db_resources.begin_delete_mongo_db_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_mongo_db_database_throughput(self, resource_group): + response = self.client.mongo_db_resources.get_mongo_db_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_mongo_db_database_throughput(self, resource_group): + response = self.client.mongo_db_resources.begin_update_mongo_db_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_mongo_db_database_to_autoscale(self, resource_group): + response = self.client.mongo_db_resources.begin_migrate_mongo_db_database_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_mongo_db_database_to_manual_throughput(self, resource_group): + response = self.client.mongo_db_resources.begin_migrate_mongo_db_database_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_mongo_db_collections(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_db_collections( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_mongo_db_collection(self, resource_group): + response = self.client.mongo_db_resources.get_mongo_db_collection( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_mongo_db_collection(self, resource_group): + response = self.client.mongo_db_resources.begin_create_update_mongo_db_collection( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + create_update_mongo_db_collection_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "createMode": "Default", + "indexes": [{"key": {"keys": ["str"]}, "options": {"expireAfterSeconds": 0, "unique": bool}}], + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + "shardKey": {"str": "str"}, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_mongo_db_collection(self, resource_group): + response = self.client.mongo_db_resources.begin_delete_mongo_db_collection( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_mongo_db_collection_throughput(self, resource_group): + response = self.client.mongo_db_resources.get_mongo_db_collection_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_mongo_db_collection_throughput(self, resource_group): + response = self.client.mongo_db_resources.begin_update_mongo_db_collection_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_mongo_db_collection_to_autoscale(self, resource_group): + response = self.client.mongo_db_resources.begin_migrate_mongo_db_collection_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_mongo_db_collection_to_manual_throughput(self, resource_group): + response = self.client.mongo_db_resources.begin_migrate_mongo_db_collection_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_mongo_role_definition(self, resource_group): + response = self.client.mongo_db_resources.get_mongo_role_definition( + mongo_role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_mongo_role_definition(self, resource_group): + response = self.client.mongo_db_resources.begin_create_update_mongo_role_definition( + mongo_role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_mongo_role_definition_parameters={ + "databaseName": "str", + "privileges": [{"actions": ["str"], "resource": {"collection": "str", "db": "str"}}], + "roleName": "str", + "roles": [{"db": "str", "role": "str"}], + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_mongo_role_definition(self, resource_group): + response = self.client.mongo_db_resources.begin_delete_mongo_role_definition( + mongo_role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_mongo_role_definitions(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_role_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_mongo_user_definition(self, resource_group): + response = self.client.mongo_db_resources.get_mongo_user_definition( + mongo_user_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_mongo_user_definition(self, resource_group): + response = self.client.mongo_db_resources.begin_create_update_mongo_user_definition( + mongo_user_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_mongo_user_definition_parameters={ + "customData": "str", + "databaseName": "str", + "mechanisms": "str", + "password": "str", + "roles": [{"db": "str", "role": "str"}], + "userName": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_mongo_user_definition(self, resource_group): + response = self.client.mongo_db_resources.begin_delete_mongo_user_definition( + mongo_user_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_mongo_user_definitions(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_user_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = self.client.mongo_db_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
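+
+        # Hedged sketch of check logic for the continuous-backup call above; the
+        # BackupInformation attribute names are assumptions to double-check against
+        # the installed models:
+        #     assert response.continuous_backup_information is not None
+        #     assert response.continuous_backup_information.latest_restorable_timestamp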
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_mongo_db_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_mongo_db_resources_operations_async.py new file mode 100644 index 000000000000..ecc8700fe85d --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_mongo_db_resources_operations_async.py @@ -0,0 +1,471 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementMongoDBResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_mongo_db_databases(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_db_databases( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_mongo_db_database(self, resource_group): + response = await self.client.mongo_db_resources.get_mongo_db_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_mongo_db_database(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_create_update_mongo_db_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + create_update_mongo_db_database_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_mongo_db_database(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_delete_mongo_db_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_mongo_db_database_throughput(self, resource_group): + response = await self.client.mongo_db_resources.get_mongo_db_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_mongo_db_database_throughput(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_update_mongo_db_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_mongo_db_database_to_autoscale(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_migrate_mongo_db_database_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_mongo_db_database_to_manual_throughput(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_migrate_mongo_db_database_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_mongo_db_collections(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_db_collections( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_mongo_db_collection(self, resource_group): + response = await self.client.mongo_db_resources.get_mongo_db_collection( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_mongo_db_collection(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_create_update_mongo_db_collection( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + create_update_mongo_db_collection_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "createMode": "Default", + "indexes": [{"key": {"keys": ["str"]}, "options": {"expireAfterSeconds": 0, "unique": bool}}], + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + "shardKey": {"str": "str"}, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_mongo_db_collection(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_delete_mongo_db_collection( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_mongo_db_collection_throughput(self, resource_group): + response = await self.client.mongo_db_resources.get_mongo_db_collection_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_mongo_db_collection_throughput(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_update_mongo_db_collection_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_mongo_db_collection_to_autoscale(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_migrate_mongo_db_collection_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_mongo_db_collection_to_manual_throughput(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_migrate_mongo_db_collection_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_mongo_role_definition(self, resource_group): + response = await self.client.mongo_db_resources.get_mongo_role_definition( + mongo_role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_mongo_role_definition(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_create_update_mongo_role_definition( + mongo_role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_mongo_role_definition_parameters={ + "databaseName": "str", + "privileges": [{"actions": ["str"], "resource": {"collection": "str", "db": "str"}}], + "roleName": "str", + "roles": [{"db": "str", "role": "str"}], + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_mongo_role_definition(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_delete_mongo_role_definition( + mongo_role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_mongo_role_definitions(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_role_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_mongo_user_definition(self, resource_group): + response = await self.client.mongo_db_resources.get_mongo_user_definition( + mongo_user_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_mongo_user_definition(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_create_update_mongo_user_definition( + mongo_user_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_mongo_user_definition_parameters={ + "customData": "str", + "databaseName": "str", + "mechanisms": "str", + "password": "str", + "roles": [{"db": "str", "role": "str"}], + "userName": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_mongo_user_definition(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_delete_mongo_user_definition( + mongo_user_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_mongo_user_definitions(self, resource_group): + response = self.client.mongo_db_resources.list_mongo_user_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = await ( + await self.client.mongo_db_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + collection_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_notebook_workspaces_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_notebook_workspaces_operations.py new file mode 100644 index 000000000000..0454f2e5a935 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_notebook_workspaces_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementNotebookWorkspacesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_database_account(self, resource_group): + response = self.client.notebook_workspaces.list_by_database_account( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.notebook_workspaces.get( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.notebook_workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + notebook_create_update_parameters={"id": "str", "name": "str", "type": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.notebook_workspaces.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_connection_info(self, resource_group): + response = self.client.notebook_workspaces.list_connection_info( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_regenerate_auth_token(self, resource_group): + response = self.client.notebook_workspaces.begin_regenerate_auth_token( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_start(self, resource_group): + response = self.client.notebook_workspaces.begin_start( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_notebook_workspaces_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_notebook_workspaces_operations_async.py new file mode 100644 index 000000000000..d6d87b993fc4 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_notebook_workspaces_operations_async.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementNotebookWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_database_account(self, resource_group): + response = self.client.notebook_workspaces.list_by_database_account( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.notebook_workspaces.get( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.notebook_workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + notebook_create_update_parameters={"id": "str", "name": "str", "type": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.notebook_workspaces.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_connection_info(self, resource_group): + response = await self.client.notebook_workspaces.list_connection_info( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_regenerate_auth_token(self, resource_group): + response = await ( + await self.client.notebook_workspaces.begin_regenerate_auth_token( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_start(self, resource_group): + response = await ( + await self.client.notebook_workspaces.begin_start( + resource_group_name=resource_group.name, + account_name="str", + notebook_workspace_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_operations.py new file mode 100644 index 000000000000..4097831969d2 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_operations_async.py new file mode 100644 index 000000000000..a90f811a5df3 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_operations.py new file mode 100644 index 000000000000..02fb73a2bc51 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_operations.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPartitionKeyRangeIdOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.partition_key_range_id.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + partition_key_range_id="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_operations_async.py new file mode 100644 index 000000000000..690749c6db6a --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_operations_async.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPartitionKeyRangeIdOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.partition_key_range_id.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + database_rid="str", + collection_rid="str", + partition_key_range_id="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_region_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_region_operations.py new file mode 100644 index 000000000000..e527d1ad28b4 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_region_operations.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPartitionKeyRangeIdRegionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.partition_key_range_id_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + database_rid="str", + collection_rid="str", + partition_key_range_id="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
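The generated metrics tests above are skipped and pass the literal placeholder "str" for every argument, including the OData-style `filter`. As a rough, hypothetical sketch of how such a test body might be filled in outside the recorded-test harness (the resource group, account, RIDs, metric name, and time window below are assumptions, not part of the generated code, and the filter grammar should be checked against the Cosmos DB metrics documentation):

# Hypothetical sketch only -- names, metric, and time window are assumptions.
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")

end = datetime.datetime.now(datetime.timezone.utc)
start = end - datetime.timedelta(hours=1)
time_format = "%Y-%m-%dT%H:%M:%SZ"
# Assumed filter grammar (metric name, timeGrain, start/end); verify before use.
metrics_filter = (
    "(name.value eq 'Total Requests') and timeGrain eq duration'PT5M' "
    f"and startTime eq '{start.strftime(time_format)}' "
    f"and endTime eq '{end.strftime(time_format)}'"
)

for metric in client.partition_key_range_id_region.list_metrics(
    resource_group_name="my-rg",
    account_name="my-cosmos-account",
    region="East US",
    database_rid="<database-rid>",
    collection_rid="<collection-rid>",
    partition_key_range_id="0",
    filter=metrics_filter,
):
    # Replace with real checks; the generated tests leave this to the author.
    assert metric.name is not None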
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_region_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_region_operations_async.py new file mode 100644 index 000000000000..aecd53e18f6f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_partition_key_range_id_region_operations_async.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPartitionKeyRangeIdRegionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.partition_key_range_id_region.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + region="str", + database_rid="str", + collection_rid="str", + partition_key_range_id="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_operations.py new file mode 100644 index 000000000000..1ea1133871a7 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_operations.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPercentileOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.percentile.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_operations_async.py new file mode 100644 index 000000000000..660bed633ac3 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_operations_async.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPercentileOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.percentile.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_source_target_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_source_target_operations.py new file mode 100644 index 000000000000..2820b4a6c83f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_source_target_operations.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPercentileSourceTargetOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.percentile_source_target.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + source_region="str", + target_region="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_source_target_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_source_target_operations_async.py new file mode 100644 index 000000000000..f9a0ee9964ca --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_source_target_operations_async.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPercentileSourceTargetOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.percentile_source_target.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + source_region="str", + target_region="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
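A point worth noting about the async variants above: the `list_*` operations are not awaited; they return an async pageable that is consumed with `async for`, which is why the generated tests build the result list with an async comprehension. A minimal sketch of that pattern, with hypothetical resource names and a placeholder filter value:

# Hypothetical sketch -- resource names and the filter value are assumptions.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with CosmosDBManagementClient(credential, "<subscription-id>") as client:
            # Note: the call itself is not awaited; it returns an async pageable.
            pager = client.percentile_source_target.list_metrics(
                resource_group_name="my-rg",
                account_name="my-cosmos-account",
                source_region="West US",
                target_region="East US",
                filter="<metrics-filter-expression>",
            )
            metrics = [m async for m in pager]
            print(len(metrics))


asyncio.run(main())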
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_target_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_target_operations.py new file mode 100644 index 000000000000..3562dadeaeea --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_target_operations.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPercentileTargetOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_metrics(self, resource_group): + response = self.client.percentile_target.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + target_region="str", + filter="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_target_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_target_operations_async.py new file mode 100644 index 000000000000..91e96ca2ee47 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_percentile_target_operations_async.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPercentileTargetOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_metrics(self, resource_group): + response = self.client.percentile_target.list_metrics( + resource_group_name=resource_group.name, + account_name="str", + target_region="str", + filter="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_endpoint_connections_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..b25794ad0241 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_endpoint_connections_operations.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_database_account(self, resource_group): + response = self.client.private_endpoint_connections.list_by_database_account( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.private_endpoint_connections.get( + resource_group_name=resource_group.name, + account_name="str", + private_endpoint_connection_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + account_name="str", + private_endpoint_connection_name="str", + parameters={ + "groupId": "str", + "id": "str", + "name": "str", + "privateEndpoint": {"id": "str"}, + "privateLinkServiceConnectionState": {"actionsRequired": "str", "description": "str", "status": "str"}, + "provisioningState": "str", + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + private_endpoint_connection_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_endpoint_connections_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..87366a60f7bd --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_endpoint_connections_operations_async.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_database_account(self, resource_group): + response = self.client.private_endpoint_connections.list_by_database_account( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.private_endpoint_connections.get( + resource_group_name=resource_group.name, + account_name="str", + private_endpoint_connection_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + account_name="str", + private_endpoint_connection_name="str", + parameters={ + "groupId": "str", + "id": "str", + "name": "str", + "privateEndpoint": {"id": "str"}, + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "provisioningState": "str", + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + private_endpoint_connection_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_link_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_link_resources_operations.py new file mode 100644 index 000000000000..e7cdf9f47933 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_link_resources_operations.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_database_account(self, resource_group): + response = self.client.private_link_resources.list_by_database_account( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.private_link_resources.get( + resource_group_name=resource_group.name, + account_name="str", + group_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
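The async long-running operations in the files above use a double await: `begin_*` is awaited to obtain an `AsyncLROPoller`, and `poller.result()` is itself awaited to poll until the service returns the final result. A minimal standalone sketch of that pattern, with hypothetical resource names:

# Hypothetical sketch -- resource names are assumptions, not from the diff.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with CosmosDBManagementClient(credential, "<subscription-id>") as client:
            # First await: start the operation and get an AsyncLROPoller.
            poller = await client.private_endpoint_connections.begin_delete(
                resource_group_name="my-rg",
                account_name="my-cosmos-account",
                private_endpoint_connection_name="my-connection",
            )
            # Second await: poll until the service reports the final result.
            await poller.result()


asyncio.run(main())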
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_link_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_link_resources_operations_async.py new file mode 100644 index 000000000000..93b9a0864e36 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_private_link_resources_operations_async.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_database_account(self, resource_group): + response = self.client.private_link_resources.list_by_database_account( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.private_link_resources.get( + resource_group_name=resource_group.name, + account_name="str", + group_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_database_accounts_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_database_accounts_operations.py new file mode 100644 index 000000000000..d14381d3655b --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_database_accounts_operations.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableDatabaseAccountsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_location(self, resource_group): + response = self.client.restorable_database_accounts.list_by_location( + location="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_database_accounts.list( + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_by_location(self, resource_group): + response = self.client.restorable_database_accounts.get_by_location( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_database_accounts_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_database_accounts_operations_async.py new file mode 100644 index 000000000000..c4b673a42dde --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_database_accounts_operations_async.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableDatabaseAccountsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_location(self, resource_group): + response = self.client.restorable_database_accounts.list_by_location( + location="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_database_accounts.list( + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_by_location(self, resource_group): + response = await self.client.restorable_database_accounts.get_by_location( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_databases_operations.py new file mode 100644 index 000000000000..7e3f70e5da3a --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_databases_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableGremlinDatabasesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_gremlin_databases.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_databases_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_databases_operations_async.py new file mode 100644 index 000000000000..2906af86599b --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_databases_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableGremlinDatabasesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_gremlin_databases.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_graphs_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_graphs_operations.py new file mode 100644 index 000000000000..8c68b09df592 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_graphs_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableGremlinGraphsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_gremlin_graphs.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_graphs_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_graphs_operations_async.py new file mode 100644 index 000000000000..8635bee2e184 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_graphs_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableGremlinGraphsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_gremlin_graphs.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_resources_operations.py new file mode 100644 index 000000000000..2442d0ec6f49 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_resources_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableGremlinResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_gremlin_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_resources_operations_async.py new file mode 100644 index 000000000000..9377c87873f4 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_gremlin_resources_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableGremlinResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_gremlin_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_collections_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_collections_operations.py new file mode 100644 index 000000000000..8e73eacd5e65 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_collections_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableMongodbCollectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_mongodb_collections.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
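
All of these generated scaffolds are skipped by default and only enumerate results without asserting anything. The following is a minimal sketch, not part of the generated output, of how one of the sync scaffolds above might be filled in. It assumes the subscription already contains a restorable MongoDB API account in the target location, and it assumes the restorable database account's resource name can be used as the instance_id expected by the restorable-resource operations; the class and test names are illustrative.

import pytest
from azure.mgmt.cosmosdb import CosmosDBManagementClient

from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy

AZURE_LOCATION = "eastus"


class TestRestorableMongodbCollectionsFilledIn(AzureMgmtRecordedTestCase):
    def setup_method(self, method):
        self.client = self.create_mgmt_client(CosmosDBManagementClient)

    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
    @recorded_by_proxy
    def test_list_restorable_mongodb_collections(self, resource_group):
        # Assumption: the restorable database account's name is the GUID used as
        # instance_id by the restorable-resource list operations, and at least one
        # restorable MongoDB API account exists in this location.
        accounts = list(
            self.client.restorable_database_accounts.list_by_location(location=AZURE_LOCATION)
        )
        assert accounts, "expected at least one restorable database account in the location"
        instance_id = accounts[0].name

        response = self.client.restorable_mongodb_collections.list(
            location=AZURE_LOCATION,
            instance_id=instance_id,
        )
        result = [r for r in response]
        # Basic sanity checks instead of the generated "add some check logic" placeholder.
        for item in result:
            assert item.name
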
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_collections_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_collections_operations_async.py new file mode 100644 index 000000000000..99d5e223928e --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_collections_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableMongodbCollectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_mongodb_collections.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_databases_operations.py new file mode 100644 index 000000000000..56cedccada22 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_databases_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableMongodbDatabasesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_mongodb_databases.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_databases_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_databases_operations_async.py new file mode 100644 index 000000000000..ca71ba148534 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_databases_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableMongodbDatabasesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_mongodb_databases.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_resources_operations.py new file mode 100644 index 000000000000..85a12dd3a369 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_resources_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableMongodbResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_mongodb_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_resources_operations_async.py new file mode 100644 index 000000000000..6c3576ca1557 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_mongodb_resources_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableMongodbResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_mongodb_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_containers_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_containers_operations.py new file mode 100644 index 000000000000..f265cc9238f2 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_containers_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableSqlContainersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_sql_containers.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_containers_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_containers_operations_async.py new file mode 100644 index 000000000000..3868e02a4cc8 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_containers_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableSqlContainersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_sql_containers.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
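
The async scaffolds above exercise the same listing calls through the aio client inside the recorded-test harness. As a point of comparison, here is a minimal sketch of the restorable-SQL-containers listing performed directly against the async client, outside the test harness. It assumes azure-identity is installed and that the subscription id, location, and instance id placeholders are replaced with real values; the function name and placeholders are illustrative, not part of the generated tests.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient


async def list_restorable_sql_containers(subscription_id: str, location: str, instance_id: str):
    # Same call shape as the generated async tests, but driven by a real credential.
    credential = DefaultAzureCredential()
    client = CosmosDBManagementClient(credential, subscription_id)
    try:
        return [
            container
            async for container in client.restorable_sql_containers.list(
                location=location,
                instance_id=instance_id,
                api_version="2024-08-15",
            )
        ]
    finally:
        await client.close()
        await credential.close()


# Example invocation (placeholders, not real identifiers):
# asyncio.run(list_restorable_sql_containers("<subscription-id>", "eastus", "<instance-id>"))
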
diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_databases_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_databases_operations.py new file mode 100644 index 000000000000..4708d00a3939 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_databases_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableSqlDatabasesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_sql_databases.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_databases_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_databases_operations_async.py new file mode 100644 index 000000000000..4f959dd3ff5b --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_databases_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableSqlDatabasesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_sql_databases.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_resources_operations.py new file mode 100644 index 000000000000..e08b885ba42e --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_resources_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableSqlResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_sql_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_resources_operations_async.py new file mode 100644 index 000000000000..a156fc2237a2 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_sql_resources_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableSqlResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_sql_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_table_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_table_resources_operations.py new file mode 100644 index 000000000000..f02853bfe1d3 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_table_resources_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableTableResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_table_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_table_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_table_resources_operations_async.py new file mode 100644 index 000000000000..c8ef6faa9ef5 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_table_resources_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableTableResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_table_resources.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_tables_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_tables_operations.py new file mode 100644 index 000000000000..b0b74c9db981 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_tables_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableTablesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.restorable_tables.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_tables_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_tables_operations_async.py new file mode 100644 index 000000000000..52f96bb058a4 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_restorable_tables_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementRestorableTablesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.restorable_tables.list( + location="str", + instance_id="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_service_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_service_operations.py new file mode 100644 index 000000000000..de14b67e43f4 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_service_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementServiceOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.service.list( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create(self, resource_group): + response = self.client.service.begin_create( + resource_group_name=resource_group.name, + account_name="str", + service_name="str", + create_update_parameters={"properties": "service_resource_create_update_properties"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.service.get( + resource_group_name=resource_group.name, + account_name="str", + service_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.service.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + service_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_service_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_service_operations_async.py new file mode 100644 index 000000000000..9e68d98f40c6 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_service_operations_async.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementServiceOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.service.list( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create(self, resource_group): + response = await ( + await self.client.service.begin_create( + resource_group_name=resource_group.name, + account_name="str", + service_name="str", + create_update_parameters={"properties": "service_resource_create_update_properties"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.service.get( + resource_group_name=resource_group.name, + account_name="str", + service_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.service.begin_delete( + resource_group_name=resource_group.name, + account_name="str", + service_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_sql_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_sql_resources_operations.py new file mode 100644 index 000000000000..69b4962f52a6 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_sql_resources_operations.py @@ -0,0 +1,717 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementSqlResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_databases(self, resource_group): + response = self.client.sql_resources.list_sql_databases( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_database(self, resource_group): + response = self.client.sql_resources.get_sql_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_database(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + create_update_sql_database_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_database(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_database_throughput(self, resource_group): + response = self.client.sql_resources.get_sql_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_sql_database_throughput(self, resource_group): + response = self.client.sql_resources.begin_update_sql_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_sql_database_to_autoscale(self, resource_group): + response = self.client.sql_resources.begin_migrate_sql_database_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_sql_database_to_manual_throughput(self, resource_group): + response = self.client.sql_resources.begin_migrate_sql_database_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_containers(self, resource_group): + response = self.client.sql_resources.list_sql_containers( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_container(self, resource_group): + response = self.client.sql_resources.get_sql_container( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_container(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_container( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + create_update_sql_container_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "clientEncryptionPolicy": { + "includedPaths": [ + { + "clientEncryptionKeyId": "str", + "encryptionAlgorithm": "str", + "encryptionType": "str", + "path": "str", + } + ], + "policyFormatVersion": 0, + }, + "computedProperties": [{"name": "str", "query": "str"}], + "conflictResolutionPolicy": { + "conflictResolutionPath": "str", + "conflictResolutionProcedure": "str", + "mode": "LastWriterWins", + }, + "createMode": "Default", + "defaultTtl": 0, + "indexingPolicy": { + "automatic": bool, + "compositeIndexes": [[{"order": "str", "path": "str"}]], + "excludedPaths": [{"path": "str"}], + "includedPaths": [ + {"indexes": [{"dataType": "String", "kind": "Hash", "precision": 0}], "path": "str"} + ], + "indexingMode": "consistent", + "spatialIndexes": [{"path": "str", "types": ["str"]}], + }, + "partitionKey": {"kind": "Hash", "paths": ["str"], "systemKey": bool, "version": 0}, + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + "uniqueKeyPolicy": {"uniqueKeys": [{"paths": ["str"]}]}, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_container(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_container( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_container_throughput(self, resource_group): + response = self.client.sql_resources.get_sql_container_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_sql_container_throughput(self, resource_group): + response = self.client.sql_resources.begin_update_sql_container_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_sql_container_to_autoscale(self, resource_group): + response = self.client.sql_resources.begin_migrate_sql_container_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_sql_container_to_manual_throughput(self, resource_group): + response = self.client.sql_resources.begin_migrate_sql_container_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_client_encryption_keys(self, resource_group): + response = self.client.sql_resources.list_client_encryption_keys( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_client_encryption_key(self, resource_group): + response = self.client.sql_resources.get_client_encryption_key( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + client_encryption_key_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_client_encryption_key(self, resource_group): + response = self.client.sql_resources.begin_create_update_client_encryption_key( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + client_encryption_key_name="str", + create_update_client_encryption_key_parameters={ + "resource": { + "encryptionAlgorithm": "str", + "id": "str", + "keyWrapMetadata": {"algorithm": "str", "name": "str", "type": "str", "value": "str"}, + "wrappedDataEncryptionKey": bytes("bytes", encoding="utf-8"), + } + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_stored_procedures(self, resource_group): + response = self.client.sql_resources.list_sql_stored_procedures( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_stored_procedure(self, resource_group): + response = self.client.sql_resources.get_sql_stored_procedure( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + stored_procedure_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_stored_procedure(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_stored_procedure( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + stored_procedure_name="str", + create_update_sql_stored_procedure_parameters={ + "resource": {"id": "str", "body": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_stored_procedure(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_stored_procedure( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + stored_procedure_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_user_defined_functions(self, resource_group): + response = self.client.sql_resources.list_sql_user_defined_functions( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_user_defined_function(self, resource_group): + response = self.client.sql_resources.get_sql_user_defined_function( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + user_defined_function_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_user_defined_function(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_user_defined_function( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + user_defined_function_name="str", + create_update_sql_user_defined_function_parameters={ + "resource": {"id": "str", "body": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_user_defined_function(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_user_defined_function( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + user_defined_function_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_triggers(self, resource_group): + response = self.client.sql_resources.list_sql_triggers( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_trigger(self, resource_group): + response = self.client.sql_resources.get_sql_trigger( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + trigger_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_trigger(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_trigger( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + trigger_name="str", + create_update_sql_trigger_parameters={ + "resource": {"id": "str", "body": "str", "triggerOperation": "str", "triggerType": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_trigger(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_trigger( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + trigger_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_role_definition(self, resource_group): + response = self.client.sql_resources.get_sql_role_definition( + role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_role_definition(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_role_definition( + role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_sql_role_definition_parameters={ + "assignableScopes": ["str"], + "permissions": [{"dataActions": ["str"], "notDataActions": ["str"]}], + "roleName": "str", + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_role_definition(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_role_definition( + role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_role_definitions(self, resource_group): + response = self.client.sql_resources.list_sql_role_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_sql_role_assignment(self, resource_group): + response = self.client.sql_resources.get_sql_role_assignment( + role_assignment_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_sql_role_assignment(self, resource_group): + response = self.client.sql_resources.begin_create_update_sql_role_assignment( + role_assignment_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_sql_role_assignment_parameters={ + "principalId": "str", + "roleDefinitionId": "str", + "scope": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_sql_role_assignment(self, resource_group): + response = self.client.sql_resources.begin_delete_sql_role_assignment( + role_assignment_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_sql_role_assignments(self, resource_group): + response = self.client.sql_resources.list_sql_role_assignments( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = self.client.sql_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_sql_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_sql_resources_operations_async.py new file mode 100644 index 000000000000..179c071b2b32 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_sql_resources_operations_async.py @@ -0,0 +1,762 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementSqlResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_databases(self, resource_group): + response = self.client.sql_resources.list_sql_databases( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_database(self, resource_group): + response = await self.client.sql_resources.get_sql_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_database(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + create_update_sql_database_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_database(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_database( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_database_throughput(self, resource_group): + response = await self.client.sql_resources.get_sql_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_sql_database_throughput(self, resource_group): + response = await ( + await self.client.sql_resources.begin_update_sql_database_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
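In the async variants each `begin_*` method is a coroutine that resolves to an `AsyncLROPoller`, and the poller's `result()` is itself awaitable, which is why the generator emits the nested `await (await ...).result()` shape seen above. A condensed sketch of the same pattern outside the test harness, assuming the `azure-identity` package for the credential (not part of this diff):

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient

    async def migrate_database_to_autoscale(subscription_id, group, account, database):
        async with DefaultAzureCredential() as credential:
            async with CosmosDBManagementClient(credential, subscription_id) as client:
                # First await obtains the poller, second await drives it to completion.
                poller = await client.sql_resources.begin_migrate_sql_database_to_autoscale(
                    resource_group_name=group,
                    account_name=account,
                    database_name=database,
                )
                return await poller.result()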
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_sql_database_to_autoscale(self, resource_group): + response = await ( + await self.client.sql_resources.begin_migrate_sql_database_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_sql_database_to_manual_throughput(self, resource_group): + response = await ( + await self.client.sql_resources.begin_migrate_sql_database_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_containers(self, resource_group): + response = self.client.sql_resources.list_sql_containers( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_container(self, resource_group): + response = await self.client.sql_resources.get_sql_container( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_container(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_container( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + create_update_sql_container_parameters={ + "resource": { + "id": "str", + "analyticalStorageTtl": 0, + "clientEncryptionPolicy": { + "includedPaths": [ + { + "clientEncryptionKeyId": "str", + "encryptionAlgorithm": "str", + "encryptionType": "str", + "path": "str", + } + ], + "policyFormatVersion": 0, + }, + "computedProperties": [{"name": "str", "query": "str"}], + "conflictResolutionPolicy": { + "conflictResolutionPath": "str", + "conflictResolutionProcedure": "str", + "mode": "LastWriterWins", + }, + "createMode": "Default", + "defaultTtl": 0, + "indexingPolicy": { + "automatic": bool, + "compositeIndexes": [[{"order": "str", "path": "str"}]], + "excludedPaths": [{"path": "str"}], + "includedPaths": [ + {"indexes": [{"dataType": "String", "kind": "Hash", "precision": 0}], "path": "str"} + ], + "indexingMode": "consistent", + "spatialIndexes": [{"path": "str", "types": ["str"]}], + }, + "partitionKey": {"kind": "Hash", "paths": ["str"], "systemKey": bool, "version": 0}, + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + "uniqueKeyPolicy": {"uniqueKeys": [{"paths": ["str"]}]}, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_container(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_container( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_container_throughput(self, resource_group): + response = await self.client.sql_resources.get_sql_container_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_sql_container_throughput(self, resource_group): + response = await ( + await self.client.sql_resources.begin_update_sql_container_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_sql_container_to_autoscale(self, resource_group): + response = await ( + await self.client.sql_resources.begin_migrate_sql_container_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_sql_container_to_manual_throughput(self, resource_group): + response = await ( + await self.client.sql_resources.begin_migrate_sql_container_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_client_encryption_keys(self, resource_group): + response = self.client.sql_resources.list_client_encryption_keys( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_client_encryption_key(self, resource_group): + response = await self.client.sql_resources.get_client_encryption_key( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + client_encryption_key_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_client_encryption_key(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_client_encryption_key( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + client_encryption_key_name="str", + create_update_client_encryption_key_parameters={ + "resource": { + "encryptionAlgorithm": "str", + "id": "str", + "keyWrapMetadata": {"algorithm": "str", "name": "str", "type": "str", "value": "str"}, + "wrappedDataEncryptionKey": bytes("bytes", encoding="utf-8"), + } + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_stored_procedures(self, resource_group): + response = self.client.sql_resources.list_sql_stored_procedures( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_stored_procedure(self, resource_group): + response = await self.client.sql_resources.get_sql_stored_procedure( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + stored_procedure_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_stored_procedure(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_stored_procedure( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + stored_procedure_name="str", + create_update_sql_stored_procedure_parameters={ + "resource": {"id": "str", "body": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_stored_procedure(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_stored_procedure( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + stored_procedure_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_user_defined_functions(self, resource_group): + response = self.client.sql_resources.list_sql_user_defined_functions( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_user_defined_function(self, resource_group): + response = await self.client.sql_resources.get_sql_user_defined_function( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + user_defined_function_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_user_defined_function(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_user_defined_function( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + user_defined_function_name="str", + create_update_sql_user_defined_function_parameters={ + "resource": {"id": "str", "body": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_user_defined_function(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_user_defined_function( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + user_defined_function_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_triggers(self, resource_group): + response = self.client.sql_resources.list_sql_triggers( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_trigger(self, resource_group): + response = await self.client.sql_resources.get_sql_trigger( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + trigger_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_trigger(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_trigger( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + trigger_name="str", + create_update_sql_trigger_parameters={ + "resource": {"id": "str", "body": "str", "triggerOperation": "str", "triggerType": "str"}, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_trigger(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_trigger( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + trigger_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_role_definition(self, resource_group): + response = await self.client.sql_resources.get_sql_role_definition( + role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_role_definition(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_role_definition( + role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_sql_role_definition_parameters={ + "assignableScopes": ["str"], + "permissions": [{"dataActions": ["str"], "notDataActions": ["str"]}], + "roleName": "str", + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_role_definition(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_role_definition( + role_definition_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_role_definitions(self, resource_group): + response = self.client.sql_resources.list_sql_role_definitions( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_sql_role_assignment(self, resource_group): + response = await self.client.sql_resources.get_sql_role_assignment( + role_assignment_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_sql_role_assignment(self, resource_group): + response = await ( + await self.client.sql_resources.begin_create_update_sql_role_assignment( + role_assignment_id="str", + resource_group_name=resource_group.name, + account_name="str", + create_update_sql_role_assignment_parameters={ + "principalId": "str", + "roleDefinitionId": "str", + "scope": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_sql_role_assignment(self, resource_group): + response = await ( + await self.client.sql_resources.begin_delete_sql_role_assignment( + role_assignment_id="str", + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_sql_role_assignments(self, resource_group): + response = self.client.sql_resources.list_sql_role_assignments( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = await ( + await self.client.sql_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + database_name="str", + container_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_table_resources_operations.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_table_resources_operations.py new file mode 100644 index 000000000000..a7a85f0807e2 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_table_resources_operations.py @@ -0,0 +1,172 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementTableResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_tables(self, resource_group): + response = self.client.table_resources.list_tables( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_table(self, resource_group): + response = self.client.table_resources.get_table( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update_table(self, resource_group): + response = self.client.table_resources.begin_create_update_table( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + create_update_table_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete_table(self, resource_group): + response = self.client.table_resources.begin_delete_table( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_table_throughput(self, resource_group): + response = self.client.table_resources.get_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
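The `create_update_table_parameters` body above includes a `restoreParameters` block with the `restoreWithTtlDisabled` flag; that block is paired with `createMode: "Restore"` rather than the "Default" shown in the placeholder. A hedged sketch of a restore-style payload, where the restorable-account path, timestamp, and table name are placeholders rather than values taken from this diff:

    # Hypothetical restore payload; every value below is illustrative.
    create_update_table_parameters = {
        "resource": {
            "id": "my-table",
            "createMode": "Restore",
            "restoreParameters": {
                "restoreSource": (
                    "/subscriptions/<subscription-id>/providers/Microsoft.DocumentDB"
                    "/locations/eastus/restorableDatabaseAccounts/<instance-id>"
                ),
                "restoreTimestampInUtc": "2024-08-01T00:00:00Z",
                "restoreWithTtlDisabled": True,  # keep TTL disabled on the restored table
            },
        }
    }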
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update_table_throughput(self, resource_group): + response = self.client.table_resources.begin_update_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_table_to_autoscale(self, resource_group): + response = self.client.table_resources.begin_migrate_table_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_migrate_table_to_manual_throughput(self, resource_group): + response = self.client.table_resources.begin_migrate_table_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = self.client.table_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_table_resources_operations_async.py b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_table_resources_operations_async.py new file mode 100644 index 000000000000..abbc8c1a6489 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/generated_tests/test_cosmos_db_management_table_resources_operations_async.py @@ -0,0 +1,185 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestCosmosDBManagementTableResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_tables(self, resource_group): + response = self.client.table_resources.list_tables( + resource_group_name=resource_group.name, + account_name="str", + api_version="2024-08-15", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_table(self, resource_group): + response = await self.client.table_resources.get_table( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update_table(self, resource_group): + response = await ( + await self.client.table_resources.begin_create_update_table( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + create_update_table_parameters={ + "resource": { + "id": "str", + "createMode": "Default", + "restoreParameters": { + "restoreSource": "str", + "restoreTimestampInUtc": "2020-02-20 00:00:00", + "restoreWithTtlDisabled": bool, + }, + }, + "id": "str", + "location": "str", + "name": "str", + "options": {"autoscaleSettings": {"maxThroughput": 0}, "throughput": 0}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete_table(self, resource_group): + response = await ( + await self.client.table_resources.begin_delete_table( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_table_throughput(self, resource_group): + response = await self.client.table_resources.get_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update_table_throughput(self, resource_group): + response = await ( + await self.client.table_resources.begin_update_table_throughput( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + update_throughput_parameters={ + "resource": { + "autoscaleSettings": { + "maxThroughput": 0, + "autoUpgradePolicy": {"throughputPolicy": {"incrementPercent": 0, "isEnabled": bool}}, + "targetMaxThroughput": 0, + }, + "instantMaximumThroughput": "str", + "minimumThroughput": "str", + "offerReplacePending": "str", + "softAllowedMaximumThroughput": "str", + "throughput": 0, + }, + "id": "str", + "location": "str", + "name": "str", + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_table_to_autoscale(self, resource_group): + response = await ( + await self.client.table_resources.begin_migrate_table_to_autoscale( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_migrate_table_to_manual_throughput(self, resource_group): + response = await ( + await self.client.table_resources.begin_migrate_table_to_manual_throughput( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_retrieve_continuous_backup_information(self, resource_group): + response = await ( + await self.client.table_resources.begin_retrieve_continuous_backup_information( + resource_group_name=resource_group.name, + account_name="str", + table_name="str", + location={"location": "str"}, + api_version="2024-08-15", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/setup.py b/sdk/cosmos/azure-mgmt-cosmosdb/setup.py index 7871875933cf..04300d8bd291 100644 --- a/sdk/cosmos/azure-mgmt-cosmosdb/setup.py +++ b/sdk/cosmos/azure-mgmt-cosmosdb/setup.py @@ -75,6 +75,7 @@ }, install_requires=[ "isodate>=0.6.1", + "typing-extensions>=4.6.0", "azure-common>=1.1", "azure-mgmt-core>=1.3.2", ], diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/tests/conftest.py b/sdk/cosmos/azure-mgmt-cosmosdb/tests/conftest.py new file mode 100644 index 000000000000..c6d1ee70d05f --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# aovid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + cosmosdbmanagement_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + cosmosdbmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + cosmosdbmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + cosmosdbmanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=cosmosdbmanagement_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_database_accounts_operations_async_test.py b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_database_accounts_operations_async_test.py new file mode 100644 index 000000000000..982ea83e3751 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_database_accounts_operations_async_test.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.live_test_only +class TestCosmosDBManagementDatabaseAccountsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.database_accounts.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + assert result == [] diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_database_accounts_operations_test.py b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_database_accounts_operations_test.py new file mode 100644 index 000000000000..abf31d4ef2c4 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_database_accounts_operations_test.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.live_test_only +class TestCosmosDBManagementDatabaseAccountsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.database_accounts.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + assert result == [] diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_operations_async_test.py b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_operations_async_test.py new file mode 100644 index 000000000000..6de5a28e4a87 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_operations_async_test.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb.aio import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.live_test_only +class TestCosmosDBManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list() + result = [r async for r in response] + assert result + diff --git a/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_operations_test.py b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_operations_test.py new file mode 100644 index 000000000000..03bbf3098d95 --- /dev/null +++ b/sdk/cosmos/azure-mgmt-cosmosdb/tests/test_cosmos_db_management_operations_test.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.cosmosdb import CosmosDBManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.live_test_only +class TestCosmosDBManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(CosmosDBManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list() + result = [r for r in response] + assert result +
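The hand-written tests under tests/ show the minimal shape that the "please add some check logic here by yourself" placeholders in generated_tests/ are expected to take: materialize the paged response and assert on it. One way a skipped generated case could be completed, sketched with a hypothetical account name and on the assumption that the account starts out empty:

    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
    @recorded_by_proxy
    def test_list_tables(self, resource_group):
        # "my-account" is a placeholder; the account would be provisioned before recording.
        response = self.client.table_resources.list_tables(
            resource_group_name=resource_group.name,
            account_name="my-account",
            api_version="2024-08-15",
        )
        result = [r for r in response]
        # Example check logic: a freshly created account exposes no tables.
        assert result == []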