From 988351d8843c099ce764870ab739742ccac86245 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Tue, 6 Jun 2023 20:03:01 +0000 Subject: [PATCH] CodeGen from PR 24323 in Azure/azure-rest-api-specs [AI.TextTranslation] Disambiguate example titles (#24323) - Causes error in TypeSpec 0.45.0 - Also change casing of textType in example to match spec - Fixes error ENUM_CASE_MISMATCH in Swagger ModelValidation --- .../azure-ai-translation-text/_meta.json | 15 + .../azure/ai/translation/text/__init__.py | 2 +- .../azure/ai/translation/text/_client.py | 4 +- .../ai/translation/text/_configuration.py | 12 +- .../azure/ai/translation/text/_model_base.py | 82 +++-- .../text/_operations/_operations.py | 289 ++++++++++-------- .../azure/ai/translation/text/_patch.py | 47 +-- .../azure/ai/translation/text/_version.py | 2 +- .../azure/ai/translation/text/aio/_client.py | 4 +- .../ai/translation/text/aio/_configuration.py | 12 +- .../text/aio/_operations/_operations.py | 266 +++++++++------- .../azure/ai/translation/text/aio/_patch.py | 41 ++- .../ai/translation/text/models/__init__.py | 9 - .../ai/translation/text/models/_models.py | 271 ++++++++-------- .../azure-ai-translation-text/setup.py | 94 ++---- .../tests/conftest.py | 12 +- .../tests/preparer.py | 4 +- .../tests/static_access_token_credential.py | 6 +- .../tests/test_break_sentence.py | 21 +- .../tests/test_dictionary_examples.py | 21 +- .../tests/test_dictionary_lookup.py | 13 +- .../tests/test_get_languages.py | 1 - .../tests/test_helper.py | 11 +- .../tests/test_translation.py | 107 ++++--- .../tests/test_transliteration.py | 23 +- .../tests/testcase.py | 6 +- .../tsp-location.yaml | 4 + 27 files changed, 706 insertions(+), 673 deletions(-) create mode 100644 sdk/translation/azure-ai-translation-text/_meta.json create mode 100644 sdk/translation/azure-ai-translation-text/tsp-location.yaml diff --git a/sdk/translation/azure-ai-translation-text/_meta.json b/sdk/translation/azure-ai-translation-text/_meta.json new file mode 100644 index 0000000000000..90ca7bbd6e0cf --- /dev/null +++ b/sdk/translation/azure-ai-translation-text/_meta.json @@ -0,0 +1,15 @@ +{ + "commit": "aa1f08c2bfb9bef613f963aaf971a25558151330", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "typespec_src": "specification/translation/Azure.AI.TextTranslation", + "@azure-tools/typespec-python": { + "version": "0.8.6", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-python/-/typespec-python-0.8.6.tgz", + "dependencies": { + "@autorest/python": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@autorest/python/-/python-6.5.0.tgz" + } + } + } +} \ No newline at end of file diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/__init__.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/__init__.py index dbfdf9d029a2d..ef7a65c8c9878 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/__init__.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/__init__.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._patch import TextTranslationClient +from ._client import TextTranslationClient from ._version import VERSION __version__ = VERSION diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_client.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_client.py index 8b33807eebe0c..9c33ec1da9bb2 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_client.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_client.py @@ -44,8 +44,8 @@ class TextTranslationClient(TextTranslationClientOperationsMixin): # pylint: di :param endpoint: Supported Text Translation endpoints (protocol and hostname, for example: https://api.cognitive.microsofttranslator.com). Required. :type endpoint: str - :keyword api_version: Default value is "3.0". Note that overriding this default value may - result in unsupported behavior. + :keyword api_version: Mandatory API version parameter. Default value is "3.0". Note that + overriding this default value may result in unsupported behavior. :paramtype api_version: str """ diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_configuration.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_configuration.py index cc4411253fde2..e08bda4d7be1e 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_configuration.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_configuration.py @@ -6,7 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any from azure.core.configuration import Configuration @@ -14,11 +13,6 @@ from ._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports - class TextTranslationClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for TextTranslationClient. @@ -29,14 +23,14 @@ class TextTranslationClientConfiguration(Configuration): # pylint: disable=too- :param endpoint: Supported Text Translation endpoints (protocol and hostname, for example: https://api.cognitive.microsofttranslator.com). Required. :type endpoint: str - :keyword api_version: Default value is "3.0". Note that overriding this default value may - result in unsupported behavior. + :keyword api_version: Mandatory API version parameter. Default value is "3.0". Note that + overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, endpoint: str, **kwargs: Any) -> None: super(TextTranslationClientConfiguration, self).__init__(**kwargs) - api_version: Literal["3.0"] = kwargs.pop("api_version", "3.0") + api_version: str = kwargs.pop("api_version", "3.0") if endpoint is None: raise ValueError("Parameter 'endpoint' must not be None.") diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_model_base.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_model_base.py index c37b9314ab903..3a2f72e93fda2 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_model_base.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_model_base.py @@ -20,7 +20,7 @@ from azure.core.exceptions import DeserializationError from azure.core import CaseInsensitiveEnumMeta from azure.core.pipeline import PipelineResponse -from azure.core.serialization import NULL as AzureCoreNull +from azure.core.serialization import _Null # pylint: disable=protected-access if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -29,24 +29,9 @@ _LOGGER = logging.getLogger(__name__) -__all__ = ["NULL", "AzureJSONEncoder", "Model", "rest_field", "rest_discriminator"] +__all__ = ["AzureJSONEncoder", "Model", "rest_field", "rest_discriminator"] -class _Null(object): - """To create a Falsy object""" - - def __bool__(self): - return False - - __nonzero__ = __bool__ # Python2 compatibility - - -NULL = _Null() -""" -A falsy sentinel object which is supposed to be used to specify attributes -with no data. This gets serialized to `null` on the wire. -""" - TZ_UTC = timezone.utc @@ -74,32 +59,35 @@ def _timedelta_as_isostr(td: timedelta) -> str: if days: date_str = "%sD" % days - # Build time - time_str = "T" + if hours or minutes or seconds: + # Build time + time_str = "T" - # Hours - bigger_exists = date_str or hours - if bigger_exists: - time_str += "{:02}H".format(hours) + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) - # Minutes - bigger_exists = bigger_exists or minutes - if bigger_exists: - time_str += "{:02}M".format(minutes) + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) - # Seconds - try: - if seconds.is_integer(): - seconds_string = "{:02}".format(int(seconds)) - else: - # 9 chars long w/ leading 0, 6 digits after decimal - seconds_string = "%09.6f" % seconds - # Remove trailing zeros - seconds_string = seconds_string.rstrip("0") - except AttributeError: # int.is_integer() raises - seconds_string = "{:02}".format(seconds) + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) - time_str += "{}S".format(seconds_string) + time_str += "{}S".format(seconds_string) + else: + time_str = "" return "P" + date_str + time_str @@ -166,7 +154,7 @@ def default(self, o): # pylint: disable=too-many-return-statements return {k: v for k, v in o.items() if k not in readonly_props} if isinstance(o, (bytes, bytearray)): return base64.b64encode(o).decode() - if o is AzureCoreNull: + if isinstance(o, _Null): return None try: return super(AzureJSONEncoder, self).default(o) @@ -425,7 +413,9 @@ def 
__init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: if non_attr_kwargs: # actual type errors only throw the first wrong keyword arg they see, so following that. raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") - dict_to_pass.update({self._attr_to_rest_field[k]._rest_name: _serialize(v) for k, v in kwargs.items()}) + dict_to_pass.update( + {self._attr_to_rest_field[k]._rest_name: _serialize(v) for k, v in kwargs.items() if v is not None} + ) super().__init__(dict_to_pass) def copy(self) -> "Model": @@ -653,7 +643,7 @@ def __init__( self, *, name: typing.Optional[str] = None, - type: typing.Optional[typing.Callable] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin is_discriminator: bool = False, readonly: bool = False, default: typing.Any = _UNSET, @@ -672,7 +662,7 @@ def _rest_name(self) -> str: raise ValueError("Rest name was never set") return self._rest_name_input - def __get__(self, obj: Model, type=None): + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class item = obj.get(self._rest_name) @@ -701,7 +691,7 @@ def _get_deserialize_callable_from_annotation( def rest_field( *, name: typing.Optional[str] = None, - type: typing.Optional[typing.Callable] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin readonly: bool = False, default: typing.Any = _UNSET, ) -> typing.Any: @@ -709,6 +699,8 @@ def rest_field( def rest_discriminator( - *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin ) -> typing.Any: return _RestField(name=name, type=type, is_discriminator=True) diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_operations/_operations.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_operations/_operations.py index 4a62611f0559d..42d122425b0e0 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_operations/_operations.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_operations/_operations.py @@ -6,8 +6,8 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import json -import sys from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -29,10 +29,6 @@ from .._serialization import Serializer from .._vendor import TextTranslationClientMixinABC -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +47,7 @@ def build_text_translation_get_languages_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["3.0"] = kwargs.pop("api_version", _params.pop("api-version", "3.0")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "3.0")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -79,10 +75,10 @@ def build_text_translation_translate_request( to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -95,7 +91,7 @@ def build_text_translation_translate_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: Literal["3.0"] = kwargs.pop("api_version", _params.pop("api-version", "3.0")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "3.0")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -144,7 +140,7 @@ def build_text_translation_transliterate_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: Literal["3.0"] = kwargs.pop("api_version", _params.pop("api-version", "3.0")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "3.0")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -177,7 +173,7 @@ def build_text_translation_find_sentence_boundaries_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: Literal["3.0"] = kwargs.pop("api_version", _params.pop("api-version", "3.0")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "3.0")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -207,7 +203,7 @@ def build_text_translation_lookup_dictionary_entries_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: Literal["3.0"] = kwargs.pop("api_version", _params.pop("api-version", "3.0")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "3.0")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +231,7 @@ def build_text_translation_lookup_dictionary_examples_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: Literal["3.0"] = kwargs.pop("api_version", _params.pop("api-version", "3.0")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "3.0")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -297,6 +293,8 @@ def get_languages( If the resource has not been modified, the service will return status code 304 and an empty response body. Default value is None. :paramtype if_none_match: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: GetLanguagesResult. The GetLanguagesResult is compatible with MutableMapping :rtype: ~azure.ai.translation.text.models.GetLanguagesResult :raises ~azure.core.exceptions.HttpResponseError: @@ -328,7 +326,7 @@ def get_languages( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -344,7 +342,10 @@ def get_languages( response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = _deserialize(_models.GetLanguagesResult, response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.GetLanguagesResult, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -354,15 +355,15 @@ def get_languages( @overload def translate( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -376,8 +377,8 @@ def translate( Translate Text. - :param content: Array of the text to be translated. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword to: Specifies the language of the output text. The target language must be one of the supported languages included in the translation scope. For example, use to=de to translate to German. @@ -400,9 +401,8 @@ def translate( :paramtype from_parameter: str :keyword text_type: Defines whether the text being translated is plain text or HTML text. Any HTML needs to be a well-formed, - complete element. Possible values are: plain (default) or html. Known values are: "plain" and - "html". 
Default value is None. - :paramtype text_type: str or ~azure.ai.translation.text.models.TextType + complete element. Possible values are: plain (default) or html. Default value is None. + :paramtype text_type: str :keyword category: A string specifying the category (domain) of the translation. This parameter is used to get translations from a customized system built with Custom Translator. Add the Category ID from your Custom @@ -411,13 +411,11 @@ def translate( general. Default value is None. :paramtype category: str :keyword profanity_action: Specifies how profanities should be treated in translations. - Possible values are: NoAction (default), Marked or Deleted. Known values are: "NoAction", - "Marked", and "Deleted". Default value is None. - :paramtype profanity_action: str or ~azure.ai.translation.text.models.ProfanityAction + Possible values are: NoAction (default), Marked or Deleted. Default value is None. + :paramtype profanity_action: str :keyword profanity_marker: Specifies how profanities should be marked in translations. - Possible values are: Asterisk (default) or Tag. Known values are: "Asterisk" and "Tag". - Default value is None. - :paramtype profanity_marker: str or ~azure.ai.translation.text.models.ProfanityMarker + Possible values are: Asterisk (default) or Tag. Default value is None. + :paramtype profanity_marker: str :keyword include_alignment: Specifies whether to include alignment projection from source text to translated text. Possible values are: true or false (default). Default value is None. @@ -453,6 +451,8 @@ def translate( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TranslatedTextItem :rtype: list[~azure.ai.translation.text.models.TranslatedTextItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -461,15 +461,15 @@ def translate( @overload def translate( self, - content: IO, + request_body: IO, *, to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -483,8 +483,8 @@ def translate( Translate Text. - :param content: Array of the text to be translated. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword to: Specifies the language of the output text. The target language must be one of the supported languages included in the translation scope. For example, use to=de to translate to German. @@ -507,9 +507,8 @@ def translate( :paramtype from_parameter: str :keyword text_type: Defines whether the text being translated is plain text or HTML text. Any HTML needs to be a well-formed, - complete element. Possible values are: plain (default) or html. Known values are: "plain" and - "html". Default value is None. 
- :paramtype text_type: str or ~azure.ai.translation.text.models.TextType + complete element. Possible values are: plain (default) or html. Default value is None. + :paramtype text_type: str :keyword category: A string specifying the category (domain) of the translation. This parameter is used to get translations from a customized system built with Custom Translator. Add the Category ID from your Custom @@ -518,13 +517,11 @@ def translate( general. Default value is None. :paramtype category: str :keyword profanity_action: Specifies how profanities should be treated in translations. - Possible values are: NoAction (default), Marked or Deleted. Known values are: "NoAction", - "Marked", and "Deleted". Default value is None. - :paramtype profanity_action: str or ~azure.ai.translation.text.models.ProfanityAction + Possible values are: NoAction (default), Marked or Deleted. Default value is None. + :paramtype profanity_action: str :keyword profanity_marker: Specifies how profanities should be marked in translations. - Possible values are: Asterisk (default) or Tag. Known values are: "Asterisk" and "Tag". - Default value is None. - :paramtype profanity_marker: str or ~azure.ai.translation.text.models.ProfanityMarker + Possible values are: Asterisk (default) or Tag. Default value is None. + :paramtype profanity_marker: str :keyword include_alignment: Specifies whether to include alignment projection from source text to translated text. Possible values are: true or false (default). Default value is None. @@ -560,6 +557,8 @@ def translate( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TranslatedTextItem :rtype: list[~azure.ai.translation.text.models.TranslatedTextItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -568,15 +567,15 @@ def translate( @distributed_trace def translate( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -589,9 +588,9 @@ def translate( Translate Text. - :param content: Array of the text to be translated. Is either a [InputTextItem] type or a IO - type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a + IO type. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword to: Specifies the language of the output text. The target language must be one of the supported languages included in the translation scope. For example, use to=de to translate to German. 
@@ -614,9 +613,8 @@ def translate( :paramtype from_parameter: str :keyword text_type: Defines whether the text being translated is plain text or HTML text. Any HTML needs to be a well-formed, - complete element. Possible values are: plain (default) or html. Known values are: "plain" and - "html". Default value is None. - :paramtype text_type: str or ~azure.ai.translation.text.models.TextType + complete element. Possible values are: plain (default) or html. Default value is None. + :paramtype text_type: str :keyword category: A string specifying the category (domain) of the translation. This parameter is used to get translations from a customized system built with Custom Translator. Add the Category ID from your Custom @@ -625,13 +623,11 @@ def translate( general. Default value is None. :paramtype category: str :keyword profanity_action: Specifies how profanities should be treated in translations. - Possible values are: NoAction (default), Marked or Deleted. Known values are: "NoAction", - "Marked", and "Deleted". Default value is None. - :paramtype profanity_action: str or ~azure.ai.translation.text.models.ProfanityAction + Possible values are: NoAction (default), Marked or Deleted. Default value is None. + :paramtype profanity_action: str :keyword profanity_marker: Specifies how profanities should be marked in translations. - Possible values are: Asterisk (default) or Tag. Known values are: "Asterisk" and "Tag". - Default value is None. - :paramtype profanity_marker: str or ~azure.ai.translation.text.models.ProfanityMarker + Possible values are: Asterisk (default) or Tag. Default value is None. + :paramtype profanity_marker: str :keyword include_alignment: Specifies whether to include alignment projection from source text to translated text. Possible values are: true or false (default). Default value is None. @@ -667,6 +663,8 @@ def translate( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
:return: list of TranslatedTextItem :rtype: list[~azure.ai.translation.text.models.TranslatedTextItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -687,10 +685,10 @@ def translate( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_translate_request( to=to, @@ -717,7 +715,7 @@ def translate( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -734,7 +732,10 @@ def translate( response_headers["x-mt-system"] = self._deserialize("str", response.headers.get("x-mt-system")) response_headers["x-metered-usage"] = self._deserialize("int", response.headers.get("x-metered-usage")) - deserialized = _deserialize(List[_models.TranslatedTextItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.TranslatedTextItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -744,7 +745,7 @@ def translate( @overload def transliterate( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, language: str, from_script: str, @@ -757,8 +758,8 @@ def transliterate( Transliterate Text. - :param content: Array of the text to be transliterated. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword language: Specifies the language of the text to convert from one script to another. Possible languages are listed in the transliteration scope obtained by querying the service for its supported languages. Required. @@ -777,6 +778,8 @@ def transliterate( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TransliteratedText :rtype: list[~azure.ai.translation.text.models.TransliteratedText] :raises ~azure.core.exceptions.HttpResponseError: @@ -785,7 +788,7 @@ def transliterate( @overload def transliterate( self, - content: IO, + request_body: IO, *, language: str, from_script: str, @@ -798,8 +801,8 @@ def transliterate( Transliterate Text. - :param content: Array of the text to be transliterated. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword language: Specifies the language of the text to convert from one script to another. Possible languages are listed in the transliteration scope obtained by querying the service for its supported languages. Required. @@ -818,6 +821,8 @@ def transliterate( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TransliteratedText :rtype: list[~azure.ai.translation.text.models.TransliteratedText] :raises ~azure.core.exceptions.HttpResponseError: @@ -826,7 +831,7 @@ def transliterate( @distributed_trace def transliterate( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, language: str, from_script: str, @@ -838,9 +843,9 @@ def transliterate( Transliterate Text. - :param content: Array of the text to be transliterated. Is either a [InputTextItem] type or a + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a IO type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword language: Specifies the language of the text to convert from one script to another. Possible languages are listed in the transliteration scope obtained by querying the service for its supported languages. Required. @@ -859,6 +864,8 @@ def transliterate( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TransliteratedText :rtype: list[~azure.ai.translation.text.models.TransliteratedText] :raises ~azure.core.exceptions.HttpResponseError: @@ -879,10 +886,10 @@ def transliterate( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_transliterate_request( language=language, @@ -900,7 +907,7 @@ def transliterate( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -915,7 +922,10 @@ def transliterate( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.TransliteratedText], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.TransliteratedText], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -925,7 +935,7 @@ def transliterate( @overload def find_sentence_boundaries( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, client_trace_id: Optional[str] = None, language: Optional[str] = None, @@ -937,9 +947,8 @@ def find_sentence_boundaries( Find Sentence Boundaries. - :param content: Array of the text for which values the sentence boundaries will be calculated. - Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. 
+ :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword client_trace_id: A client-generated GUID to uniquely identify the request. Default value is None. :paramtype client_trace_id: str @@ -954,6 +963,8 @@ def find_sentence_boundaries( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of BreakSentenceItem :rtype: list[~azure.ai.translation.text.models.BreakSentenceItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -962,7 +973,7 @@ def find_sentence_boundaries( @overload def find_sentence_boundaries( self, - content: IO, + request_body: IO, *, client_trace_id: Optional[str] = None, language: Optional[str] = None, @@ -974,9 +985,8 @@ def find_sentence_boundaries( Find Sentence Boundaries. - :param content: Array of the text for which values the sentence boundaries will be calculated. - Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword client_trace_id: A client-generated GUID to uniquely identify the request. Default value is None. :paramtype client_trace_id: str @@ -991,6 +1001,8 @@ def find_sentence_boundaries( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of BreakSentenceItem :rtype: list[~azure.ai.translation.text.models.BreakSentenceItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -999,7 +1011,7 @@ def find_sentence_boundaries( @distributed_trace def find_sentence_boundaries( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, client_trace_id: Optional[str] = None, language: Optional[str] = None, @@ -1010,9 +1022,9 @@ def find_sentence_boundaries( Find Sentence Boundaries. - :param content: Array of the text for which values the sentence boundaries will be calculated. - Is either a [InputTextItem] type or a IO type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a + IO type. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword client_trace_id: A client-generated GUID to uniquely identify the request. Default value is None. :paramtype client_trace_id: str @@ -1027,6 +1039,8 @@ def find_sentence_boundaries( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
:return: list of BreakSentenceItem :rtype: list[~azure.ai.translation.text.models.BreakSentenceItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1047,10 +1061,10 @@ def find_sentence_boundaries( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_find_sentence_boundaries_request( client_trace_id=client_trace_id, @@ -1067,7 +1081,7 @@ def find_sentence_boundaries( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -1082,7 +1096,10 @@ def find_sentence_boundaries( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.BreakSentenceItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.BreakSentenceItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1092,7 +1109,7 @@ def find_sentence_boundaries( @overload def lookup_dictionary_entries( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, from_parameter: str, to: str, @@ -1104,8 +1121,8 @@ def lookup_dictionary_entries( Lookup Dictionary Entries. - :param content: Array of the text to be sent to dictionary. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1120,6 +1137,8 @@ def lookup_dictionary_entries( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryLookupItem :rtype: list[~azure.ai.translation.text.models.DictionaryLookupItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1128,7 +1147,7 @@ def lookup_dictionary_entries( @overload def lookup_dictionary_entries( self, - content: IO, + request_body: IO, *, from_parameter: str, to: str, @@ -1140,8 +1159,8 @@ def lookup_dictionary_entries( Lookup Dictionary Entries. - :param content: Array of the text to be sent to dictionary. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1156,6 +1175,8 @@ def lookup_dictionary_entries( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryLookupItem :rtype: list[~azure.ai.translation.text.models.DictionaryLookupItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1164,7 +1185,7 @@ def lookup_dictionary_entries( @distributed_trace def lookup_dictionary_entries( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, from_parameter: str, to: str, @@ -1175,9 +1196,9 @@ def lookup_dictionary_entries( Lookup Dictionary Entries. - :param content: Array of the text to be sent to dictionary. Is either a [InputTextItem] type or - a IO type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a + IO type. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1192,6 +1213,8 @@ def lookup_dictionary_entries( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryLookupItem :rtype: list[~azure.ai.translation.text.models.DictionaryLookupItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1212,10 +1235,10 @@ def lookup_dictionary_entries( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_lookup_dictionary_entries_request( from_parameter=from_parameter, @@ -1232,7 +1255,7 @@ def lookup_dictionary_entries( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -1247,7 +1270,10 @@ def lookup_dictionary_entries( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.DictionaryLookupItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.DictionaryLookupItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1257,7 +1283,7 @@ def lookup_dictionary_entries( @overload def lookup_dictionary_examples( self, - content: List[_models.DictionaryExampleTextItem], + request_body: List[_models.DictionaryExampleTextItem], *, from_parameter: str, to: str, @@ -1269,8 +1295,8 @@ def lookup_dictionary_examples( Lookup Dictionary Examples. - :param content: Array of the text to be sent to dictionary. Required. 
- :type content: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1285,6 +1311,8 @@ def lookup_dictionary_examples( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryExampleItem :rtype: list[~azure.ai.translation.text.models.DictionaryExampleItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1293,7 +1321,7 @@ def lookup_dictionary_examples( @overload def lookup_dictionary_examples( self, - content: IO, + request_body: IO, *, from_parameter: str, to: str, @@ -1305,8 +1333,8 @@ def lookup_dictionary_examples( Lookup Dictionary Examples. - :param content: Array of the text to be sent to dictionary. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1321,6 +1349,8 @@ def lookup_dictionary_examples( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryExampleItem :rtype: list[~azure.ai.translation.text.models.DictionaryExampleItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1329,7 +1359,7 @@ def lookup_dictionary_examples( @distributed_trace def lookup_dictionary_examples( self, - content: Union[List[_models.DictionaryExampleTextItem], IO], + request_body: Union[List[_models.DictionaryExampleTextItem], IO], *, from_parameter: str, to: str, @@ -1340,9 +1370,9 @@ def lookup_dictionary_examples( Lookup Dictionary Examples. - :param content: Array of the text to be sent to dictionary. Is either a + :param request_body: Defines the content of the request. Is either a [DictionaryExampleTextItem] type or a IO type. Required. - :type content: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] or IO + :type request_body: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] or IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1357,6 +1387,8 @@ def lookup_dictionary_examples( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
:return: list of DictionaryExampleItem :rtype: list[~azure.ai.translation.text.models.DictionaryExampleItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1377,10 +1409,10 @@ def lookup_dictionary_examples( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_lookup_dictionary_examples_request( from_parameter=from_parameter, @@ -1397,7 +1429,7 @@ def lookup_dictionary_examples( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -1412,7 +1444,10 @@ def lookup_dictionary_examples( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.DictionaryExampleItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.DictionaryExampleItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_patch.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_patch.py index a0c1bdce7cae2..f792a211d0cd1 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_patch.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_patch.py @@ -3,15 +3,16 @@ # Licensed under the MIT License. # ------------------------------------ -from typing import ( Union, Optional ) +from typing import Union, Optional from azure.core.pipeline import PipelineRequest -from azure.core.pipeline.policies import ( SansIOHTTPPolicy, BearerTokenCredentialPolicy, AzureKeyCredentialPolicy ) -from azure.core.credentials import ( TokenCredential, AzureKeyCredential ) +from azure.core.pipeline.policies import SansIOHTTPPolicy, BearerTokenCredentialPolicy, AzureKeyCredentialPolicy +from azure.core.credentials import TokenCredential, AzureKeyCredential from ._client import TextTranslationClient as ServiceClientGenerated DEFAULT_TOKEN_SCOPE = "https://api.microsofttranslator.com/" + def patch_sdk(): """Do not remove from this file. @@ -20,9 +21,10 @@ def patch_sdk(): https://aka.ms/azsdk/python/dpcodegen/python/customize """ + class TranslatorCredential: - """ Credential for Translator Service. It is using combination of Resource key and region. - """ + """Credential for Translator Service. It is using combination of Resource key and region.""" + def __init__(self, key: str, region: str) -> None: self.key = key self.region = region @@ -40,11 +42,13 @@ def update(self, key: str) -> None: raise TypeError("The key used for updating must be a string.") self.key = key + class TranslatorAuthenticationPolicy(SansIOHTTPPolicy): - """ Translator Authentication Policy. Adds both authentication headers that are required. + """Translator Authentication Policy. Adds both authentication headers that are required. Ocp-Apim-Subscription-Region header contains region of the Translator resource. 
Ocp-Apim-Subscription-Key header contains API key of the Translator resource. """ + def __init__(self, credential: TranslatorCredential): self.credential = credential @@ -52,6 +56,7 @@ def on_request(self, request: PipelineRequest) -> None: request.http_request.headers["Ocp-Apim-Subscription-Key"] = self.credential.key request.http_request.headers["Ocp-Apim-Subscription-Region"] = self.credential.region + def get_translation_endpoint(endpoint, api_version): if not endpoint: endpoint = "https://api.cognitive.microsofttranslator.com" @@ -64,6 +69,7 @@ def get_translation_endpoint(endpoint, api_version): return translator_endpoint + def set_authentication_policy(credential, kwargs): if isinstance(credential, TranslatorCredential): if not kwargs.get("authentication_policy"): @@ -71,10 +77,14 @@ def set_authentication_policy(credential, kwargs): elif isinstance(credential, AzureKeyCredential): if not kwargs.get("authentication_policy"): kwargs["authentication_policy"] = AzureKeyCredentialPolicy( - name="Ocp-Apim-Subscription-Key", credential=credential) + name="Ocp-Apim-Subscription-Key", credential=credential + ) elif hasattr(credential, "get_token"): if not kwargs.get("authentication_policy"): - kwargs["authentication_policy"] = BearerTokenCredentialPolicy(credential, *kwargs.pop("credential_scopes", [DEFAULT_TOKEN_SCOPE]), kwargs) + kwargs["authentication_policy"] = BearerTokenCredentialPolicy( + credential, *kwargs.pop("credential_scopes", [DEFAULT_TOKEN_SCOPE]), kwargs + ) + class TextTranslationClient(ServiceClientGenerated): """Text translation is a cloud-based REST API feature of the Translator service that uses neural @@ -117,22 +127,21 @@ class TextTranslationClient(ServiceClientGenerated): result in unsupported behavior. :paramtype api_version: str """ + def __init__( - self, - credential: Union[AzureKeyCredential , TokenCredential , TranslatorCredential], - *, - endpoint: Optional[str] = None, - api_version = "3.0", - **kwargs): + self, + credential: Union[AzureKeyCredential, TokenCredential, TranslatorCredential], + *, + endpoint: Optional[str] = None, + api_version="3.0", + **kwargs + ): set_authentication_policy(credential, kwargs) translation_endpoint = get_translation_endpoint(endpoint, api_version) - super().__init__( - endpoint=translation_endpoint, - api_version=api_version, - **kwargs - ) + super().__init__(endpoint=translation_endpoint, api_version=api_version, **kwargs) + __all__ = ["TextTranslationClient", "TranslatorCredential"] diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_version.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_version.py index bbcd28b4aa67e..be71c81bd2821 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_version.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0b2" +VERSION = "1.0.0b1" diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_client.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_client.py index 4293b6f55358d..e64c202fba73a 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_client.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_client.py @@ -44,8 +44,8 @@ class TextTranslationClient(TextTranslationClientOperationsMixin): # pylint: di :param endpoint: Supported Text Translation endpoints (protocol and hostname, for example: https://api.cognitive.microsofttranslator.com). Required. :type endpoint: str - :keyword api_version: Default value is "3.0". Note that overriding this default value may - result in unsupported behavior. + :keyword api_version: Mandatory API version parameter. Default value is "3.0". Note that + overriding this default value may result in unsupported behavior. :paramtype api_version: str """ diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_configuration.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_configuration.py index 2ff232a4e5b2a..66a41a8aea182 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_configuration.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_configuration.py @@ -6,7 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any from azure.core.configuration import Configuration @@ -14,11 +13,6 @@ from .._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports - class TextTranslationClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for TextTranslationClient. @@ -29,14 +23,14 @@ class TextTranslationClientConfiguration(Configuration): # pylint: disable=too- :param endpoint: Supported Text Translation endpoints (protocol and hostname, for example: https://api.cognitive.microsofttranslator.com). Required. :type endpoint: str - :keyword api_version: Default value is "3.0". Note that overriding this default value may - result in unsupported behavior. + :keyword api_version: Mandatory API version parameter. Default value is "3.0". Note that + overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, endpoint: str, **kwargs: Any) -> None: super(TextTranslationClientConfiguration, self).__init__(**kwargs) - api_version: Literal["3.0"] = kwargs.pop("api_version", "3.0") + api_version: str = kwargs.pop("api_version", "3.0") if endpoint is None: raise ValueError("Parameter 'endpoint' must not be None.") diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_operations/_operations.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_operations/_operations.py index 196edc43292dc..fa71952afe946 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_operations/_operations.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_operations/_operations.py @@ -6,6 +6,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import json from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload @@ -80,6 +81,8 @@ async def get_languages( If the resource has not been modified, the service will return status code 304 and an empty response body. Default value is None. :paramtype if_none_match: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: GetLanguagesResult. The GetLanguagesResult is compatible with MutableMapping :rtype: ~azure.ai.translation.text.models.GetLanguagesResult :raises ~azure.core.exceptions.HttpResponseError: @@ -111,7 +114,7 @@ async def get_languages( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -127,7 +130,10 @@ async def get_languages( response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = _deserialize(_models.GetLanguagesResult, response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.GetLanguagesResult, response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -137,15 +143,15 @@ async def get_languages( @overload async def translate( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -159,8 +165,8 @@ async def translate( Translate Text. - :param content: Array of the text to be translated. Required. 
- :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword to: Specifies the language of the output text. The target language must be one of the supported languages included in the translation scope. For example, use to=de to translate to German. @@ -183,9 +189,8 @@ async def translate( :paramtype from_parameter: str :keyword text_type: Defines whether the text being translated is plain text or HTML text. Any HTML needs to be a well-formed, - complete element. Possible values are: plain (default) or html. Known values are: "plain" and - "html". Default value is None. - :paramtype text_type: str or ~azure.ai.translation.text.models.TextType + complete element. Possible values are: plain (default) or html. Default value is None. + :paramtype text_type: str :keyword category: A string specifying the category (domain) of the translation. This parameter is used to get translations from a customized system built with Custom Translator. Add the Category ID from your Custom @@ -194,13 +199,11 @@ async def translate( general. Default value is None. :paramtype category: str :keyword profanity_action: Specifies how profanities should be treated in translations. - Possible values are: NoAction (default), Marked or Deleted. Known values are: "NoAction", - "Marked", and "Deleted". Default value is None. - :paramtype profanity_action: str or ~azure.ai.translation.text.models.ProfanityAction + Possible values are: NoAction (default), Marked or Deleted. Default value is None. + :paramtype profanity_action: str :keyword profanity_marker: Specifies how profanities should be marked in translations. - Possible values are: Asterisk (default) or Tag. Known values are: "Asterisk" and "Tag". - Default value is None. - :paramtype profanity_marker: str or ~azure.ai.translation.text.models.ProfanityMarker + Possible values are: Asterisk (default) or Tag. Default value is None. + :paramtype profanity_marker: str :keyword include_alignment: Specifies whether to include alignment projection from source text to translated text. Possible values are: true or false (default). Default value is None. @@ -236,6 +239,8 @@ async def translate( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TranslatedTextItem :rtype: list[~azure.ai.translation.text.models.TranslatedTextItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -244,15 +249,15 @@ async def translate( @overload async def translate( self, - content: IO, + request_body: IO, *, to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -266,8 +271,8 @@ async def translate( Translate Text. 
- :param content: Array of the text to be translated. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword to: Specifies the language of the output text. The target language must be one of the supported languages included in the translation scope. For example, use to=de to translate to German. @@ -290,9 +295,8 @@ async def translate( :paramtype from_parameter: str :keyword text_type: Defines whether the text being translated is plain text or HTML text. Any HTML needs to be a well-formed, - complete element. Possible values are: plain (default) or html. Known values are: "plain" and - "html". Default value is None. - :paramtype text_type: str or ~azure.ai.translation.text.models.TextType + complete element. Possible values are: plain (default) or html. Default value is None. + :paramtype text_type: str :keyword category: A string specifying the category (domain) of the translation. This parameter is used to get translations from a customized system built with Custom Translator. Add the Category ID from your Custom @@ -301,13 +305,11 @@ async def translate( general. Default value is None. :paramtype category: str :keyword profanity_action: Specifies how profanities should be treated in translations. - Possible values are: NoAction (default), Marked or Deleted. Known values are: "NoAction", - "Marked", and "Deleted". Default value is None. - :paramtype profanity_action: str or ~azure.ai.translation.text.models.ProfanityAction + Possible values are: NoAction (default), Marked or Deleted. Default value is None. + :paramtype profanity_action: str :keyword profanity_marker: Specifies how profanities should be marked in translations. - Possible values are: Asterisk (default) or Tag. Known values are: "Asterisk" and "Tag". - Default value is None. - :paramtype profanity_marker: str or ~azure.ai.translation.text.models.ProfanityMarker + Possible values are: Asterisk (default) or Tag. Default value is None. + :paramtype profanity_marker: str :keyword include_alignment: Specifies whether to include alignment projection from source text to translated text. Possible values are: true or false (default). Default value is None. @@ -343,6 +345,8 @@ async def translate( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
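A hedged usage sketch of the translate overloads documented here; client is an aio TextTranslationClient constructed as above, and the language codes are illustrative.

# Sketch: translate one input item to German and read back the results.
from azure.ai.translation.text.models import InputTextItem

async def translate_example(client):
    result = await client.translate(
        [InputTextItem(text="Hello, world!")],  # request body (positional)
        to=["de"],                # required: one or more target language codes
        from_parameter="en",      # optional: omit to let the service auto-detect
        text_type="plain",        # "plain" (default) or "html"
    )
    for item in result:           # list[TranslatedTextItem]
        if item.detected_language:
            print("detected:", item.detected_language.language, item.detected_language.score)
        for translation in item.translations:
            print(translation.to, translation.text)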
:return: list of TranslatedTextItem :rtype: list[~azure.ai.translation.text.models.TranslatedTextItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -351,15 +355,15 @@ async def translate( @distributed_trace_async async def translate( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, to: List[str], client_trace_id: Optional[str] = None, from_parameter: Optional[str] = None, - text_type: Optional[Union[str, _models.TextType]] = None, + text_type: Optional[str] = None, category: Optional[str] = None, - profanity_action: Optional[Union[str, _models.ProfanityAction]] = None, - profanity_marker: Optional[Union[str, _models.ProfanityMarker]] = None, + profanity_action: Optional[str] = None, + profanity_marker: Optional[str] = None, include_alignment: Optional[bool] = None, include_sentence_length: Optional[bool] = None, suggested_from: Optional[str] = None, @@ -372,9 +376,9 @@ async def translate( Translate Text. - :param content: Array of the text to be translated. Is either a [InputTextItem] type or a IO - type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a + IO type. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword to: Specifies the language of the output text. The target language must be one of the supported languages included in the translation scope. For example, use to=de to translate to German. @@ -397,9 +401,8 @@ async def translate( :paramtype from_parameter: str :keyword text_type: Defines whether the text being translated is plain text or HTML text. Any HTML needs to be a well-formed, - complete element. Possible values are: plain (default) or html. Known values are: "plain" and - "html". Default value is None. - :paramtype text_type: str or ~azure.ai.translation.text.models.TextType + complete element. Possible values are: plain (default) or html. Default value is None. + :paramtype text_type: str :keyword category: A string specifying the category (domain) of the translation. This parameter is used to get translations from a customized system built with Custom Translator. Add the Category ID from your Custom @@ -408,13 +411,11 @@ async def translate( general. Default value is None. :paramtype category: str :keyword profanity_action: Specifies how profanities should be treated in translations. - Possible values are: NoAction (default), Marked or Deleted. Known values are: "NoAction", - "Marked", and "Deleted". Default value is None. - :paramtype profanity_action: str or ~azure.ai.translation.text.models.ProfanityAction + Possible values are: NoAction (default), Marked or Deleted. Default value is None. + :paramtype profanity_action: str :keyword profanity_marker: Specifies how profanities should be marked in translations. - Possible values are: Asterisk (default) or Tag. Known values are: "Asterisk" and "Tag". - Default value is None. - :paramtype profanity_marker: str or ~azure.ai.translation.text.models.ProfanityMarker + Possible values are: Asterisk (default) or Tag. Default value is None. + :paramtype profanity_marker: str :keyword include_alignment: Specifies whether to include alignment projection from source text to translated text. Possible values are: true or false (default). Default value is None. @@ -450,6 +451,8 @@ async def translate( :keyword content_type: Body parameter Content-Type. 
Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TranslatedTextItem :rtype: list[~azure.ai.translation.text.models.TranslatedTextItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -470,10 +473,10 @@ async def translate( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_translate_request( to=to, @@ -500,7 +503,7 @@ async def translate( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -517,7 +520,10 @@ async def translate( response_headers["x-mt-system"] = self._deserialize("str", response.headers.get("x-mt-system")) response_headers["x-metered-usage"] = self._deserialize("int", response.headers.get("x-metered-usage")) - deserialized = _deserialize(List[_models.TranslatedTextItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.TranslatedTextItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -527,7 +533,7 @@ async def translate( @overload async def transliterate( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, language: str, from_script: str, @@ -540,8 +546,8 @@ async def transliterate( Transliterate Text. - :param content: Array of the text to be transliterated. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword language: Specifies the language of the text to convert from one script to another. Possible languages are listed in the transliteration scope obtained by querying the service for its supported languages. Required. @@ -560,6 +566,8 @@ async def transliterate( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TransliteratedText :rtype: list[~azure.ai.translation.text.models.TransliteratedText] :raises ~azure.core.exceptions.HttpResponseError: @@ -568,7 +576,7 @@ async def transliterate( @overload async def transliterate( self, - content: IO, + request_body: IO, *, language: str, from_script: str, @@ -581,8 +589,8 @@ async def transliterate( Transliterate Text. - :param content: Array of the text to be transliterated. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword language: Specifies the language of the text to convert from one script to another. 
Possible languages are listed in the transliteration scope obtained by querying the service for its supported languages. Required. @@ -601,6 +609,8 @@ async def transliterate( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of TransliteratedText :rtype: list[~azure.ai.translation.text.models.TransliteratedText] :raises ~azure.core.exceptions.HttpResponseError: @@ -609,7 +619,7 @@ async def transliterate( @distributed_trace_async async def transliterate( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, language: str, from_script: str, @@ -621,9 +631,9 @@ async def transliterate( Transliterate Text. - :param content: Array of the text to be transliterated. Is either a [InputTextItem] type or a + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a IO type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword language: Specifies the language of the text to convert from one script to another. Possible languages are listed in the transliteration scope obtained by querying the service for its supported languages. Required. @@ -642,6 +652,8 @@ async def transliterate( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
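A hedged sketch of the transliterate call described above; the language and script codes are typical values from the transliteration scope and are illustrative.

# Sketch: convert Japanese text from Japanese script to Latin script.
from azure.ai.translation.text.models import InputTextItem

async def transliterate_example(client):
    result = await client.transliterate(
        [InputTextItem(text="こんにちは")],  # request body (positional)
        language="ja",        # language of the input text
        from_script="Jpan",   # script of the input text
        to_script="Latn",     # script to convert the text into
    )
    for item in result:       # list[TransliteratedText]
        print(item.script, item.text)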
:return: list of TransliteratedText :rtype: list[~azure.ai.translation.text.models.TransliteratedText] :raises ~azure.core.exceptions.HttpResponseError: @@ -662,10 +674,10 @@ async def transliterate( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_transliterate_request( language=language, @@ -683,7 +695,7 @@ async def transliterate( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -698,7 +710,10 @@ async def transliterate( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.TransliteratedText], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.TransliteratedText], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -708,7 +723,7 @@ async def transliterate( @overload async def find_sentence_boundaries( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, client_trace_id: Optional[str] = None, language: Optional[str] = None, @@ -720,9 +735,8 @@ async def find_sentence_boundaries( Find Sentence Boundaries. - :param content: Array of the text for which values the sentence boundaries will be calculated. - Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword client_trace_id: A client-generated GUID to uniquely identify the request. Default value is None. :paramtype client_trace_id: str @@ -737,6 +751,8 @@ async def find_sentence_boundaries( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of BreakSentenceItem :rtype: list[~azure.ai.translation.text.models.BreakSentenceItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -745,7 +761,7 @@ async def find_sentence_boundaries( @overload async def find_sentence_boundaries( self, - content: IO, + request_body: IO, *, client_trace_id: Optional[str] = None, language: Optional[str] = None, @@ -757,9 +773,8 @@ async def find_sentence_boundaries( Find Sentence Boundaries. - :param content: Array of the text for which values the sentence boundaries will be calculated. - Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword client_trace_id: A client-generated GUID to uniquely identify the request. Default value is None. :paramtype client_trace_id: str @@ -774,6 +789,8 @@ async def find_sentence_boundaries( :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of BreakSentenceItem :rtype: list[~azure.ai.translation.text.models.BreakSentenceItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -782,7 +799,7 @@ async def find_sentence_boundaries( @distributed_trace_async async def find_sentence_boundaries( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, client_trace_id: Optional[str] = None, language: Optional[str] = None, @@ -793,9 +810,9 @@ async def find_sentence_boundaries( Find Sentence Boundaries. - :param content: Array of the text for which values the sentence boundaries will be calculated. - Is either a [InputTextItem] type or a IO type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a + IO type. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword client_trace_id: A client-generated GUID to uniquely identify the request. Default value is None. :paramtype client_trace_id: str @@ -810,6 +827,8 @@ async def find_sentence_boundaries( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of BreakSentenceItem :rtype: list[~azure.ai.translation.text.models.BreakSentenceItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -830,10 +849,10 @@ async def find_sentence_boundaries( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_find_sentence_boundaries_request( client_trace_id=client_trace_id, @@ -850,7 +869,7 @@ async def find_sentence_boundaries( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -865,7 +884,10 @@ async def find_sentence_boundaries( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.BreakSentenceItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.BreakSentenceItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -875,7 +897,7 @@ async def find_sentence_boundaries( @overload async def lookup_dictionary_entries( self, - content: List[_models.InputTextItem], + request_body: List[_models.InputTextItem], *, from_parameter: str, to: str, @@ -887,8 +909,8 @@ async def lookup_dictionary_entries( Lookup Dictionary Entries. 
- :param content: Array of the text to be sent to dictionary. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -903,6 +925,8 @@ async def lookup_dictionary_entries( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryLookupItem :rtype: list[~azure.ai.translation.text.models.DictionaryLookupItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -911,7 +935,7 @@ async def lookup_dictionary_entries( @overload async def lookup_dictionary_entries( self, - content: IO, + request_body: IO, *, from_parameter: str, to: str, @@ -923,8 +947,8 @@ async def lookup_dictionary_entries( Lookup Dictionary Entries. - :param content: Array of the text to be sent to dictionary. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -939,6 +963,8 @@ async def lookup_dictionary_entries( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryLookupItem :rtype: list[~azure.ai.translation.text.models.DictionaryLookupItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -947,7 +973,7 @@ async def lookup_dictionary_entries( @distributed_trace_async async def lookup_dictionary_entries( self, - content: Union[List[_models.InputTextItem], IO], + request_body: Union[List[_models.InputTextItem], IO], *, from_parameter: str, to: str, @@ -958,9 +984,9 @@ async def lookup_dictionary_entries( Lookup Dictionary Entries. - :param content: Array of the text to be sent to dictionary. Is either a [InputTextItem] type or - a IO type. Required. - :type content: list[~azure.ai.translation.text.models.InputTextItem] or IO + :param request_body: Defines the content of the request. Is either a [InputTextItem] type or a + IO type. Required. + :type request_body: list[~azure.ai.translation.text.models.InputTextItem] or IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -975,6 +1001,8 @@ async def lookup_dictionary_entries( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
:return: list of DictionaryLookupItem :rtype: list[~azure.ai.translation.text.models.DictionaryLookupItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -995,10 +1023,10 @@ async def lookup_dictionary_entries( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_lookup_dictionary_entries_request( from_parameter=from_parameter, @@ -1015,7 +1043,7 @@ async def lookup_dictionary_entries( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -1030,7 +1058,10 @@ async def lookup_dictionary_entries( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.DictionaryLookupItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.DictionaryLookupItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1040,7 +1071,7 @@ async def lookup_dictionary_entries( @overload async def lookup_dictionary_examples( self, - content: List[_models.DictionaryExampleTextItem], + request_body: List[_models.DictionaryExampleTextItem], *, from_parameter: str, to: str, @@ -1052,8 +1083,8 @@ async def lookup_dictionary_examples( Lookup Dictionary Examples. - :param content: Array of the text to be sent to dictionary. Required. - :type content: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] + :param request_body: Defines the content of the request. Required. + :type request_body: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1068,6 +1099,8 @@ async def lookup_dictionary_examples( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryExampleItem :rtype: list[~azure.ai.translation.text.models.DictionaryExampleItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1076,7 +1109,7 @@ async def lookup_dictionary_examples( @overload async def lookup_dictionary_examples( self, - content: IO, + request_body: IO, *, from_parameter: str, to: str, @@ -1088,8 +1121,8 @@ async def lookup_dictionary_examples( Lookup Dictionary Examples. - :param content: Array of the text to be sent to dictionary. Required. - :type content: IO + :param request_body: Defines the content of the request. Required. + :type request_body: IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. 
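A hedged sketch of the lookup_dictionary_entries operation implemented above (language codes are illustrative):

# Sketch: look up alternative Spanish translations for an English term.
from azure.ai.translation.text.models import InputTextItem

async def dictionary_lookup_example(client):
    result = await client.lookup_dictionary_entries(
        [InputTextItem(text="fly")],  # request body (positional)
        from_parameter="en",          # source language
        to="es",                      # target language
    )
    for entry in result:              # list[DictionaryLookupItem]
        for translation in entry.translations:
            print(translation.normalized_target, translation.confidence)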
@@ -1104,6 +1137,8 @@ async def lookup_dictionary_examples( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. :return: list of DictionaryExampleItem :rtype: list[~azure.ai.translation.text.models.DictionaryExampleItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1112,7 +1147,7 @@ async def lookup_dictionary_examples( @distributed_trace_async async def lookup_dictionary_examples( self, - content: Union[List[_models.DictionaryExampleTextItem], IO], + request_body: Union[List[_models.DictionaryExampleTextItem], IO], *, from_parameter: str, to: str, @@ -1123,9 +1158,9 @@ async def lookup_dictionary_examples( Lookup Dictionary Examples. - :param content: Array of the text to be sent to dictionary. Is either a + :param request_body: Defines the content of the request. Is either a [DictionaryExampleTextItem] type or a IO type. Required. - :type content: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] or IO + :type request_body: list[~azure.ai.translation.text.models.DictionaryExampleTextItem] or IO :keyword from_parameter: Specifies the language of the input text. The source language must be one of the supported languages included in the dictionary scope. Required. @@ -1140,6 +1175,8 @@ async def lookup_dictionary_examples( :keyword content_type: Body parameter Content-Type. Known values are: application/json. Default value is None. :paramtype content_type: str + :keyword bool stream: Whether to stream the response of this operation. Defaults to False. You + will have to context manage the returned stream. 
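A hedged sketch of the lookup_dictionary_examples call documented here; the (term, translation) pair would normally come from a prior dictionary lookup, and the values shown are illustrative.

# Sketch: fetch usage examples for a source term / translation pair.
from azure.ai.translation.text.models import DictionaryExampleTextItem

async def dictionary_examples_example(client):
    result = await client.lookup_dictionary_examples(
        [DictionaryExampleTextItem(text="fly", translation="volar")],  # request body (positional)
        from_parameter="en",
        to="es",
    )
    for item in result:               # list[DictionaryExampleItem]
        for example in item.examples:
            print(example.source_prefix + example.source_term + example.source_suffix)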
:return: list of DictionaryExampleItem :rtype: list[~azure.ai.translation.text.models.DictionaryExampleItem] :raises ~azure.core.exceptions.HttpResponseError: @@ -1160,10 +1197,10 @@ async def lookup_dictionary_examples( content_type = content_type or "application/json" _content = None - if isinstance(content, (IO, bytes)): - _content = content + if isinstance(request_body, (IOBase, bytes)): + _content = request_body else: - _content = json.dumps(content, cls=AzureJSONEncoder) # type: ignore + _content = json.dumps(request_body, cls=AzureJSONEncoder) # type: ignore request = build_text_translation_lookup_dictionary_examples_request( from_parameter=from_parameter, @@ -1180,7 +1217,7 @@ async def lookup_dictionary_examples( } request.url = self._client.format_url(request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=_stream, **kwargs ) @@ -1195,7 +1232,10 @@ async def lookup_dictionary_examples( response_headers = {} response_headers["X-RequestId"] = self._deserialize("str", response.headers.get("X-RequestId")) - deserialized = _deserialize(List[_models.DictionaryExampleItem], response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(List[_models.DictionaryExampleItem], response.json()) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_patch.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_patch.py index dc0163493ef8c..fb65d0457a7ef 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_patch.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/aio/_patch.py @@ -6,20 +6,16 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import ( Union, Optional ) -from azure.core.pipeline.policies import ( AsyncBearerTokenCredentialPolicy, AzureKeyCredentialPolicy ) +from typing import Union, Optional +from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AzureKeyCredentialPolicy from azure.core.credentials import AzureKeyCredential from azure.core.credentials_async import AsyncTokenCredential -from .._patch import ( - DEFAULT_TOKEN_SCOPE, - get_translation_endpoint, - TranslatorAuthenticationPolicy, - TranslatorCredential -) +from .._patch import DEFAULT_TOKEN_SCOPE, get_translation_endpoint, TranslatorAuthenticationPolicy, TranslatorCredential from ._client import TextTranslationClient as ServiceClientGenerated + def patch_sdk(): """Do not remove from this file. 
@@ -28,6 +24,7 @@ def patch_sdk(): https://aka.ms/azsdk/python/dpcodegen/python/customize """ + def set_authentication_policy(credential, kwargs): if isinstance(credential, TranslatorCredential): if not kwargs.get("authentication_policy"): @@ -35,10 +32,14 @@ def set_authentication_policy(credential, kwargs): elif isinstance(credential, AzureKeyCredential): if not kwargs.get("authentication_policy"): kwargs["authentication_policy"] = AzureKeyCredentialPolicy( - name="Ocp-Apim-Subscription-Key", credential=credential) + name="Ocp-Apim-Subscription-Key", credential=credential + ) elif hasattr(credential, "get_token"): if not kwargs.get("authentication_policy"): - kwargs["authentication_policy"] = AsyncBearerTokenCredentialPolicy(credential, *kwargs.pop("credential_scopes", [DEFAULT_TOKEN_SCOPE]), kwargs) + kwargs["authentication_policy"] = AsyncBearerTokenCredentialPolicy( + credential, *kwargs.pop("credential_scopes", [DEFAULT_TOKEN_SCOPE]), kwargs + ) + class TextTranslationClient(ServiceClientGenerated): """Text translation is a cloud-based REST API feature of the Translator service that uses neural @@ -81,23 +82,21 @@ class TextTranslationClient(ServiceClientGenerated): result in unsupported behavior. :paramtype api_version: str """ + def __init__( - self, - credential: Union[AzureKeyCredential , AsyncTokenCredential , TranslatorCredential], - *, - endpoint: Optional[str] = None, - api_version = "3.0", - **kwargs): + self, + credential: Union[AzureKeyCredential, AsyncTokenCredential, TranslatorCredential], + *, + endpoint: Optional[str] = None, + api_version="3.0", + **kwargs + ): set_authentication_policy(credential, kwargs) translation_endpoint = get_translation_endpoint(endpoint, api_version) - super().__init__( - endpoint=translation_endpoint, - api_version=api_version, - **kwargs - ) + super().__init__(endpoint=translation_endpoint, api_version=api_version, **kwargs) __all__ = ["TextTranslationClient"] diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/__init__.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/__init__.py index 3f7702d816817..45ccc46b08481 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/__init__.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/__init__.py @@ -29,12 +29,7 @@ from ._models import TranslationLanguage from ._models import TransliterableScript from ._models import TransliteratedText -from ._models import Transliteration from ._models import TransliterationLanguage - -from ._enums import ProfanityAction -from ._enums import ProfanityMarker -from ._enums import TextType from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk @@ -63,11 +58,7 @@ "TranslationLanguage", "TransliterableScript", "TransliteratedText", - "Transliteration", "TransliterationLanguage", - "ProfanityAction", - "ProfanityMarker", - "TextType", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/_models.py b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/_models.py index 98ad4345885cb..b892544249165 100644 --- a/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/_models.py +++ b/sdk/translation/azure-ai-translation-text/azure/ai/translation/text/models/_models.py @@ -49,22 +49,26 @@ class 
BackTranslation(_model_base.Model): """ normalized_text: str = rest_field(name="normalizedText") - """A string giving the normalized form of the source term that is a back-translation of the target. -This value should be used as input to lookup examples. Required. """ + """A string giving the normalized form of the source term that is a back-translation of the + target. + This value should be used as input to lookup examples. Required.""" display_text: str = rest_field(name="displayText") """A string giving the source term that is a back-translation of the target in a form best -suited for end-user display. Required. """ + suited for end-user display. Required.""" num_examples: int = rest_field(name="numExamples") """An integer representing the number of examples that are available for this translation pair. -Actual examples must be retrieved with a separate call to lookup examples. The number is mostly -intended to facilitate display in a UX. For example, a user interface may add a hyperlink -to the back-translation if the number of examples is greater than zero and show the back-translation -as plain text if there are no examples. Note that the actual number of examples returned -by a call to lookup examples may be less than numExamples, because additional filtering may be -applied on the fly to remove \"bad\" examples. Required. """ + Actual examples must be retrieved with a separate call to lookup examples. The number is mostly + intended to facilitate display in a UX. For example, a user interface may add a hyperlink + to the back-translation if the number of examples is greater than zero and show the + back-translation + as plain text if there are no examples. Note that the actual number of examples returned + by a call to lookup examples may be less than numExamples, because additional filtering may be + applied on the fly to remove \"bad\" examples. Required.""" frequency_count: int = rest_field(name="frequencyCount") - """An integer representing the frequency of this translation pair in the data. The main purpose of this -field is to provide a user interface with a means to sort back-translations so the most frequent terms are first. Required. """ + """An integer representing the frequency of this translation pair in the data. The main purpose of + this + field is to provide a user interface with a means to sort back-translations so the most + frequent terms are first. Required.""" @overload def __init__( @@ -103,10 +107,12 @@ class BreakSentenceItem(_model_base.Model): """ detected_language: Optional["_models.DetectedLanguage"] = rest_field(name="detectedLanguage") - """The detectedLanguage property is only present in the result object when language auto-detection is requested. """ + """The detectedLanguage property is only present in the result object when language auto-detection + is requested.""" sent_len: List[int] = rest_field(name="sentLen") """An integer array representing the lengths of the sentences in the input text. -The length of the array is the number of sentences, and the values are the length of each sentence. Required. """ + The length of the array is the number of sentences, and the values are the length of each + sentence. Required.""" @overload def __init__( @@ -147,13 +153,14 @@ class CommonScriptModel(_model_base.Model): """ code: str = rest_field() - """Code identifying the script. Required. """ + """Code identifying the script. Required.""" name: str = rest_field() - """Display name of the script in the locale requested via Accept-Language header. 
Required. """ + """Display name of the script in the locale requested via Accept-Language header. Required.""" native_name: str = rest_field(name="nativeName") - """Display name of the language in the locale native for the language. Required. """ + """Display name of the language in the locale native for the language. Required.""" dir: str = rest_field() - """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. Required. """ + """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. + Required.""" @overload def __init__( @@ -190,10 +197,10 @@ class DetectedLanguage(_model_base.Model): """ language: str = rest_field() - """A string representing the code of the detected language. Required. """ + """A string representing the code of the detected language. Required.""" score: float = rest_field() """A float value indicating the confidence in the result. -The score is between zero and one and a low score indicates a low confidence. Required. """ + The score is between zero and one and a low score indicates a low confidence. Required.""" @overload def __init__( @@ -245,22 +252,22 @@ class DictionaryExample(_model_base.Model): source_prefix: str = rest_field(name="sourcePrefix") """The string to concatenate before the value of sourceTerm to form a complete example. -Do not add a space character, since it is already there when it should be. -This value may be an empty string. Required. """ + Do not add a space character, since it is already there when it should be. + This value may be an empty string. Required.""" source_term: str = rest_field(name="sourceTerm") """A string equal to the actual term looked up. The string is added with sourcePrefix -and sourceSuffix to form the complete example. Its value is separated so it can be -marked in a user interface, e.g., by bolding it. Required. """ + and sourceSuffix to form the complete example. Its value is separated so it can be + marked in a user interface, e.g., by bolding it. Required.""" source_suffix: str = rest_field(name="sourceSuffix") """The string to concatenate after the value of sourceTerm to form a complete example. -Do not add a space character, since it is already there when it should be. -This value may be an empty string. Required. """ + Do not add a space character, since it is already there when it should be. + This value may be an empty string. Required.""" target_prefix: str = rest_field(name="targetPrefix") - """A string similar to sourcePrefix but for the target. Required. """ + """A string similar to sourcePrefix but for the target. Required.""" target_term: str = rest_field(name="targetTerm") - """A string similar to sourceTerm but for the target. Required. """ + """A string similar to sourceTerm but for the target. Required.""" target_suffix: str = rest_field(name="targetSuffix") - """A string similar to sourceSuffix but for the target. Required. """ + """A string similar to sourceSuffix but for the target. Required.""" @overload def __init__( @@ -307,12 +314,13 @@ class DictionaryExampleItem(_model_base.Model): normalized_source: str = rest_field(name="normalizedSource") """A string giving the normalized form of the source term. Generally, this should be identical -to the value of the Text field at the matching list index in the body of the request. Required. """ + to the value of the Text field at the matching list index in the body of the request. 
Required.""" normalized_target: str = rest_field(name="normalizedTarget") """A string giving the normalized form of the target term. Generally, this should be identical -to the value of the Translation field at the matching list index in the body of the request. Required. """ + to the value of the Translation field at the matching list index in the body of the request. + Required.""" examples: List["_models.DictionaryExample"] = rest_field() - """A list of examples for the (source term, target term) pair. Required. """ + """A list of examples for the (source term, target term) pair. Required.""" @overload def __init__( @@ -345,7 +353,7 @@ class InputTextItem(_model_base.Model): """ text: str = rest_field() - """Text to translate. Required. """ + """Text to translate. Required.""" @overload def __init__( @@ -383,9 +391,11 @@ class DictionaryExampleTextItem(InputTextItem): """ translation: str = rest_field() - """A string specifying the translated text previously returned by the Dictionary lookup operation. -This should be the value from the normalizedTarget field in the translations list of the Dictionary -lookup response. The service will return examples for the specific source-target word-pair. Required. """ + """A string specifying the translated text previously returned by the Dictionary lookup operation. + This should be the value from the normalizedTarget field in the translations list of the + Dictionary + lookup response. The service will return examples for the specific source-target word-pair. + Required.""" @overload def __init__( @@ -427,14 +437,14 @@ class DictionaryLookupItem(_model_base.Model): normalized_source: str = rest_field(name="normalizedSource") """A string giving the normalized form of the source term. -For example, if the request is \"JOHN\", the normalized form will be \"john\". -The content of this field becomes the input to lookup examples. Required. """ + For example, if the request is \"JOHN\", the normalized form will be \"john\". + The content of this field becomes the input to lookup examples. Required.""" display_source: str = rest_field(name="displaySource") """A string giving the source term in a form best suited for end-user display. -For example, if the input is \"JOHN\", the display form will reflect the usual -spelling of the name: \"John\". Required. """ + For example, if the input is \"JOHN\", the display form will reflect the usual + spelling of the name: \"John\". Required.""" translations: List["_models.DictionaryTranslation"] = rest_field() - """A list of translations for the source term. Required. """ + """A list of translations for the source term. Required.""" @overload def __init__( @@ -496,29 +506,32 @@ class DictionaryTranslation(_model_base.Model): normalized_target: str = rest_field(name="normalizedTarget") """A string giving the normalized form of this term in the target language. -This value should be used as input to lookup examples. Required. """ + This value should be used as input to lookup examples. Required.""" display_target: str = rest_field(name="displayTarget") """A string giving the term in the target language and in a form best suited -for end-user display. Generally, this will only differ from the normalizedTarget -in terms of capitalization. For example, a proper noun like \"Juan\" will have -normalizedTarget = \"juan\" and displayTarget = \"Juan\". Required. """ + for end-user display. Generally, this will only differ from the normalizedTarget + in terms of capitalization. 
For example, a proper noun like \"Juan\" will have + normalizedTarget = \"juan\" and displayTarget = \"Juan\". Required.""" pos_tag: str = rest_field(name="posTag") - """A string associating this term with a part-of-speech tag. Required. """ + """A string associating this term with a part-of-speech tag. Required.""" confidence: float = rest_field() - """A value between 0.0 and 1.0 which represents the \"confidence\" -(or perhaps more accurately, \"probability in the training data\") of that translation pair. -The sum of confidence scores for one source word may or may not sum to 1.0. Required. """ + """A value between 0.0 and 1.0 which represents the \"confidence\" + (or perhaps more accurately, \"probability in the training data\") of that translation pair. + The sum of confidence scores for one source word may or may not sum to 1.0. Required.""" prefix_word: str = rest_field(name="prefixWord") """A string giving the word to display as a prefix of the translation. Currently, -this is the gendered determiner of nouns, in languages that have gendered determiners. -For example, the prefix of the Spanish word \"mosca\" is \"la\", since \"mosca\" is a feminine noun in Spanish. -This is only dependent on the translation, and not on the source. -If there is no prefix, it will be the empty string. Required. """ + this is the gendered determiner of nouns, in languages that have gendered determiners. + For example, the prefix of the Spanish word \"mosca\" is \"la\", since \"mosca\" is a feminine + noun in Spanish. + This is only dependent on the translation, and not on the source. + If there is no prefix, it will be the empty string. Required.""" back_translations: List["_models.BackTranslation"] = rest_field(name="backTranslations") - """A list of \"back translations\" of the target. For example, source words that the target can translate to. -The list is guaranteed to contain the source word that was requested (e.g., if the source word being -looked up is \"fly\", then it is guaranteed that \"fly\" will be in the backTranslations list). -However, it is not guaranteed to be in the first position, and often will not be. Required. """ + """A list of \"back translations\" of the target. For example, source words that the target can + translate to. + The list is guaranteed to contain the source word that was requested (e.g., if the source word + being + looked up is \"fly\", then it is guaranteed that \"fly\" will be in the backTranslations list). + However, it is not guaranteed to be in the first position, and often will not be. Required.""" @overload def __init__( @@ -556,9 +569,9 @@ class ErrorDetails(_model_base.Model): """ code: int = rest_field() - """Number identifier of the error. Required. """ + """Number identifier of the error. Required.""" message: str = rest_field() - """Human readable error description. Required. """ + """Human readable error description. Required.""" @overload def __init__( @@ -590,7 +603,7 @@ class ErrorResponse(_model_base.Model): """ error: "_models.ErrorDetails" = rest_field() - """Error details. Required. """ + """Error details. Required.""" @overload def __init__( @@ -623,11 +636,11 @@ class GetLanguagesResult(_model_base.Model): """ translation: Optional[Dict[str, "_models.TranslationLanguage"]] = rest_field() - """Languages that support translate API. """ + """Languages that support translate API.""" transliteration: Optional[Dict[str, "_models.TransliterationLanguage"]] = rest_field() - """Languages that support transliteration API. 
""" + """Languages that support transliteration API.""" dictionary: Optional[Dict[str, "_models.SourceDictionaryLanguage"]] = rest_field() - """Languages that support dictionary API. """ + """Languages that support dictionary API.""" @overload def __init__( @@ -668,11 +681,13 @@ class SentenceLength(_model_base.Model): """ src_sent_len: List[int] = rest_field(name="srcSentLen") - """An integer array representing the lengths of the sentences in the input text. -The length of the array is the number of sentences, and the values are the length of each sentence. Required. """ + """An integer array representing the lengths of the sentences in the input text. + The length of the array is the number of sentences, and the values are the length of each + sentence. Required.""" trans_sent_len: List[int] = rest_field(name="transSentLen") - """An integer array representing the lengths of the sentences in the translated text. -The length of the array is the number of sentences, and the values are the length of each sentence. Required. """ + """An integer array representing the lengths of the sentences in the translated text. + The length of the array is the number of sentences, and the values are the length of each + sentence. Required.""" @overload def __init__( @@ -714,13 +729,15 @@ class SourceDictionaryLanguage(_model_base.Model): """ name: str = rest_field() - """Display name of the language in the locale requested via Accept-Language header. Required. """ + """Display name of the language in the locale requested via Accept-Language header. Required.""" native_name: str = rest_field(name="nativeName") - """Display name of the language in the locale native for this language. Required. """ + """Display name of the language in the locale native for this language. Required.""" dir: str = rest_field() - """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. Required. """ + """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. + Required.""" translations: List["_models.TargetDictionaryLanguage"] = rest_field() - """List of languages with alterative translations and examples for the query expressed in the source language. Required. """ + """List of languages with alterative translations and examples for the query expressed in the + source language. Required.""" @overload def __init__( @@ -754,7 +771,7 @@ class SourceText(_model_base.Model): """ text: str = rest_field() - """Input text in the default script of the source language. Required. """ + """Input text in the default script of the source language. Required.""" @overload def __init__( @@ -794,13 +811,14 @@ class TargetDictionaryLanguage(_model_base.Model): """ name: str = rest_field() - """Display name of the language in the locale requested via Accept-Language header. Required. """ + """Display name of the language in the locale requested via Accept-Language header. Required.""" native_name: str = rest_field(name="nativeName") - """Display name of the language in the locale native for this language. Required. """ + """Display name of the language in the locale native for this language. Required.""" dir: str = rest_field() - """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. Required. """ + """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. + Required.""" code: str = rest_field() - """Language code identifying the target language. Required. 
""" + """Language code identifying the target language. Required.""" @overload def __init__( @@ -843,12 +861,15 @@ class TranslatedTextAlignment(_model_base.Model): """ proj: str = rest_field() - """Maps input text to translated text. The alignment information is only provided when the request -parameter includeAlignment is true. Alignment is returned as a string value of the following -format: [[SourceTextStartIndex]:[SourceTextEndIndex]–[TgtTextStartIndex]:[TgtTextEndIndex]]. -The colon separates start and end index, the dash separates the languages, and space separates the words. -One word may align with zero, one, or multiple words in the other language, and the aligned words may -be non-contiguous. When no alignment information is available, the alignment element will be empty. Required. """ + """Maps input text to translated text. The alignment information is only provided when the request + parameter includeAlignment is true. Alignment is returned as a string value of the following + format: [[SourceTextStartIndex]:[SourceTextEndIndex]–[TgtTextStartIndex]:[TgtTextEndIndex]]. + The colon separates start and end index, the dash separates the languages, and space separates + the words. + One word may align with zero, one, or multiple words in the other language, and the aligned + words may + be non-contiguous. When no alignment information is available, the alignment element will be + empty. Required.""" @overload def __init__( @@ -891,15 +912,18 @@ class TranslatedTextItem(_model_base.Model): """ detected_language: Optional["_models.DetectedLanguage"] = rest_field(name="detectedLanguage") - """The detectedLanguage property is only present in the result object when language auto-detection is requested. """ + """The detectedLanguage property is only present in the result object when language auto-detection + is requested.""" translations: List["_models.Translation"] = rest_field() - """An array of translation results. The size of the array matches the number of target -languages specified through the to query parameter. Required. """ + """An array of translation results. The size of the array matches the number of target + languages specified through the to query parameter. Required.""" source_text: Optional["_models.SourceText"] = rest_field(name="sourceText") - """Input text in the default script of the source language. sourceText property is present only when -the input is expressed in a script that's not the usual script for the language. For example, -if the input were Arabic written in Latin script, then sourceText.text would be the same Arabic text -converted into Arab script. """ + """Input text in the default script of the source language. sourceText property is present only + when + the input is expressed in a script that's not the usual script for the language. For example, + if the input were Arabic written in Latin script, then sourceText.text would be the same Arabic + text + converted into Arab script.""" @overload def __init__( @@ -933,7 +957,7 @@ class Translation(_model_base.Model): :vartype text: str :ivar transliteration: An object giving the translated text in the script specified by the toScript parameter. - :vartype transliteration: ~azure.ai.translation.text.models.Transliteration + :vartype transliteration: ~azure.ai.translation.text.models.TransliteratedText :ivar alignment: Alignment information. :vartype alignment: ~azure.ai.translation.text.models.TranslatedTextAlignment :ivar sent_len: Sentence boundaries in the input and output texts. 
@@ -941,15 +965,15 @@ class Translation(_model_base.Model): """ to: str = rest_field() - """A string representing the language code of the target language. Required. """ + """A string representing the language code of the target language. Required.""" text: str = rest_field() - """A string giving the translated text. Required. """ - transliteration: Optional["_models.Transliteration"] = rest_field() - """An object giving the translated text in the script specified by the toScript parameter. """ + """A string giving the translated text. Required.""" + transliteration: Optional["_models.TransliteratedText"] = rest_field() + """An object giving the translated text in the script specified by the toScript parameter.""" alignment: Optional["_models.TranslatedTextAlignment"] = rest_field() - """Alignment information. """ + """Alignment information.""" sent_len: Optional["_models.SentenceLength"] = rest_field(name="sentLen") - """Sentence boundaries in the input and output texts. """ + """Sentence boundaries in the input and output texts.""" @overload def __init__( @@ -957,7 +981,7 @@ def __init__( *, to: str, text: str, - transliteration: Optional["_models.Transliteration"] = None, + transliteration: Optional["_models.TransliteratedText"] = None, alignment: Optional["_models.TranslatedTextAlignment"] = None, sent_len: Optional["_models.SentenceLength"] = None, ): @@ -993,11 +1017,12 @@ class TranslationLanguage(_model_base.Model): """ name: str = rest_field() - """Display name of the language in the locale requested via Accept-Language header. Required. """ + """Display name of the language in the locale requested via Accept-Language header. Required.""" native_name: str = rest_field(name="nativeName") - """Display name of the language in the locale native for this language. Required. """ + """Display name of the language in the locale native for this language. Required.""" dir: str = rest_field() - """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. Required. """ + """Directionality, which is rtl for right-to-left languages or ltr for left-to-right languages. + Required.""" @overload def __init__( @@ -1041,7 +1066,7 @@ class TransliterableScript(CommonScriptModel): """ to_scripts: List["_models.CommonScriptModel"] = rest_field(name="toScripts") - """List of scripts available to convert text to. Required. """ + """List of scripts available to convert text to. Required.""" @overload def __init__( @@ -1079,9 +1104,9 @@ class TransliteratedText(_model_base.Model): """ text: str = rest_field() - """A string which is the result of converting the input string to the output script. Required. """ + """A string which is the result of converting the input string to the output script. Required.""" script: str = rest_field() - """A string specifying the script used in the output. Required. """ + """A string specifying the script used in the output. Required.""" @overload def __init__( @@ -1103,42 +1128,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useles super().__init__(*args, **kwargs) -class Transliteration(_model_base.Model): - """An object giving the translated text in the script specified by the toScript parameter. - - All required parameters must be populated in order to send to Azure. - - :ivar script: A string specifying the target script. Required. - :vartype script: str - :ivar text: A string giving the translated text in the target script. Required. 
- :vartype text: str - """ - - script: str = rest_field() - """A string specifying the target script. Required. """ - text: str = rest_field() - """A string giving the translated text in the target script. Required. """ - - @overload - def __init__( - self, - *, - script: str, - text: str, - ): - ... - - @overload - def __init__(self, mapping: Mapping[str, Any]): - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation - super().__init__(*args, **kwargs) - - class TransliterationLanguage(_model_base.Model): """The value of the transliteration property is a dictionary of (key, value) pairs. Each key is a BCP 47 language tag. A key identifies a language for which text can be converted @@ -1158,11 +1147,11 @@ class TransliterationLanguage(_model_base.Model): """ name: str = rest_field() - """Display name of the language in the locale requested via Accept-Language header. Required. """ + """Display name of the language in the locale requested via Accept-Language header. Required.""" native_name: str = rest_field(name="nativeName") - """Display name of the language in the locale native for this language. Required. """ + """Display name of the language in the locale native for this language. Required.""" scripts: List["_models.TransliterableScript"] = rest_field() - """List of scripts to convert from. Required. """ + """List of scripts to convert from. Required.""" @overload def __init__( diff --git a/sdk/translation/azure-ai-translation-text/setup.py b/sdk/translation/azure-ai-translation-text/setup.py index 82d4af22803cc..6ef717edc5613 100644 --- a/sdk/translation/azure-ai-translation-text/setup.py +++ b/sdk/translation/azure-ai-translation-text/setup.py @@ -1,75 +1,47 @@ -#!/usr/bin/env python - -#------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -#-------------------------------------------------------------------------- - -from io import open -import os +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# coding: utf-8 from setuptools import setup, find_packages -import re PACKAGE_NAME = "azure-ai-translation-text" -PACKAGE_PPRINT_NAME = "Text Translation" - -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace('-', '/') -# a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace('-', '.') - -# Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', - fd.read(), re.MULTILINE).group(1) -if not version: - raise RuntimeError('Cannot find version information') - -with open('README.md', encoding='utf-8') as f: - readme = f.read() -with open('CHANGELOG.md', encoding='utf-8') as f: - changelog = f.read() - +version = "1.0.0b1" setup( name=PACKAGE_NAME, version=version, - description="Azure Text Translation Client Library for Python", - author_email='azpysdkhelp@microsoft.com', - url='https://github.com/Azure/azure-sdk-for-python', - long_description=readme + '\n\n' + changelog, - long_description_content_type='text/markdown', - license='MIT License', - author='Microsoft Corporation', + description="azure-ai-translation-text", + author_email="", + url="", keywords="azure, azure sdk", - classifiers=[ - "Development Status :: 4 - Beta", - 'Programming Language :: Python', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'License :: OSI Approved :: MIT License', - ], - zip_safe=False, - packages=find_packages(exclude=[ - # Exclude packages that will be covered by PEP420 or nspkg - 'azure', - 'azure.ai', - 'azure.ai.translation' - ]), + packages=find_packages(), include_package_data=True, - package_data={ - 'azure.ai.translation.text': ['py.typed'], - }, install_requires=[ - "msrest>=0.7.1", + "isodate<1.0.0,>=0.6.1", "azure-core<2.0.0,>=1.24.0", - "typing-extensions>=4.3.0", + "typing-extensions>=4.3.0; python_version<'3.8.0'", ], - python_requires=">=3.7" + long_description="""\ + Text translation is a cloud-based REST API feature of the Translator service that uses neural +machine translation technology to enable quick and accurate source-to-target text translation +in real time across all supported languages. + +The following methods are supported by the Text Translation feature: + +Languages. Returns a list of languages supported by Translate, Transliterate, and Dictionary Lookup operations. + +Translate. Renders single source-language text to multiple target-language texts with a single request. + +Transliterate. Converts characters or letters of a source language to the corresponding characters or letters of a target language. + +Detect. Returns the source code language code and a boolean variable denoting whether the detected language is supported for text translation and transliteration. + +Dictionary lookup. Returns equivalent words for the source term in the target language. + +Dictionary example Returns grammatical structure and context examples for the source term and target term pair. 
+ """, ) diff --git a/sdk/translation/azure-ai-translation-text/tests/conftest.py b/sdk/translation/azure-ai-translation-text/tests/conftest.py index cd8fc782df186..5b96a56d988d5 100644 --- a/sdk/translation/azure-ai-translation-text/tests/conftest.py +++ b/sdk/translation/azure-ai-translation-text/tests/conftest.py @@ -1,13 +1,11 @@ import pytest -from devtools_testutils import ( - test_proxy, - add_remove_header_sanitizer -) +from devtools_testutils import test_proxy, add_remove_header_sanitizer # autouse=True will trigger this fixture on each pytest run, even if it's not explicitly used by a test method -#def start_proxy(test_proxy): - # return +# def start_proxy(test_proxy): +# return + @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): - add_remove_header_sanitizer(headers="Ocp-Apim-Subscription-Key") \ No newline at end of file + add_remove_header_sanitizer(headers="Ocp-Apim-Subscription-Key") diff --git a/sdk/translation/azure-ai-translation-text/tests/preparer.py b/sdk/translation/azure-ai-translation-text/tests/preparer.py index 9eb9a58abd4b4..267e45f8486dd 100644 --- a/sdk/translation/azure-ai-translation-text/tests/preparer.py +++ b/sdk/translation/azure-ai-translation-text/tests/preparer.py @@ -8,9 +8,9 @@ TextTranslationPreparer = functools.partial( EnvironmentVariableLoader, - 'text_translation', + "text_translation", text_translation_endpoint="https://fakeEndpoint.cognitive.microsofttranslator.com", text_translation_custom_endpoint="https://fakeCustomEndpoint.cognitiveservices.azure.com", text_translation_apikey="fakeapikey", text_translation_region="fakeregion", -) \ No newline at end of file +) diff --git a/sdk/translation/azure-ai-translation-text/tests/static_access_token_credential.py b/sdk/translation/azure-ai-translation-text/tests/static_access_token_credential.py index d3c9d8612b867..bccb4a7c18b25 100644 --- a/sdk/translation/azure-ai-translation-text/tests/static_access_token_credential.py +++ b/sdk/translation/azure-ai-translation-text/tests/static_access_token_credential.py @@ -7,15 +7,17 @@ import requests from azure.core.credentials import AccessToken + class StaticAccessTokenCredential(object): request_url: str def __init__(self, apikey, region): self.request_url = "https://{0}.api.cognitive.microsoft.com/sts/v1.0/issueToken?Subscription-Key={1}".format( - region, apikey) + region, apikey + ) def get_token(self, *scopes, **kwargs): response = requests.post(self.request_url) - access_token = response.content.decode('UTF-8') + access_token = response.content.decode("UTF-8") expires_on = datetime.datetime.now() + datetime.timedelta(days=1) return AccessToken(access_token, expires_on) diff --git a/sdk/translation/azure-ai-translation-text/tests/test_break_sentence.py b/sdk/translation/azure-ai-translation-text/tests/test_break_sentence.py index e434667fea7f2..b421cc814e045 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_break_sentence.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_break_sentence.py @@ -8,8 +8,8 @@ from preparer import TextTranslationPreparer from testcase import TextTranslationTest -class TestBreakSentence(TextTranslationTest): +class TestBreakSentence(TextTranslationTest): @TextTranslationPreparer() @recorded_by_proxy def test_autodetect(self, **kwargs): @@ -33,11 +33,13 @@ def test_with_language(self, **kwargs): region = kwargs.get("text_translation_region") client = self.create_client(endpoint, apikey, region) - input_text_elements = [InputTextItem( - text="รวบรวมแผ่นคำตอบ 
ระยะเวลาของโครงการ วิธีเลือกชายในฝัน หมายเลขซีเรียลของระเบียน วันที่สิ้นสุดของโครงการเมื่อเสร็จสมบูรณ์ ปีที่มีการรวบรวม ทุกคนมีวัฒนธรรมและวิธีคิดเหมือนกัน ได้รับโทษจำคุกตลอดชีวิตใน ฉันลดได้ถึง 55 ปอนด์ได้อย่างไร ฉันคิดว่าใครๆ ก็ต้องการกำหนดเมนูอาหารส่วนบุคคล")] + input_text_elements = [ + InputTextItem( + text="รวบรวมแผ่นคำตอบ ระยะเวลาของโครงการ วิธีเลือกชายในฝัน หมายเลขซีเรียลของระเบียน วันที่สิ้นสุดของโครงการเมื่อเสร็จสมบูรณ์ ปีที่มีการรวบรวม ทุกคนมีวัฒนธรรมและวิธีคิดเหมือนกัน ได้รับโทษจำคุกตลอดชีวิตใน ฉันลดได้ถึง 55 ปอนด์ได้อย่างไร ฉันคิดว่าใครๆ ก็ต้องการกำหนดเมนูอาหารส่วนบุคคล" + ) + ] - response = client.find_sentence_boundaries( - content = input_text_elements, language="th") + response = client.find_sentence_boundaries(content=input_text_elements, language="th") assert response is not None expected_lengths = [78, 41, 110, 46] for i, expected_length in enumerate(expected_lengths): @@ -53,8 +55,7 @@ def test_with_language_script(self, **kwargs): input_text_elements = [InputTextItem(text="zhè shì gè cè shì。")] - response = client.find_sentence_boundaries( - content=input_text_elements, language="zh-Hans", script="Latn") + response = client.find_sentence_boundaries(content=input_text_elements, language="zh-Hans", script="Latn") assert response is not None assert response[0].sent_len[0] == 18 @@ -66,8 +67,10 @@ def test_with_multiple_languages(self, **kwargs): region = kwargs.get("text_translation_region") client = self.create_client(endpoint, apikey, region) - input_text_elements = [InputTextItem(text="hello world"), InputTextItem( - text="العالم هو مكان مثير جدا للاهتمام")] + input_text_elements = [ + InputTextItem(text="hello world"), + InputTextItem(text="العالم هو مكان مثير جدا للاهتمام"), + ] response = client.find_sentence_boundaries(content=input_text_elements) assert response is not None diff --git a/sdk/translation/azure-ai-translation-text/tests/test_dictionary_examples.py b/sdk/translation/azure-ai-translation-text/tests/test_dictionary_examples.py index 9b232f07b67b9..9e119822b90ea 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_dictionary_examples.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_dictionary_examples.py @@ -10,7 +10,6 @@ class TestDictionaryExamples(TextTranslationTest): - @TextTranslationPreparer() @recorded_by_proxy def test_single_input_element(self, **kwargs): @@ -21,13 +20,11 @@ def test_single_input_element(self, **kwargs): source_language = "en" target_language = "es" - input_text_elements = [DictionaryExampleTextItem( - text="fly", translation="volar")] + input_text_elements = [DictionaryExampleTextItem(text="fly", translation="volar")] response = client.lookup_dictionary_examples( - content=input_text_elements, - from_parameter=source_language, - to=target_language) + content=input_text_elements, from_parameter=source_language, to=target_language + ) assert response is not None assert response[0].normalized_source == "fly" assert response[0].normalized_target == "volar" @@ -42,14 +39,14 @@ def test_multiple_input_elements(self, **kwargs): source_language = "en" target_language = "es" - input_text_elements = [DictionaryExampleTextItem( - text="fly", translation="volar"), - DictionaryExampleTextItem(text="beef", translation="came")] + input_text_elements = [ + DictionaryExampleTextItem(text="fly", translation="volar"), + DictionaryExampleTextItem(text="beef", translation="came"), + ] response = client.lookup_dictionary_examples( - content=input_text_elements, - from_parameter=source_language, - to=target_language) + 
content=input_text_elements, from_parameter=source_language, to=target_language + ) assert response is not None assert len(response) == 2 assert response[0].normalized_source == "fly" diff --git a/sdk/translation/azure-ai-translation-text/tests/test_dictionary_lookup.py b/sdk/translation/azure-ai-translation-text/tests/test_dictionary_lookup.py index 86aed86e4782b..c5699908f95b1 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_dictionary_lookup.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_dictionary_lookup.py @@ -8,8 +8,8 @@ from preparer import TextTranslationPreparer from testcase import TextTranslationTest -class TestDictionaryLookup(TextTranslationTest): +class TestDictionaryLookup(TextTranslationTest): @TextTranslationPreparer() @recorded_by_proxy def test_single_input_element(self, **kwargs): @@ -23,7 +23,8 @@ def test_single_input_element(self, **kwargs): input_text_elements = [InputTextItem(text="fly")] response = client.lookup_dictionary_entries( - content=input_text_elements, from_parameter=source_language, to=target_language) + content=input_text_elements, from_parameter=source_language, to=target_language + ) assert response is not None assert response[0].normalized_source == "fly" assert response[0].display_source == "fly" @@ -38,14 +39,14 @@ def test_multiple_input_elements(self, **kwargs): source_language = "en" target_language = "es" - input_text_elements = [InputTextItem( - text="fly"), InputTextItem(text="fox")] + input_text_elements = [InputTextItem(text="fly"), InputTextItem(text="fox")] response = client.lookup_dictionary_entries( - content=input_text_elements, from_parameter=source_language, to=target_language) + content=input_text_elements, from_parameter=source_language, to=target_language + ) assert response is not None assert len(response) == 2 assert response[0].normalized_source == "fly" assert response[0].display_source == "fly" assert response[1].normalized_source == "fox" - assert response[1].display_source == "fox" \ No newline at end of file + assert response[1].display_source == "fox" diff --git a/sdk/translation/azure-ai-translation-text/tests/test_get_languages.py b/sdk/translation/azure-ai-translation-text/tests/test_get_languages.py index 37fa83e033084..72a6436744ddd 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_get_languages.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_get_languages.py @@ -9,7 +9,6 @@ class TestGetLanguages(TextTranslationTest): - @TextTranslationPreparer() @recorded_by_proxy def test_all_scopes(self, **kwargs): diff --git a/sdk/translation/azure-ai-translation-text/tests/test_helper.py b/sdk/translation/azure-ai-translation-text/tests/test_helper.py index b70bf9f41e95a..f612b4b8eca06 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_helper.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_helper.py @@ -3,7 +3,8 @@ # Licensed under the MIT License. 
# ------------------------------------ -class TestHelper(): + +class TestHelper: def distance(self, s1, s2, n1, n2): if n1 == 0: return n2 @@ -14,9 +15,11 @@ def distance(self, s1, s2, n1, n2): if s1[n1 - 1] == s2[n2 - 1]: return self.distance(s1, s2, n1 - 1, n2 - 1) - nums = [self.distance(s1, s2, n1, n2 - 1), - self.distance(s1, s2, n1 - 1, n2), - self.distance(s1, s2, n1 - 1, n2 - 1)] + nums = [ + self.distance(s1, s2, n1, n2 - 1), + self.distance(s1, s2, n1 - 1, n2), + self.distance(s1, s2, n1 - 1, n2 - 1), + ] return 1 + min(nums) def edit_distance(self, s1, s2): diff --git a/sdk/translation/azure-ai-translation-text/tests/test_translation.py b/sdk/translation/azure-ai-translation-text/tests/test_translation.py index 10acbbfc4f2b9..42e59aeeb7f02 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_translation.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_translation.py @@ -11,7 +11,6 @@ class TestTranslation(TextTranslationTest): - @TextTranslationPreparer() @recorded_by_proxy def test_translate(self, **kwargs): @@ -23,8 +22,7 @@ def test_translate(self, **kwargs): source_language = "es" target_languages = ["cs"] input_text_elements = [InputTextItem(text="Hola mundo")] - response = client.translate( - content=input_text_elements, to=target_languages, from_parameter=source_language) + response = client.translate(content=input_text_elements, to=target_languages, from_parameter=source_language) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -41,8 +39,7 @@ def test_autodetect(self, **kwargs): target_languages = ["cs"] input_text_elements = [InputTextItem(text="This is a test.")] - response = client.translate( - content=input_text_elements, to=target_languages) + response = client.translate(content=input_text_elements, to=target_languages) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -61,12 +58,10 @@ def test_no_translate_tag(self, **kwargs): source_language = "zh-chs" target_languages = ["en"] - input_text_elements = [InputTextItem( - text="今天是怎么回事是非常可怕的")] - response = client.translate(content=input_text_elements, - to=target_languages, - from_parameter=source_language, - text_type=TextType.HTML) + input_text_elements = [InputTextItem(text="今天是怎么回事是非常可怕的")] + response = client.translate( + content=input_text_elements, to=target_languages, from_parameter=source_language, text_type=TextType.HTML + ) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -82,12 +77,12 @@ def test_dictionary_tag(self, **kwargs): source_language = "en" target_languages = ["es"] - input_text_elements = [InputTextItem( - text="The word < mstrans:dictionary translation =\"wordomatic\">wordomatic is a dictionary entry.")] - response = client.translate( - content=input_text_elements, - to=target_languages, - from_parameter=source_language) + input_text_elements = [ + InputTextItem( + text='The word < mstrans:dictionary translation ="wordomatic">wordomatic is a dictionary entry.' 
+ ) + ] + response = client.translate(content=input_text_elements, to=target_languages, from_parameter=source_language) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -105,11 +100,13 @@ def test_transliteration(self, **kwargs): source_language = "ar" target_languages = ["zh-Hans"] input_text_elements = [InputTextItem(text="hudha akhtabar.")] - response = client.translate(content=input_text_elements, - to=target_languages, - from_parameter=source_language, - from_script="Latn", - to_script="Latn") + response = client.translate( + content=input_text_elements, + to=target_languages, + from_parameter=source_language, + from_script="Latn", + to_script="Latn", + ) assert len(response) == 1 assert response[0].source_text is not None @@ -128,11 +125,13 @@ def test_from_to_latin(self, **kwargs): source_language = "hi" target_languages = ["ta"] input_text_elements = [InputTextItem(text="ap kaise ho")] - response = client.translate(content=input_text_elements, - to=target_languages, - from_parameter=source_language, - from_script="Latn", - to_script="Latn") + response = client.translate( + content=input_text_elements, + to=target_languages, + from_parameter=source_language, + from_script="Latn", + to_script="Latn", + ) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -148,10 +147,12 @@ def test_multiple_input(self, **kwargs): client = self.create_client(endpoint, apikey, region) target_languages = ["cs"] - input_text_elements = [InputTextItem(text="This is a test."), InputTextItem( - text="Esto es una prueba."), InputTextItem(text="Dies ist ein Test.")] - response = client.translate( - content=input_text_elements, to=target_languages) + input_text_elements = [ + InputTextItem(text="This is a test."), + InputTextItem(text="Esto es una prueba."), + InputTextItem(text="Dies ist ein Test."), + ] + response = client.translate(content=input_text_elements, to=target_languages) assert len(response) == 3 assert response[0].detected_language.language == "en" @@ -175,8 +176,7 @@ def test_multiple_target_languages(self, **kwargs): target_languages = ["cs", "es", "de"] input_text_elements = [InputTextItem(text="This is a test.")] - response = client.translate( - content=input_text_elements, to=target_languages) + response = client.translate(content=input_text_elements, to=target_languages) assert len(response) == 1 assert len(response[0].translations) == 3 @@ -195,10 +195,8 @@ def test_different_texttypes(self, **kwargs): client = self.create_client(endpoint, apikey, region) target_languages = ["cs"] - input_text_elements = [InputTextItem( - text="This is a test.")] - response = client.translate( - content=input_text_elements, to=target_languages, text_type=TextType.HTML) + input_text_elements = [InputTextItem(text="This is a test.")] + response = client.translate(content=input_text_elements, to=target_languages, text_type=TextType.HTML) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -214,12 +212,13 @@ def test_profanity(self, **kwargs): client = self.create_client(endpoint, apikey, region) target_languages = ["zh-cn"] - input_text_elements = [InputTextItem( - text="shit this is fucking crazy")] - response = client.translate(content=input_text_elements, - to=target_languages, - profanity_action=ProfanityAction.MARKED, - profanity_marker=ProfanityMarker.ASTERISK) + input_text_elements = [InputTextItem(text="shit this is fucking crazy")] + response = client.translate( + content=input_text_elements, + to=target_languages, + 
profanity_action=ProfanityAction.MARKED, + profanity_marker=ProfanityMarker.ASTERISK, + ) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -237,8 +236,7 @@ def test_alignment(self, **kwargs): target_languages = ["cs"] input_text_elements = [InputTextItem(text="It is a beautiful morning")] - response = client.translate( - content=input_text_elements, to=target_languages, include_alignment=True) + response = client.translate(content=input_text_elements, to=target_languages, include_alignment=True) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -255,12 +253,12 @@ def test_sentence_length(self, **kwargs): client = self.create_client(endpoint, apikey, region) target_languages = ["fr"] - input_text_elements = [InputTextItem( - text="La réponse se trouve dans la traduction automatique. La meilleure technologie de traduction automatique ne peut pas toujours fournir des traductions adaptées à un site ou des utilisateurs comme un être humain. Il suffit de copier et coller un extrait de code n'importe où.")] - response = client.translate( - content=input_text_elements, - to=target_languages, - include_sentence_length=True) + input_text_elements = [ + InputTextItem( + text="La réponse se trouve dans la traduction automatique. La meilleure technologie de traduction automatique ne peut pas toujours fournir des traductions adaptées à un site ou des utilisateurs comme un être humain. Il suffit de copier et coller un extrait de code n'importe où." + ) + ] + response = client.translate(content=input_text_elements, to=target_languages, include_sentence_length=True) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -279,9 +277,7 @@ def test_custom_endpoint(self, **kwargs): target_languages = ["fr"] input_text_elements = [InputTextItem(text="It is a beautiful morning")] - response = client.translate( - content=input_text_elements, - to=target_languages) + response = client.translate(content=input_text_elements, to=target_languages) assert len(response) == 1 assert len(response[0].translations) == 1 @@ -299,8 +295,7 @@ def test_token(self, **kwargs): target_languages = ["cs"] input_text_elements = [InputTextItem(text="This is a test.")] - response = client.translate( - content=input_text_elements, to=target_languages) + response = client.translate(content=input_text_elements, to=target_languages) assert len(response) == 1 assert len(response[0].translations) == 1 diff --git a/sdk/translation/azure-ai-translation-text/tests/test_transliteration.py b/sdk/translation/azure-ai-translation-text/tests/test_transliteration.py index af2756041abba..47c3b579cea68 100644 --- a/sdk/translation/azure-ai-translation-text/tests/test_transliteration.py +++ b/sdk/translation/azure-ai-translation-text/tests/test_transliteration.py @@ -11,7 +11,6 @@ class TestTransliteration(TextTranslationTest, TestHelper): - @TextTranslationPreparer() @recorded_by_proxy def test_transliteration(self, **kwargs): @@ -22,7 +21,8 @@ def test_transliteration(self, **kwargs): input_text_elements = [InputTextItem(text="这里怎么一回事?")] response = client.transliterate( - content=input_text_elements, language="zh-Hans", from_script="Hans", to_script="Latn") + content=input_text_elements, language="zh-Hans", from_script="Hans", to_script="Latn" + ) assert response is not None assert response[0].text is not None @@ -35,10 +35,10 @@ def test_multiple_inputs(self, **kwargs): region = kwargs.get("text_translation_region") client = self.create_client(endpoint, apikey, region) - input_text_elements 
= [InputTextItem( - text="यहएककसौटीहैयहएककसौटीहै"), InputTextItem(text="यहएककसौटीहै")] + input_text_elements = [InputTextItem(text="यहएककसौटीहैयहएककसौटीहै"), InputTextItem(text="यहएककसौटीहै")] response = client.transliterate( - content=input_text_elements, language="hi", from_script="Deva", to_script="Latn") + content=input_text_elements, language="hi", from_script="Deva", to_script="Latn" + ) assert response is not None assert response[0].text is not None @@ -52,10 +52,14 @@ def test_edit_distance(self, **kwargs): region = kwargs.get("text_translation_region") client = self.create_client(endpoint, apikey, region) - input_text_elements = [InputTextItem(text="gujarat"), InputTextItem( - text="hadman"), InputTextItem(text="hukkabar")] + input_text_elements = [ + InputTextItem(text="gujarat"), + InputTextItem(text="hadman"), + InputTextItem(text="hukkabar"), + ] response = client.transliterate( - content=input_text_elements, language="gu", from_script="Latn", to_script="Gujr") + content=input_text_elements, language="gu", from_script="Latn", to_script="Gujr" + ) assert response is not None assert response[0].text is not None @@ -65,6 +69,5 @@ def test_edit_distance(self, **kwargs): expected_texts = ["ગુજરાત", "હદમાં", "હુક્કાબાર"] edit_distance_value = 0 for i, expected_text in enumerate(expected_texts): - edit_distance_value = edit_distance_value + \ - self.edit_distance(expected_text, response[i].text) + edit_distance_value = edit_distance_value + self.edit_distance(expected_text, response[i].text) assert edit_distance_value < 6 diff --git a/sdk/translation/azure-ai-translation-text/tests/testcase.py b/sdk/translation/azure-ai-translation-text/tests/testcase.py index a7a448e74855c..ac6a481f5a9e9 100644 --- a/sdk/translation/azure-ai-translation-text/tests/testcase.py +++ b/sdk/translation/azure-ai-translation-text/tests/testcase.py @@ -16,12 +16,10 @@ def create_getlanguage_client(self, endpoint): def create_client(self, endpoint, apikey, region): credential = TranslatorCredential(apikey, region) - client = TextTranslationClient( - endpoint=endpoint, credential=credential) + client = TextTranslationClient(endpoint=endpoint, credential=credential) return client def create_client_token(self, endpoint, apikey, region): credential = StaticAccessTokenCredential(apikey, region) - client = TextTranslationClient( - endpoint=endpoint, credential=credential) + client = TextTranslationClient(endpoint=endpoint, credential=credential) return client diff --git a/sdk/translation/azure-ai-translation-text/tsp-location.yaml b/sdk/translation/azure-ai-translation-text/tsp-location.yaml new file mode 100644 index 0000000000000..08f6ef0bf6bd2 --- /dev/null +++ b/sdk/translation/azure-ai-translation-text/tsp-location.yaml @@ -0,0 +1,4 @@ +cleanup: false +commit: aa1f08c2bfb9bef613f963aaf971a25558151330 +directory: specification/translation/Azure.AI.TextTranslation +repo: Azure/azure-rest-api-specs
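
A minimal usage sketch, for reference, mirroring the call patterns exercised by the tests in this patch (client construction with TranslatorCredential in testcase.py, then a translate call as in test_translation.py's test_translate). The import locations and the endpoint, key, and region values are illustrative assumptions, not part of the generated code.

# Hypothetical usage sketch; endpoint, API key, and region are placeholders.
# Assumes TranslatorCredential and InputTextItem are exported the same way the
# tests consume them; adjust the imports if the package layout differs.
from azure.ai.translation.text import TextTranslationClient, TranslatorCredential
from azure.ai.translation.text.models import InputTextItem

credential = TranslatorCredential("<api-key>", "<region>")
client = TextTranslationClient(
    endpoint="https://api.cognitive.microsofttranslator.com",  # placeholder endpoint
    credential=credential,
)

# Translate a single Spanish sentence to Czech, as in test_translate.
response = client.translate(
    content=[InputTextItem(text="Hola mundo")],
    to=["cs"],
    from_parameter="es",
)
for item in response:
    for translation in item.translations:
        print(translation.to, translation.text)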