From 46f49fd1ab2a87908cc4e9793163e306482325d3 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Thu, 20 Feb 2020 13:47:09 -0800 Subject: [PATCH 1/5] uses core Odata exception --- .../ai/textanalytics/_response_handlers.py | 46 ++++++------------- .../tests/test_batch.py | 16 +++---- .../tests/test_batch_async.py | 16 +++---- 3 files changed, 30 insertions(+), 48 deletions(-) diff --git a/sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py b/sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py index edee58f006ae1..c8ec6d80272a3 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py +++ b/sdk/textanalytics/azure-ai-textanalytics/azure/ai/textanalytics/_response_handlers.py @@ -5,11 +5,10 @@ # ------------------------------------ import json -from azure.core.pipeline.policies import ContentDecodePolicy from azure.core.exceptions import ( HttpResponseError, ClientAuthenticationError, - DecodeError, + ODataV4Format ) from ._models import ( RecognizeEntitiesResult, @@ -30,41 +29,24 @@ ) +class CSODataV4Format(ODataV4Format): + INNERERROR_LABEL = "innerError" # Service plans to fix casing ("innererror") to reflect ODataV4 error spec + + def __init__(self, odata_error): + try: + if odata_error["error"]["innerError"]: + super(CSODataV4Format, self).__init__(odata_error["error"]["innerError"]) + except KeyError: + super(CSODataV4Format, self).__init__(odata_error) + + def process_batch_error(error): - """Raise detailed error message for HttpResponseErrors + """Raise detailed error message. """ raise_error = HttpResponseError if error.status_code == 401: raise_error = ClientAuthenticationError - error_message = error.message - error_code = error.status_code - error_body, error_target = None, None - - try: - error_body = ContentDecodePolicy.deserialize_from_http_generics(error.response) - except DecodeError: - pass - - try: - if error_body is not None: - error_resp = error_body["error"] - if "innerError" in error_resp: - error_resp = error_resp["innerError"] - - error_message = error_resp["message"] - error_code = error_resp["code"] - error_target = error_resp.get("target", None) - if error_target: - error_message += "\nErrorCode:{}\nTarget:{}".format(error_code, error_target) - else: - error_message += "\nErrorCode:{}".format(error_code) - except KeyError: - raise HttpResponseError(message="There was an unknown error with the request.") - - error = raise_error(message=error_message, response=error.response) - error.error_code = error_code - error.target = error_target - raise error + raise raise_error(response=error.response, error_format=CSODataV4Format) def order_results(response, combined): diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch.py index bda0457138b45..6c6a6f0a150d4 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch.py @@ -972,8 +972,8 @@ def test_text_analytics_error(self, resource_group, location, text_analytics_acc try: result = text_analytics.analyze_sentiment(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) # DocumentErrors doc_errors = text_analytics.analyze_sentiment(docs) @@ -989,8 +989,8 @@ def 
test_text_analytics_error(self, resource_group, location, text_analytics_acc try: result = text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) # Duplicate Ids docs = [{"id": "1", "text": "hello world"}, @@ -998,16 +998,16 @@ def test_text_analytics_error(self, resource_group, location, text_analytics_acc try: result = text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) # Batch size over limit docs = [u"hello world"] * 1001 try: response = text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) # Service bug returns invalidDocument here. Uncomment after v3.0-preview.2 # docs = [{"id": "1", "country_hint": "United States", "text": "hello world"}] diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch_async.py index f612a21619892..9c6358bb65463 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_batch_async.py @@ -1056,8 +1056,8 @@ async def test_text_analytics_error_async(self, resource_group, location, text_a try: result = await text_analytics.analyze_sentiment(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) # DocumentErrors doc_errors = await text_analytics.analyze_sentiment(docs) @@ -1073,8 +1073,8 @@ async def test_text_analytics_error_async(self, resource_group, location, text_a try: result = await text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) # Duplicate Ids docs = [{"id": "1", "text": "hello world"}, @@ -1083,16 +1083,16 @@ async def test_text_analytics_error_async(self, resource_group, location, text_a try: result = await text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) # Batch size over limit docs = [u"hello world"] * 1001 try: response = await text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) # Service bug returns invalidDocument here. 
Uncomment after v3.0-preview.2 # docs = [{"id": "1", "country_hint": "United States", "text": "hello world"}] From 0a51d6d3208309c51f123ff2c2832641499d613f Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Thu, 20 Feb 2020 13:50:51 -0800 Subject: [PATCH 2/5] update azure-core dependency --- sdk/textanalytics/azure-ai-textanalytics/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/textanalytics/azure-ai-textanalytics/setup.py b/sdk/textanalytics/azure-ai-textanalytics/setup.py index ed519982fac2e..46e906410c8c5 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/setup.py +++ b/sdk/textanalytics/azure-ai-textanalytics/setup.py @@ -78,7 +78,7 @@ 'azure.ai', ]), install_requires=[ - "azure-core<2.0.0,>=1.1.0", + "azure-core<2.0.0,>=1.2.3", "msrest>=0.6.0", 'azure-common~=1.1', 'six>=1.6', From dea070acedf1088aef1d5b897c246d870f703f3f Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Tue, 25 Feb 2020 15:44:37 -0800 Subject: [PATCH 3/5] update changelog --- sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md b/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md index 99557d65c6a94..4b2e968689675 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md +++ b/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md @@ -3,6 +3,9 @@ ## 1.0.0b3 (Unreleased) +**Dependency updates** +- Adopted [azure-core](https://pypi.org/project/azure-core/) version 1.2.3 or greater + ## 1.0.0b2 (2020-02-11) **Breaking changes** From 439f06a13f1991cdaf1dfa55f6ba3c76a9f59e17 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Thu, 27 Feb 2020 15:46:08 -0800 Subject: [PATCH 4/5] update dep to azure-core 1.3.0 --- sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md | 2 +- sdk/textanalytics/azure-ai-textanalytics/setup.py | 2 +- shared_requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md b/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md index 4b2e968689675..18a8bd0591f61 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md +++ b/sdk/textanalytics/azure-ai-textanalytics/CHANGELOG.md @@ -4,7 +4,7 @@ **Dependency updates** -- Adopted [azure-core](https://pypi.org/project/azure-core/) version 1.2.3 or greater +- Adopted [azure-core](https://pypi.org/project/azure-core/) version 1.3.0 or greater ## 1.0.0b2 (2020-02-11) diff --git a/sdk/textanalytics/azure-ai-textanalytics/setup.py b/sdk/textanalytics/azure-ai-textanalytics/setup.py index 46e906410c8c5..72e946499971b 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/setup.py +++ b/sdk/textanalytics/azure-ai-textanalytics/setup.py @@ -78,7 +78,7 @@ 'azure.ai', ]), install_requires=[ - "azure-core<2.0.0,>=1.2.3", + "azure-core<2.0.0,>=1.3.0", "msrest>=0.6.0", 'azure-common~=1.1', 'six>=1.6', diff --git a/shared_requirements.txt b/shared_requirements.txt index ddc09755f6312..8af4ec3a87c13 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -114,7 +114,7 @@ six>=1.6 #override azure-keyvault-keys azure-core<2.0.0,>=1.2.1 #override azure-keyvault-secrets azure-core<2.0.0,>=1.2.1 #override azure-ai-textanalytics msrest>=0.6.0 -#override azure-ai-textanalytics azure-core<2.0.0,>=1.1.0 +#override azure-ai-textanalytics azure-core<2.0.0,>=1.3.0 #override azure-storage-blob msrest>=0.6.10 #override azure-storage-queue msrest>=0.6.10 #override azure-storage-file-share msrest>=0.6.10 From 
d9fb8d9ecec95e7bd49d35e17ce91e7ac0bcff8c Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Tue, 3 Mar 2020 18:29:30 -0800 Subject: [PATCH 5/5] fix tests --- .../tests/test_analyze_sentiment.py | 16 ++++++++-------- .../tests/test_analyze_sentiment_async.py | 16 ++++++++-------- .../tests/test_detect_language.py | 16 ++++++++-------- .../tests/test_detect_language_async.py | 16 ++++++++-------- .../tests/test_extract_key_phrases.py | 16 ++++++++-------- .../tests/test_extract_key_phrases_async.py | 16 ++++++++-------- .../tests/test_recognize_entities.py | 16 ++++++++-------- .../tests/test_recognize_entities_async.py | 16 ++++++++-------- .../tests/test_recognize_linked_entities.py | 16 ++++++++-------- .../test_recognize_linked_entities_async.py | 16 ++++++++-------- .../tests/test_recognize_pii_entities.py | 16 ++++++++-------- .../tests/test_recognize_pii_entities_async.py | 16 ++++++++-------- 12 files changed, 96 insertions(+), 96 deletions(-) diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment.py index b84e4eb7087a1..21f524ff68a01 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment.py @@ -429,8 +429,8 @@ def test_bad_model_version_error(self, resource_group, location, text_analytics_ try: result = text_analytics.analyze_sentiment(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_document_errors(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -458,8 +458,8 @@ def test_missing_input_records_error(self, resource_group, location, text_analyt try: result = text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_duplicate_ids_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -470,8 +470,8 @@ def test_duplicate_ids_error(self, resource_group, location, text_analytics_acco try: result = text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_batch_size_over_limit_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -482,8 +482,8 @@ def test_batch_size_over_limit_error(self, resource_group, location, text_analyt try: response = text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_language_kwarg_spanish(self, resource_group, location, text_analytics_account, text_analytics_account_key): diff --git 
a/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment_async.py index 3e4901f95ca16..f2f16bf524766 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_analyze_sentiment_async.py @@ -471,8 +471,8 @@ async def test_bad_model_version_error(self, resource_group, location, text_anal try: result = await text_analytics.analyze_sentiment(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -503,8 +503,8 @@ async def test_missing_input_records_error(self, resource_group, location, text_ try: result = await text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -516,8 +516,8 @@ async def test_duplicate_ids_error(self, resource_group, location, text_analytic try: result = await text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -529,8 +529,8 @@ async def test_batch_size_over_limit_error(self, resource_group, location, text_ try: response = await text_analytics.analyze_sentiment(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language.py index ac2748f6b7dd4..6c5f4d24ef548 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language.py @@ -450,8 +450,8 @@ def test_bad_model_version_error(self, resource_group, location, text_analytics_ try: result = text_analytics.detect_language(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_document_errors(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -476,8 +476,8 @@ def test_missing_input_records_error(self, resource_group, location, text_analyt try: result = text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) 
@GlobalTextAnalyticsAccountPreparer() def test_duplicate_ids_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -489,8 +489,8 @@ def test_duplicate_ids_error(self, resource_group, location, text_analytics_acco try: result = text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_batch_size_over_limit_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -501,8 +501,8 @@ def test_batch_size_over_limit_error(self, resource_group, location, text_analyt try: response = text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @pytest.mark.skip(reason="Service bug returns invalidDocument here. Unskip after v3.0-preview.2") diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language_async.py index 6a79fee05cf7f..aa36ff5a1ae8a 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_detect_language_async.py @@ -491,8 +491,8 @@ async def test_bad_model_version_error(self, resource_group, location, text_anal try: result = await text_analytics.detect_language(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -519,8 +519,8 @@ async def test_missing_input_records_error(self, resource_group, location, text_ try: result = await text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -532,8 +532,8 @@ async def test_duplicate_ids_error(self, resource_group, location, text_analytic try: result = await text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -545,8 +545,8 @@ async def test_batch_size_over_limit_error(self, resource_group, location, text_ try: response = await text_analytics.detect_language(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases.py 
b/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases.py index 8bbd2740bd90b..558ff271edd28 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases.py @@ -399,8 +399,8 @@ def test_bad_model_version_error(self, resource_group, location, text_analytics_ try: result = text_analytics.extract_key_phrases(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_document_errors(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -428,8 +428,8 @@ def test_missing_input_records_error(self, resource_group, location, text_analyt try: result = text_analytics.extract_key_phrases(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_duplicate_ids_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -440,8 +440,8 @@ def test_duplicate_ids_error(self, resource_group, location, text_analytics_acco try: result = text_analytics.extract_key_phrases(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_batch_size_over_limit_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -451,8 +451,8 @@ def test_batch_size_over_limit_error(self, resource_group, location, text_analyt try: response = text_analytics.extract_key_phrases(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_language_kwarg_spanish(self, resource_group, location, text_analytics_account, text_analytics_account_key): diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases_async.py index a7e20b20ae539..e05ea66b0c74a 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_extract_key_phrases_async.py @@ -440,8 +440,8 @@ async def test_bad_model_version_error(self, resource_group, location, text_anal try: result = await text_analytics.extract_key_phrases(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -471,8 +471,8 @@ async def test_missing_input_records_error(self, resource_group, location, text_ try: result = await text_analytics.extract_key_phrases(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, 
"MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -485,8 +485,8 @@ async def test_duplicate_ids_error(self, resource_group, location, text_analytic try: result = await text_analytics.extract_key_phrases(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -498,8 +498,8 @@ async def test_batch_size_over_limit_error(self, resource_group, location, text_ try: response = await text_analytics.extract_key_phrases(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities.py index 34aa2dc5d88ef..d3b4b2c2dd5e7 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities.py @@ -414,8 +414,8 @@ def test_bad_model_version_error(self, resource_group, location, text_analytics_ try: result = text_analytics.recognize_entities(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_document_errors(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -443,8 +443,8 @@ def test_missing_input_records_error(self, resource_group, location, text_analyt try: result = text_analytics.recognize_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_duplicate_ids_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -455,8 +455,8 @@ def test_duplicate_ids_error(self, resource_group, location, text_analytics_acco try: result = text_analytics.recognize_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_batch_size_over_limit_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -466,8 +466,8 @@ def test_batch_size_over_limit_error(self, resource_group, location, text_analyt try: response = text_analytics.recognize_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) 
@GlobalTextAnalyticsAccountPreparer() def test_language_kwarg_spanish(self, resource_group, location, text_analytics_account, text_analytics_account_key): diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities_async.py index 54ce1306b689a..8755ccead1e29 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_entities_async.py @@ -454,8 +454,8 @@ async def test_bad_model_version_error(self, resource_group, location, text_anal try: result = await text_analytics.recognize_entities(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -485,8 +485,8 @@ async def test_missing_input_records_error(self, resource_group, location, text_ try: result = await text_analytics.recognize_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -498,8 +498,8 @@ async def test_duplicate_ids_error(self, resource_group, location, text_analytic try: result = await text_analytics.recognize_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -510,8 +510,8 @@ async def test_batch_size_over_limit_error(self, resource_group, location, text_ try: response = await text_analytics.recognize_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities.py index f00fb5fd8b2c8..61826105d5936 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities.py @@ -409,8 +409,8 @@ def test_bad_model_version_error(self, resource_group, location, text_analytics_ try: result = text_analytics.recognize_linked_entities(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_document_errors(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -438,8 +438,8 @@ def test_missing_input_records_error(self, resource_group, location, text_analyt try: result = text_analytics.recognize_linked_entities(docs) except 
HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_duplicate_ids_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -450,8 +450,8 @@ def test_duplicate_ids_error(self, resource_group, location, text_analytics_acco try: result = text_analytics.recognize_linked_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_batch_size_over_limit_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -461,8 +461,8 @@ def test_batch_size_over_limit_error(self, resource_group, location, text_analyt try: response = text_analytics.recognize_linked_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_language_kwarg_spanish(self, resource_group, location, text_analytics_account, text_analytics_account_key): diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities_async.py index f6d025ef4a401..0a7b39ae52c68 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_linked_entities_async.py @@ -450,8 +450,8 @@ async def test_bad_model_version_error(self, resource_group, location, text_anal try: result = await text_analytics.recognize_linked_entities(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -481,8 +481,8 @@ async def test_missing_input_records_error(self, resource_group, location, text_ try: result = await text_analytics.recognize_linked_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -494,8 +494,8 @@ async def test_duplicate_ids_error(self, resource_group, location, text_analytic try: result = await text_analytics.recognize_linked_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -506,8 +506,8 @@ async def test_batch_size_over_limit_error(self, resource_group, location, text_ try: response = await text_analytics.recognize_linked_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, 
"InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities.py index f95a97f300202..9b11bad83f5b2 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities.py @@ -437,8 +437,8 @@ def test_bad_model_version_error(self, resource_group, location, text_analytics_ try: result = text_analytics.recognize_pii_entities(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_document_errors(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -466,8 +466,8 @@ def test_missing_input_records_error(self, resource_group, location, text_analyt try: result = text_analytics.recognize_pii_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_duplicate_ids_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -478,8 +478,8 @@ def test_duplicate_ids_error(self, resource_group, location, text_analytics_acco try: result = text_analytics.recognize_pii_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_batch_size_over_limit_error(self, resource_group, location, text_analytics_account, text_analytics_account_key): @@ -489,8 +489,8 @@ def test_batch_size_over_limit_error(self, resource_group, location, text_analyt try: response = text_analytics.recognize_pii_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() def test_language_kwarg_english(self, resource_group, location, text_analytics_account, text_analytics_account_key): diff --git a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py index 80d6cd21999fb..33676eae336d4 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py +++ b/sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py @@ -478,8 +478,8 @@ async def test_bad_model_version_error(self, resource_group, location, text_anal try: result = await text_analytics.recognize_pii_entities(docs, model_version="bad") except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidRequest") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, 
"InvalidRequest") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -509,8 +509,8 @@ async def test_missing_input_records_error(self, resource_group, location, text_ try: result = await text_analytics.recognize_pii_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "MissingInputRecords") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "MissingInputRecords") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -522,8 +522,8 @@ async def test_duplicate_ids_error(self, resource_group, location, text_analytic try: result = await text_analytics.recognize_pii_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocument") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocument") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test @@ -534,8 +534,8 @@ async def test_batch_size_over_limit_error(self, resource_group, location, text_ try: response = await text_analytics.recognize_pii_entities(docs) except HttpResponseError as err: - self.assertEqual(err.error_code, "InvalidDocumentBatch") - self.assertIsNotNone(err.message) + self.assertEqual(err.error.code, "InvalidDocumentBatch") + self.assertIsNotNone(err.error.message) @GlobalTextAnalyticsAccountPreparer() @AsyncTextAnalyticsTest.await_prepared_test