Skip to content

Commit

Permalink
[TA] Regenerate the swagger v3.1-Preview.3 with latest autorest version (#17358)
Browse files Browse the repository at this point in the history

* regenerate with latest autorest and swagger, use 4.0.4 autorest to codegen instead of v4.0.2
  • Loading branch information
mssfang authored Nov 9, 2020
1 parent 5dd341e commit 93e5dfd
Show file tree
Hide file tree
Showing 89 changed files with 1,209 additions and 1,439 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.polling.LongRunningOperationStatus;
Expand Down Expand Up @@ -83,18 +82,18 @@ PollerFlux<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>> beginA
DEFAULT_POLL_DURATION,
activationOperation(service.healthWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
modelVersion,
StringIndexType.UTF16CODE_UNIT) // Currently StringIndexType is not explored, we use it internally
StringIndexType.UTF16CODE_UNIT, // Currently StringIndexType is not explored, we use it internally
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(healthResponse -> {
final TextAnalyticsOperationResult textAnalyticsOperationResult =
new TextAnalyticsOperationResult();
TextAnalyticsOperationResultPropertiesHelper.setResultId(textAnalyticsOperationResult,
parseModelId(healthResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(jobId -> service.healthStatusWithResponseAsync(jobId, context, null, null,
finalIncludeStatistics)),
pollingOperation(jobId -> service.healthStatusWithResponseAsync(jobId, null, null,
finalIncludeStatistics, context)),
(activationResponse, pollingContext) ->
monoError(logger, new RuntimeException("Use the `beginCancelHealthcareJob` to cancel the job")),
fetchingOperation(resultId -> Mono.just(getHealthcareFluxPage(resultId,
Expand All @@ -118,18 +117,18 @@ PollerFlux<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>> be
DEFAULT_POLL_DURATION,
activationOperation(service.healthWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
modelVersion,
StringIndexType.UTF16CODE_UNIT) // Currently StringIndexType is not explored, we use it internally
StringIndexType.UTF16CODE_UNIT, // Currently StringIndexType is not explored, we use it internally
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(healthResponse -> {
final TextAnalyticsOperationResult textAnalyticsOperationResult =
new TextAnalyticsOperationResult();
TextAnalyticsOperationResultPropertiesHelper.setResultId(textAnalyticsOperationResult,
parseModelId(healthResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(jobId -> service.healthStatusWithResponseAsync(jobId, context, null, null,
finalIncludeStatistics)),
pollingOperation(jobId -> service.healthStatusWithResponseAsync(jobId, null, null,
finalIncludeStatistics, context)),
(activationResponse, pollingContext) ->
monoError(logger, new RuntimeException("Use the `beginCancelHealthcareJob` to cancel the job")),
fetchingOperationIterable(resultId -> Mono.just(new PagedIterable<>(getHealthcareFluxPage(resultId,
Expand All @@ -153,11 +152,11 @@ Mono<PagedResponse<HealthcareTaskResult>> getPage(String continuationToken, UUID
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.healthStatusWithResponseAsync(jobID, context, topValue, skipValue, showStats)
return service.healthStatusWithResponseAsync(jobID, topValue, skipValue, showStats, context)
.map(this::toTextAnalyticsPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.healthStatusWithResponseAsync(jobID, context, null, null, showStats)
return service.healthStatusWithResponseAsync(jobID, null, null, showStats, context)
.map(this::toTextAnalyticsPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
Expand Down Expand Up @@ -212,7 +211,7 @@ PollerFlux<TextAnalyticsOperationResult, Void> beginCancelAnalyzeHealthcare(UUID
return textAnalyticsOperationResult;
})),
pollingOperation(resultId ->
service.healthStatusWithResponseAsync(resultId, context, null, null, null)),
service.healthStatusWithResponseAsync(resultId, null, null, null, context)),
(activationResponse, pollingContext) -> monoError(logger,
new RuntimeException("Cancellation of healthcare task cancellation is not supported.")),
(resultId) -> Mono.empty()
Expand All @@ -236,7 +235,7 @@ PollerFlux<TextAnalyticsOperationResult, Void> beginCancelAnalyzeHealthcare(UUID

// Polling operation
private Function<PollingContext<TextAnalyticsOperationResult>, Mono<PollResponse<TextAnalyticsOperationResult>>>
pollingOperation(Function<UUID, Mono<SimpleResponse<HealthcareJobState>>> pollingFunction) {
pollingOperation(Function<UUID, Mono<Response<HealthcareJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<TextAnalyticsOperationResult> operationResultPollResponse =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -215,8 +215,8 @@ private Mono<Response<AnalyzeSentimentResultCollection>> getAnalyzedSentimentRes
}
return service.sentimentWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
modelVersion, includeStatistics, includeOpinionMining, StringIndexType.UTF16CODE_UNIT
modelVersion, includeStatistics, includeOpinionMining, StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE)
)
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Analyzed sentiment for a batch of documents - {}", response))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,9 +151,9 @@ private Mono<Response<DetectLanguageResultCollection>> getDetectedLanguageRespon
Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.languagesWithResponseAsync(
new LanguageBatchInput().setDocuments(toLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics())
options == null ? null : options.isIncludeStatistics(),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Detected languages for a batch of documents - {}",
response.getValue()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,9 +182,9 @@ private Mono<Response<ExtractKeyPhrasesResultCollection>> getExtractedKeyPhrases
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.keyPhrasesWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics())
options == null ? null : options.isIncludeStatistics(),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of document - {}", documents.toString()))
.doOnSuccess(response -> logger.info("A batch of key phrases output - {}", response.getValue()))
.doOnError(error -> logger.warning("Failed to extract key phrases - {}", error))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,10 +187,10 @@ private Mono<Response<RecognizeEntitiesResultCollection>> getRecognizedEntitiesR
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.entitiesRecognitionGeneralWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
StringIndexType.UTF16CODE_UNIT)
StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Recognized entities for a batch of documents- {}",
response.getValue()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,10 +201,10 @@ private IterableStream<LinkedEntity> mapLinkedEntity(
Context context) {
return service.entitiesLinkingWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
StringIndexType.UTF16CODE_UNIT)
StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Recognized linked entities for a batch of documents - {}",
response.getValue()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -211,11 +211,11 @@ private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEnti
}
return service.entitiesRecognitionPiiWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
modelVersion,
includeStatistics,
domainFilter,
StringIndexType.UTF16CODE_UNIT)
StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info(
"Start recognizing Personally Identifiable Information entities for a batch of documents."))
.doOnSuccess(response -> logger.info(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,7 @@ public TextAnalyticsClientImpl buildClient() {
this.pipeline = new HttpPipelineBuilder().policies(new UserAgentPolicy(), new RetryPolicy(),
new CookiePolicy()).build();
}
TextAnalyticsClientImpl client = new TextAnalyticsClientImpl(pipeline);
client.setEndpoint(this.endpoint);
TextAnalyticsClientImpl client = new TextAnalyticsClientImpl(pipeline, endpoint);
return client;
}
}
Loading

0 comments on commit 93e5dfd

Please sign in to comment.