diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/api/Azure.AI.AnomalyDetector.netstandard2.0.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/api/Azure.AI.AnomalyDetector.netstandard2.0.cs index ee954e42ecf6a..5522ff6551a61 100644 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/api/Azure.AI.AnomalyDetector.netstandard2.0.cs +++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/api/Azure.AI.AnomalyDetector.netstandard2.0.cs @@ -53,9 +53,7 @@ public AnomalyDetectorClient(System.Uri endpoint, Azure.AzureKeyCredential crede public virtual Azure.Response DetectUnivariateChangePoint(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task> DetectUnivariateChangePointAsync(Azure.AI.AnomalyDetector.UnivariateChangePointDetectionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task DetectUnivariateChangePointAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } - public virtual Azure.Response DetectUnivariateEntireSeries(Azure.AI.AnomalyDetector.UnivariateDetectionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response DetectUnivariateEntireSeries(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } - public virtual System.Threading.Tasks.Task> DetectUnivariateEntireSeriesAsync(Azure.AI.AnomalyDetector.UnivariateDetectionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task DetectUnivariateEntireSeriesAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } public virtual Azure.Response DetectUnivariateLastPoint(Azure.AI.AnomalyDetector.UnivariateDetectionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response DetectUnivariateLastPoint(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } @@ -230,11 +228,11 @@ public ModelState() { } } public partial class MultivariateBatchDetectionOptions { - public MultivariateBatchDetectionOptions(string dataSource, int topContributorCount, System.DateTimeOffset startTime, System.DateTimeOffset endTime) { } + public MultivariateBatchDetectionOptions(string dataSource, System.DateTimeOffset startTime, System.DateTimeOffset endTime) { } public string DataSource { get { throw null; } set { } } public System.DateTimeOffset EndTime { get { throw null; } set { } } public System.DateTimeOffset StartTime { get { throw null; } set { } } - public int TopContributorCount { get { throw null; } set { } } + public int? 
TopContributorCount { get { throw null; } set { } } } public partial class MultivariateBatchDetectionResultSummary { @@ -343,18 +341,6 @@ public UnivariateDetectionOptions(System.Collections.Generic.IEnumerable Series { get { throw null; } } } - public partial class UnivariateEntireDetectionResult - { - internal UnivariateEntireDetectionResult() { } - public System.Collections.Generic.IReadOnlyList ExpectedValues { get { throw null; } } - public System.Collections.Generic.IReadOnlyList IsAnomaly { get { throw null; } } - public System.Collections.Generic.IReadOnlyList IsNegativeAnomaly { get { throw null; } } - public System.Collections.Generic.IReadOnlyList IsPositiveAnomaly { get { throw null; } } - public System.Collections.Generic.IReadOnlyList LowerMargins { get { throw null; } } - public int Period { get { throw null; } } - public System.Collections.Generic.IReadOnlyList Severity { get { throw null; } } - public System.Collections.Generic.IReadOnlyList UpperMargins { get { throw null; } } - } public partial class UnivariateLastDetectionResult { internal UnivariateLastDetectionResult() { } diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/AnomalyDetectorClient.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/AnomalyDetectorClient.cs index 2b8fa3b9dd2dd..ec7cb6c73afc2 100644 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/AnomalyDetectorClient.cs +++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/AnomalyDetectorClient.cs @@ -67,62 +67,7 @@ public AnomalyDetectorClient(Uri endpoint, AzureKeyCredential credential, Anomal _apiVersion = options.Version; } - /// Detect anomalies for the entire series in batch. - /// Method of univariate anomaly detection. - /// The cancellation token to use. - /// is null. - /// - /// This operation generates a model with an entire series. Each point is detected - /// with the same model. With this method, points before and after a certain point - /// are used to determine whether it's an anomaly. The entire detection can give the - /// user an overall status of the time series. - /// - public virtual async Task> DetectUnivariateEntireSeriesAsync(UnivariateDetectionOptions options, CancellationToken cancellationToken = default) - { - Argument.AssertNotNull(options, nameof(options)); - - RequestContext context = FromCancellationToken(cancellationToken); - Response response = await DetectUnivariateEntireSeriesAsync(options.ToRequestContent(), context).ConfigureAwait(false); - return Response.FromValue(UnivariateEntireDetectionResult.FromResponse(response), response); - } - - /// Detect anomalies for the entire series in batch. - /// Method of univariate anomaly detection. - /// The cancellation token to use. - /// is null. - /// - /// This operation generates a model with an entire series. Each point is detected - /// with the same model. With this method, points before and after a certain point - /// are used to determine whether it's an anomaly. The entire detection can give the - /// user an overall status of the time series. 
- /// - public virtual Response DetectUnivariateEntireSeries(UnivariateDetectionOptions options, CancellationToken cancellationToken = default) - { - Argument.AssertNotNull(options, nameof(options)); - - RequestContext context = FromCancellationToken(cancellationToken); - Response response = DetectUnivariateEntireSeries(options.ToRequestContent(), context); - return Response.FromValue(UnivariateEntireDetectionResult.FromResponse(response), response); - } - - /// - /// [Protocol Method] Detect anomalies for the entire series in batch. - /// - /// - /// - /// This operation generates a model with an entire series. Each point is detected - /// with the same model. With this method, points before and after a certain point - /// are used to determine whether it's an anomaly. The entire detection can give the - /// user an overall status of the time series. - /// - /// - /// - /// - /// Please try the simpler convenience overload with strongly typed models first. - /// - /// - /// - /// + /// [Protocol Method] Detect anomalies for the entire series in batch. /// The content to send as the body of the request. Details of the request body schema are in the Remarks section below. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. @@ -147,24 +92,7 @@ public virtual async Task DetectUnivariateEntireSeriesAsync(RequestCon } } - /// - /// [Protocol Method] Detect anomalies for the entire series in batch. - /// - /// - /// - /// This operation generates a model with an entire series. Each point is detected - /// with the same model. With this method, points before and after a certain point - /// are used to determine whether it's an anomaly. The entire detection can give the - /// user an overall status of the time series. - /// - /// - /// - /// - /// Please try the simpler convenience overload with strongly typed models first. - /// - /// - /// - /// + /// [Protocol Method] Detect anomalies for the entire series in batch. /// The content to send as the body of the request. Details of the request body schema are in the Remarks section below. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. @@ -538,6 +466,10 @@ public virtual Response GetMultivariateBatchDetectionResult(string resultId, Req /// Storage folder that contains multiple CSV files, where each CSV file has /// two columns, time stamp and variable. Or the Blob Storage URI can point to a single blob that contains a CSV file that has all the variables and a /// time stamp column. + /// The model object will be created and returned in the response, but the + /// training process happens asynchronously. To check the training status, call + /// GetMultivariateModel with the modelId value and check the status field in the + /// modelInfo object. /// public virtual async Task> TrainMultivariateModelAsync(ModelInfo modelInfo, CancellationToken cancellationToken = default) { @@ -559,6 +491,10 @@ public virtual async Task> TrainMultivariateMode /// Storage folder that contains multiple CSV files, where each CSV file has /// two columns, time stamp and variable. Or the Blob Storage URI can point to a single blob that contains a CSV file that has all the variables and a /// time stamp column. + /// The model object will be created and returned in the response, but the + /// training process happens asynchronously. 
To check the training status, call + /// GetMultivariateModel with the modelId value and check the status field in the + /// modelInfo object. /// public virtual Response TrainMultivariateModel(ModelInfo modelInfo, CancellationToken cancellationToken = default) { @@ -580,6 +516,10 @@ public virtual Response TrainMultivariateModel(ModelInfo /// Storage folder that contains multiple CSV files, where each CSV file has /// two columns, time stamp and variable. Or the Blob Storage URI can point to a single blob that contains a CSV file that has all the variables and a /// time stamp column. + /// The model object will be created and returned in the response, but the + /// training process happens asynchronously. To check the training status, call + /// GetMultivariateModel with the modelId value and check the status field in the + /// modelInfo object. /// /// /// @@ -624,6 +564,10 @@ public virtual async Task TrainMultivariateModelAsync(RequestContent c /// Storage folder that contains multiple CSV files, where each CSV file has /// two columns, time stamp and variable. Or the Blob Storage URI can point to a single blob that contains a CSV file that has all the variables and a /// time stamp column. + /// The model object will be created and returned in the response, but the + /// training process happens asynchronously. To check the training status, call + /// GetMultivariateModel with the modelId value and check the status field in the + /// modelInfo object. /// /// /// diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/Docs/AnomalyDetectorClient.xml b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/Docs/AnomalyDetectorClient.xml index d5e87e2d56067..56591a6e8e434 100644 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/Docs/AnomalyDetectorClient.xml +++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/Docs/AnomalyDetectorClient.xml @@ -783,10 +783,10 @@ Schema for MultivariateDetectionResult: dataSource: string, # Required. Source link to the input data to indicate an accessible Azure Storage URI. It either points to an Azure Blob Storage folder or points to a CSV file in Azure Blob Storage, based on your data schema selection. The data schema should -be exactly the same as those used in the training phase. - topContributorCount: number, # Required. Number of top contributed -variables for one anomalous time stamp in the response. The default is -10. +be exactly the same as those used in the training phase. The input data must +contain at least slidingWindow entries preceding the start time of the data +to be detected. + topContributorCount: number, # Optional. Number of top contributed variables for one anomalous time stamp in the response. startTime: string (date & time), # Required. Start date/time of data for detection, which should be in ISO 8601 format. endTime: string (date & time), # Required. End date/time of data for detection, which should @@ -866,10 +866,10 @@ Schema for MultivariateDetectionResult: dataSource: string, # Required. Source link to the input data to indicate an accessible Azure Storage URI. It either points to an Azure Blob Storage folder or points to a CSV file in Azure Blob Storage, based on your data schema selection. The data schema should -be exactly the same as those used in the training phase. - topContributorCount: number, # Required. Number of top contributed -variables for one anomalous time stamp in the response. The default is -10. +be exactly the same as those used in the training phase. 
The input data must +contain at least slidingWindow entries preceding the start time of the data +to be detected. + topContributorCount: number, # Optional. Number of top contributed variables for one anomalous time stamp in the response. startTime: string (date & time), # Required. Start date/time of data for detection, which should be in ISO 8601 format. endTime: string (date & time), # Required. End date/time of data for detection, which should @@ -972,6 +972,10 @@ Storage URI that's accessible to the service. There are two types of data i Storage folder that contains multiple CSV files, where each CSV file has two columns, time stamp and variable. Or the Blob Storage URI can point to a single blob that contains a CSV file that has all the variables and a time stamp column. +The model object will be created and returned in the response, but the +training process happens asynchronously. To check the training status, call +GetMultivariateModel with the modelId value and check the status field in the +modelInfo object. Below is the JSON schema for the request and response payloads. @@ -1150,6 +1154,10 @@ Storage URI that's accessible to the service. There are two types of data i Storage folder that contains multiple CSV files, where each CSV file has two columns, time stamp and variable. Or the Blob Storage URI can point to a single blob that contains a CSV file that has all the variables and a time stamp column. +The model object will be created and returned in the response, but the +training process happens asynchronously. To check the training status, call +GetMultivariateModel with the modelId value and check the status field in the +modelInfo object. Below is the JSON schema for the request and response payloads. @@ -1477,7 +1485,29 @@ information. -This sample shows how to call DetectMultivariateBatchAnomalyAsync with required parameters and request content and parse the result. +This sample shows how to call DetectMultivariateBatchAnomalyAsync with required parameters and request content, and how to parse the result. +"); +var endpoint = new Uri(""); +var client = new AnomalyDetectorClient(endpoint, credential); + +var data = new { + dataSource = "", + startTime = "2022-05-10T14:57:31.2311892-04:00", + endTime = "2022-05-10T14:57:31.2311892-04:00", +}; + +Response response = await client.DetectMultivariateBatchAnomalyAsync("", RequestContent.Create(data)); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("resultId").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("setupInfo").GetProperty("dataSource").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("setupInfo").GetProperty("startTime").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("setupInfo").GetProperty("endTime").ToString()); +Console.WriteLine(result.GetProperty("results")[0].GetProperty("timestamp").ToString()); +]]> +This sample shows how to call DetectMultivariateBatchAnomalyAsync with all parameters and request content, and how to parse the result. "); var endpoint = new Uri(""); @@ -1534,10 +1564,10 @@ Schema for MultivariateBatchDetectionOptions: dataSource: string, # Required. Source link to the input data to indicate an accessible Azure Storage URI. It either points to an Azure Blob Storage folder or points to a CSV file in Azure Blob Storage, based on your data schema selection. 
The data schema should -be exactly the same as those used in the training phase. - topContributorCount: number, # Required. Number of top contributed -variables for one anomalous time stamp in the response. The default is -10. +be exactly the same as those used in the training phase. The input data must +contain at least slidingWindow entries preceding the start time of the data +to be detected. + topContributorCount: number, # Optional. Number of top contributed variables for one anomalous time stamp in the response. startTime: string (date & time), # Required. Start date/time of data for detection, which should be in ISO 8601 format. endTime: string (date & time), # Required. End date/time of data for detection, which should @@ -1558,10 +1588,10 @@ Schema for MultivariateDetectionResult: dataSource: string, # Required. Source link to the input data to indicate an accessible Azure Storage URI. It either points to an Azure Blob Storage folder or points to a CSV file in Azure Blob Storage, based on your data schema selection. The data schema should -be exactly the same as those used in the training phase. - topContributorCount: number, # Required. Number of top contributed -variables for one anomalous time stamp in the response. The default is -10. +be exactly the same as those used in the training phase. The input data must +contain at least slidingWindow entries preceding the start time of the data +to be detected. + topContributorCount: number, # Optional. Number of top contributed variables for one anomalous time stamp in the response. startTime: string (date & time), # Required. Start date/time of data for detection, which should be in ISO 8601 format. endTime: string (date & time), # Required. End date/time of data for detection, which should @@ -1589,7 +1619,29 @@ significant the anomaly is. -This sample shows how to call DetectMultivariateBatchAnomaly with required parameters and request content and parse the result. +This sample shows how to call DetectMultivariateBatchAnomaly with required parameters and request content, and how to parse the result. +"); +var endpoint = new Uri(""); +var client = new AnomalyDetectorClient(endpoint, credential); + +var data = new { + dataSource = "", + startTime = "2022-05-10T14:57:31.2311892-04:00", + endTime = "2022-05-10T14:57:31.2311892-04:00", +}; + +Response response = client.DetectMultivariateBatchAnomaly("", RequestContent.Create(data)); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("resultId").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("setupInfo").GetProperty("dataSource").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("setupInfo").GetProperty("startTime").ToString()); +Console.WriteLine(result.GetProperty("summary").GetProperty("setupInfo").GetProperty("endTime").ToString()); +Console.WriteLine(result.GetProperty("results")[0].GetProperty("timestamp").ToString()); +]]> +This sample shows how to call DetectMultivariateBatchAnomaly with all parameters and request content, and how to parse the result. "); var endpoint = new Uri(""); @@ -1646,10 +1698,10 @@ Schema for MultivariateBatchDetectionOptions: dataSource: string, # Required. Source link to the input data to indicate an accessible Azure Storage URI. 
It either points to an Azure Blob Storage folder or points to a CSV file in Azure Blob Storage, based on your data schema selection. The data schema should -be exactly the same as those used in the training phase. - topContributorCount: number, # Required. Number of top contributed -variables for one anomalous time stamp in the response. The default is -10. +be exactly the same as those used in the training phase. The input data must +contain at least slidingWindow entries preceding the start time of the data +to be detected. + topContributorCount: number, # Optional. Number of top contributed variables for one anomalous time stamp in the response. startTime: string (date & time), # Required. Start date/time of data for detection, which should be in ISO 8601 format. endTime: string (date & time), # Required. End date/time of data for detection, which should @@ -1670,10 +1722,10 @@ Schema for MultivariateDetectionResult: dataSource: string, # Required. Source link to the input data to indicate an accessible Azure Storage URI. It either points to an Azure Blob Storage folder or points to a CSV file in Azure Blob Storage, based on your data schema selection. The data schema should -be exactly the same as those used in the training phase. - topContributorCount: number, # Required. Number of top contributed -variables for one anomalous time stamp in the response. The default is -10. +be exactly the same as those used in the training phase. The input data must +contain at least slidingWindow entries preceding the start time of the data +to be detected. + topContributorCount: number, # Optional. Number of top contributed variables for one anomalous time stamp in the response. startTime: string (date & time), # Required. Start date/time of data for detection, which should be in ISO 8601 format. endTime: string (date & time), # Required. End date/time of data for detection, which should diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/ModelStatus.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/ModelStatus.cs index 0d8e4d85bb952..90b6f24dacd36 100644 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/ModelStatus.cs +++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/ModelStatus.cs @@ -26,13 +26,13 @@ public ModelStatus(string value) private const string ReadyValue = "READY"; private const string FailedValue = "FAILED"; - /// CREATED. + /// The model has been created. Training has been scheduled but not yet started. public static ModelStatus Created { get; } = new ModelStatus(CreatedValue); - /// RUNNING. + /// The model is being trained. public static ModelStatus Running { get; } = new ModelStatus(RunningValue); - /// READY. + /// The model has been trained and is ready to be used for anomaly detection. public static ModelStatus Ready { get; } = new ModelStatus(ReadyValue); - /// FAILED. + /// The model training failed. public static ModelStatus Failed { get; } = new ModelStatus(FailedValue); /// Determines if two values are the same. 
public static bool operator ==(ModelStatus left, ModelStatus right) => left.Equals(right); diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.Serialization.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.Serialization.cs index 1a0d2bfc86179..11884d7e14c4e 100644 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.Serialization.cs +++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.Serialization.cs @@ -19,8 +19,18 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WriteStartObject(); writer.WritePropertyName("dataSource"u8); writer.WriteStringValue(DataSource); - writer.WritePropertyName("topContributorCount"u8); - writer.WriteNumberValue(TopContributorCount); + if (Optional.IsDefined(TopContributorCount)) + { + if (TopContributorCount != null) + { + writer.WritePropertyName("topContributorCount"u8); + writer.WriteNumberValue(TopContributorCount.Value); + } + else + { + writer.WriteNull("topContributorCount"); + } + } writer.WritePropertyName("startTime"u8); writer.WriteStringValue(StartTime, "O"); writer.WritePropertyName("endTime"u8); @@ -35,7 +45,7 @@ internal static MultivariateBatchDetectionOptions DeserializeMultivariateBatchDe return null; } string dataSource = default; - int topContributorCount = default; + Optional topContributorCount = default; DateTimeOffset startTime = default; DateTimeOffset endTime = default; foreach (var property in element.EnumerateObject()) @@ -47,6 +57,11 @@ internal static MultivariateBatchDetectionOptions DeserializeMultivariateBatchDe } if (property.NameEquals("topContributorCount"u8)) { + if (property.Value.ValueKind == JsonValueKind.Null) + { + topContributorCount = null; + continue; + } topContributorCount = property.Value.GetInt32(); continue; } @@ -61,7 +76,7 @@ internal static MultivariateBatchDetectionOptions DeserializeMultivariateBatchDe continue; } } - return new MultivariateBatchDetectionOptions(dataSource, topContributorCount, startTime, endTime); + return new MultivariateBatchDetectionOptions(dataSource, Optional.ToNullable(topContributorCount), startTime, endTime); } /// Deserializes the model from a raw response. diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.cs index 695a09a2a411f..3767692e1ed8f 100644 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.cs +++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/MultivariateBatchDetectionOptions.cs @@ -21,12 +21,9 @@ public partial class MultivariateBatchDetectionOptions /// Source link to the input data to indicate an accessible Azure Storage URI. /// It either points to an Azure Blob Storage folder or points to a CSV file in /// Azure Blob Storage, based on your data schema selection. The data schema should - /// be exactly the same as those used in the training phase. - /// - /// - /// Number of top contributed - /// variables for one anomalous time stamp in the response. The default is - /// 10. + /// be exactly the same as those used in the training phase. The input data must + /// contain at least slidingWindow entries preceding the start time of the data + /// to be detected. 
/// /// /// Start date/time of data for detection, which should @@ -37,10 +34,35 @@ public partial class MultivariateBatchDetectionOptions /// be in ISO 8601 format. /// /// is null. - public MultivariateBatchDetectionOptions(string dataSource, int topContributorCount, DateTimeOffset startTime, DateTimeOffset endTime) + public MultivariateBatchDetectionOptions(string dataSource, DateTimeOffset startTime, DateTimeOffset endTime) { Argument.AssertNotNull(dataSource, nameof(dataSource)); + DataSource = dataSource; + StartTime = startTime; + EndTime = endTime; + } + + /// Initializes a new instance of MultivariateBatchDetectionOptions. + /// + /// Source link to the input data to indicate an accessible Azure Storage URI. + /// It either points to an Azure Blob Storage folder or points to a CSV file in + /// Azure Blob Storage, based on your data schema selection. The data schema should + /// be exactly the same as those used in the training phase. The input data must + /// contain at least slidingWindow entries preceding the start time of the data + /// to be detected. + /// + /// Number of top contributed variables for one anomalous time stamp in the response. + /// + /// Start date/time of data for detection, which should + /// be in ISO 8601 format. + /// + /// + /// End date/time of data for detection, which should + /// be in ISO 8601 format. + /// + internal MultivariateBatchDetectionOptions(string dataSource, int? topContributorCount, DateTimeOffset startTime, DateTimeOffset endTime) + { DataSource = dataSource; TopContributorCount = topContributorCount; StartTime = startTime; @@ -51,15 +73,13 @@ public MultivariateBatchDetectionOptions(string dataSource, int topContributorCo /// Source link to the input data to indicate an accessible Azure Storage URI. /// It either points to an Azure Blob Storage folder or points to a CSV file in /// Azure Blob Storage, based on your data schema selection. The data schema should - /// be exactly the same as those used in the training phase. + /// be exactly the same as those used in the training phase. The input data must + /// contain at least slidingWindow entries preceding the start time of the data + /// to be detected. /// public string DataSource { get; set; } - /// - /// Number of top contributed - /// variables for one anomalous time stamp in the response. The default is - /// 10. - /// - public int TopContributorCount { get; set; } + /// Number of top contributed variables for one anomalous time stamp in the response. + public int? TopContributorCount { get; set; } /// /// Start date/time of data for detection, which should /// be in ISO 8601 format. diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/UnivariateEntireDetectionResult.Serialization.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/UnivariateEntireDetectionResult.Serialization.cs deleted file mode 100644 index 42819aed13efe..0000000000000 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/UnivariateEntireDetectionResult.Serialization.cs +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure; -using Azure.Core; - -namespace Azure.AI.AnomalyDetector -{ - public partial class UnivariateEntireDetectionResult - { - internal static UnivariateEntireDetectionResult DeserializeUnivariateEntireDetectionResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int period = default; - IReadOnlyList expectedValues = default; - IReadOnlyList upperMargins = default; - IReadOnlyList lowerMargins = default; - IReadOnlyList isAnomaly = default; - IReadOnlyList isNegativeAnomaly = default; - IReadOnlyList isPositiveAnomaly = default; - Optional> severity = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("period"u8)) - { - period = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("expectedValues"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetSingle()); - } - expectedValues = array; - continue; - } - if (property.NameEquals("upperMargins"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetSingle()); - } - upperMargins = array; - continue; - } - if (property.NameEquals("lowerMargins"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetSingle()); - } - lowerMargins = array; - continue; - } - if (property.NameEquals("isAnomaly"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetBoolean()); - } - isAnomaly = array; - continue; - } - if (property.NameEquals("isNegativeAnomaly"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetBoolean()); - } - isNegativeAnomaly = array; - continue; - } - if (property.NameEquals("isPositiveAnomaly"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetBoolean()); - } - isPositiveAnomaly = array; - continue; - } - if (property.NameEquals("severity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetSingle()); - } - severity = array; - continue; - } - } - return new UnivariateEntireDetectionResult(period, expectedValues, upperMargins, lowerMargins, isAnomaly, isNegativeAnomaly, isPositiveAnomaly, Optional.ToList(severity)); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static UnivariateEntireDetectionResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content); - return DeserializeUnivariateEntireDetectionResult(document.RootElement); - } - } -} diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/UnivariateEntireDetectionResult.cs b/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/UnivariateEntireDetectionResult.cs deleted file mode 100644 index 7d382d4e9cbdd..0000000000000 --- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/src/Generated/UnivariateEntireDetectionResult.cs +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; -using Azure.Core; - -namespace Azure.AI.AnomalyDetector -{ - /// Response of the entire anomaly detection. - public partial class UnivariateEntireDetectionResult - { - /// Initializes a new instance of UnivariateEntireDetectionResult. - /// - /// Frequency extracted from the series. Zero means no recurrent pattern has been - /// found. - /// - /// - /// Expected value for each input point. The index of the - /// array is consistent with the input series. - /// - /// - /// Upper margin of each input point. UpperMargin is used to - /// calculate upperBoundary, which is equal to expectedValue + (100 - - /// marginScale)*upperMargin. Anomalies in the response can be filtered by - /// upperBoundary and lowerBoundary. Adjusting the marginScale value can help filter less - /// significant anomalies on the client side. The index of the array is - /// consistent with the input series. - /// - /// - /// Lower margin of each input point. LowerMargin is used to - /// calculate lowerBoundary, which is equal to expectedValue - (100 - - /// marginScale)*lowerMargin. Points between the boundary can be marked as normal - /// ones on the client side. The index of the array is consistent with the input - /// series. - /// - /// - /// Anomaly properties for each input point. True means an - /// anomaly (either negative or positive) has been detected. The index of the array - /// is consistent with the input series. - /// - /// - /// Anomaly status in a negative direction for each input - /// point. True means a negative anomaly has been detected. A negative anomaly - /// means the point is detected as an anomaly and its real value is smaller than - /// the expected one. The index of the array is consistent with the input series. - /// - /// - /// Anomaly status in a positive direction for each input - /// point. True means a positive anomaly has been detected. A positive anomaly - /// means the point is detected as an anomaly and its real value is larger than the - /// expected one. The index of the array is consistent with the input series. - /// - /// , , , , or is null. - internal UnivariateEntireDetectionResult(int period, IEnumerable expectedValues, IEnumerable upperMargins, IEnumerable lowerMargins, IEnumerable isAnomaly, IEnumerable isNegativeAnomaly, IEnumerable isPositiveAnomaly) - { - Argument.AssertNotNull(expectedValues, nameof(expectedValues)); - Argument.AssertNotNull(upperMargins, nameof(upperMargins)); - Argument.AssertNotNull(lowerMargins, nameof(lowerMargins)); - Argument.AssertNotNull(isAnomaly, nameof(isAnomaly)); - Argument.AssertNotNull(isNegativeAnomaly, nameof(isNegativeAnomaly)); - Argument.AssertNotNull(isPositiveAnomaly, nameof(isPositiveAnomaly)); - - Period = period; - ExpectedValues = expectedValues.ToList(); - UpperMargins = upperMargins.ToList(); - LowerMargins = lowerMargins.ToList(); - IsAnomaly = isAnomaly.ToList(); - IsNegativeAnomaly = isNegativeAnomaly.ToList(); - IsPositiveAnomaly = isPositiveAnomaly.ToList(); - Severity = new ChangeTrackingList(); - } - - /// Initializes a new instance of UnivariateEntireDetectionResult. - /// - /// Frequency extracted from the series. Zero means no recurrent pattern has been - /// found. - /// - /// - /// Expected value for each input point. The index of the - /// array is consistent with the input series. - /// - /// - /// Upper margin of each input point. 
UpperMargin is used to - /// calculate upperBoundary, which is equal to expectedValue + (100 - - /// marginScale)*upperMargin. Anomalies in the response can be filtered by - /// upperBoundary and lowerBoundary. Adjusting the marginScale value can help filter less - /// significant anomalies on the client side. The index of the array is - /// consistent with the input series. - /// - /// - /// Lower margin of each input point. LowerMargin is used to - /// calculate lowerBoundary, which is equal to expectedValue - (100 - - /// marginScale)*lowerMargin. Points between the boundary can be marked as normal - /// ones on the client side. The index of the array is consistent with the input - /// series. - /// - /// - /// Anomaly properties for each input point. True means an - /// anomaly (either negative or positive) has been detected. The index of the array - /// is consistent with the input series. - /// - /// - /// Anomaly status in a negative direction for each input - /// point. True means a negative anomaly has been detected. A negative anomaly - /// means the point is detected as an anomaly and its real value is smaller than - /// the expected one. The index of the array is consistent with the input series. - /// - /// - /// Anomaly status in a positive direction for each input - /// point. True means a positive anomaly has been detected. A positive anomaly - /// means the point is detected as an anomaly and its real value is larger than the - /// expected one. The index of the array is consistent with the input series. - /// - /// - /// Severity score for each input point. The larger the value is, the more - /// severe the anomaly is. For normal points, the severity is always 0. - /// - internal UnivariateEntireDetectionResult(int period, IReadOnlyList expectedValues, IReadOnlyList upperMargins, IReadOnlyList lowerMargins, IReadOnlyList isAnomaly, IReadOnlyList isNegativeAnomaly, IReadOnlyList isPositiveAnomaly, IReadOnlyList severity) - { - Period = period; - ExpectedValues = expectedValues; - UpperMargins = upperMargins; - LowerMargins = lowerMargins; - IsAnomaly = isAnomaly; - IsNegativeAnomaly = isNegativeAnomaly; - IsPositiveAnomaly = isPositiveAnomaly; - Severity = severity; - } - - /// - /// Frequency extracted from the series. Zero means no recurrent pattern has been - /// found. - /// - public int Period { get; } - /// - /// Expected value for each input point. The index of the - /// array is consistent with the input series. - /// - public IReadOnlyList ExpectedValues { get; } - /// - /// Upper margin of each input point. UpperMargin is used to - /// calculate upperBoundary, which is equal to expectedValue + (100 - - /// marginScale)*upperMargin. Anomalies in the response can be filtered by - /// upperBoundary and lowerBoundary. Adjusting the marginScale value can help filter less - /// significant anomalies on the client side. The index of the array is - /// consistent with the input series. - /// - public IReadOnlyList UpperMargins { get; } - /// - /// Lower margin of each input point. LowerMargin is used to - /// calculate lowerBoundary, which is equal to expectedValue - (100 - - /// marginScale)*lowerMargin. Points between the boundary can be marked as normal - /// ones on the client side. The index of the array is consistent with the input - /// series. - /// - public IReadOnlyList LowerMargins { get; } - /// - /// Anomaly properties for each input point. True means an - /// anomaly (either negative or positive) has been detected. 
The index of the array
-        /// is consistent with the input series.
-        /// </summary>
-        public IReadOnlyList<bool> IsAnomaly { get; }
-        /// <summary>
-        /// Anomaly status in a negative direction for each input
-        /// point. True means a negative anomaly has been detected. A negative anomaly
-        /// means the point is detected as an anomaly and its real value is smaller than
-        /// the expected one. The index of the array is consistent with the input series.
-        /// </summary>
-        public IReadOnlyList<bool> IsNegativeAnomaly { get; }
-        /// <summary>
-        /// Anomaly status in a positive direction for each input
-        /// point. True means a positive anomaly has been detected. A positive anomaly
-        /// means the point is detected as an anomaly and its real value is larger than the
-        /// expected one. The index of the array is consistent with the input series.
-        /// </summary>
-        public IReadOnlyList<bool> IsPositiveAnomaly { get; }
-        /// <summary>
-        /// Severity score for each input point. The larger the value is, the more
-        /// severe the anomaly is. For normal points, the severity is always 0.
-        /// </summary>
-        public IReadOnlyList<float> Severity { get; }
-    }
-}
diff --git a/sdk/anomalydetector/Azure.AI.AnomalyDetector/tsp-location.yaml b/sdk/anomalydetector/Azure.AI.AnomalyDetector/tsp-location.yaml
index 3dc0fe1959e01..55c39279df459 100644
--- a/sdk/anomalydetector/Azure.AI.AnomalyDetector/tsp-location.yaml
+++ b/sdk/anomalydetector/Azure.AI.AnomalyDetector/tsp-location.yaml
@@ -1,3 +1,3 @@
+spec-root-dir: /mnt/vss/_work/1/s/azure-rest-api-specs
 directory: specification/cognitiveservices/AnomalyDetector
-commit: fda314573efadf7c360fd83e2c80141ce48a4d7a
-repo: Azure/azure-rest-api-specs
+
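
Migration note: with the DetectUnivariateEntireSeries convenience overloads and the UnivariateEntireDetectionResult model removed, entire-series detection is now reached through the protocol overloads that take RequestContent, and topContributorCount on MultivariateBatchDetectionOptions is optional. The sketch below shows one way calling code might adapt. It is illustrative only and not part of this diff: the endpoint/key environment variable names and the blob URI are placeholders, the univariate request-body shape (a series of timestamp/value points plus granularity) is assumed from the service's univariate detection schema rather than shown here, and a real series needs more points than the three shown.

// Hedged sketch, not part of this diff: adapting to the removed convenience
// overloads and the now-optional TopContributorCount. Environment variable
// names, the blob URI, and the univariate request-body shape are assumptions.
using System;
using System.Text.Json;
using Azure;
using Azure.AI.AnomalyDetector;
using Azure.Core;

var credential = new AzureKeyCredential(Environment.GetEnvironmentVariable("ANOMALY_DETECTOR_KEY"));
var endpoint = new Uri(Environment.GetEnvironmentVariable("ANOMALY_DETECTOR_ENDPOINT"));
var client = new AnomalyDetectorClient(endpoint, credential);

// Entire-series detection now goes through the protocol overload; the response
// is parsed manually instead of via the removed UnivariateEntireDetectionResult.
var body = new
{
    granularity = "daily", // assumed field; a real series needs far more points than shown here
    series = new[]
    {
        new { timestamp = "2023-01-01T00:00:00Z", value = 1.0f },
        new { timestamp = "2023-01-02T00:00:00Z", value = 1.2f },
        new { timestamp = "2023-01-03T00:00:00Z", value = 9.8f },
    },
};
Response response = client.DetectUnivariateEntireSeries(RequestContent.Create(body));
JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
Console.WriteLine(result.GetProperty("period").ToString());
Console.WriteLine(result.GetProperty("isAnomaly")[2].ToString());

// TopContributorCount is no longer a constructor parameter; set it only when a
// value other than the service default is wanted.
var batchOptions = new MultivariateBatchDetectionOptions(
    dataSource: "https://myaccount.blob.core.windows.net/mycontainer/mydata.csv", // placeholder URI
    startTime: DateTimeOffset.Parse("2021-01-01T00:00:00Z"),
    endTime: DateTimeOffset.Parse("2021-01-02T12:00:00Z"))
{
    TopContributorCount = 10,
};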