diff --git a/alerting/build.gradle b/alerting/build.gradle index 4eaaaef50..2e1eaba6e 100644 --- a/alerting/build.gradle +++ b/alerting/build.gradle @@ -91,7 +91,7 @@ dependencies { implementation "com.github.seancfoley:ipaddress:5.3.3" testImplementation "org.jetbrains.kotlin:kotlin-test:${kotlin_version}" - testImplementation "org.mockito:mockito-core:4.6.1" + testImplementation "org.mockito:mockito-core:4.7.0" testImplementation "org.opensearch.plugin:reindex-client:${opensearch_version}" } @@ -258,7 +258,7 @@ String bwcRemoteFile = 'https://ci.opensearch.org/ci/dbc/bundle-build/1.1.0/2021 testClusters { "${baseName}$i" { testDistribution = "ARCHIVE" - versions = ["1.1.0", "2.2.0-SNAPSHOT"] + versions = ["1.1.0", "2.4.0-SNAPSHOT"] numberOfNodes = 3 plugin(provider(new Callable(){ @Override diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt index ed3e44b19..2bc433dd4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt @@ -15,18 +15,9 @@ import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.index.IndexRequest import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse -import org.opensearch.alerting.alerts.AlertError import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.model.ActionExecutionResult import org.opensearch.alerting.model.ActionRunResult -import org.opensearch.alerting.model.AggregationResultBucket -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger -import org.opensearch.alerting.model.DataSources -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.QueryLevelTriggerRunResult -import org.opensearch.alerting.model.Trigger -import org.opensearch.alerting.model.action.AlertCategory import org.opensearch.alerting.opensearchapi.firstFailureOrNull import org.opensearch.alerting.opensearchapi.retry import org.opensearch.alerting.opensearchapi.suspendUntil @@ -43,6 +34,15 @@ import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.model.action.AlertCategory import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 89ed19c23..d7ce0d32a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -8,36 +8,23 @@ package org.opensearch.alerting import org.opensearch.action.ActionRequest import org.opensearch.action.ActionResponse import 
org.opensearch.alerting.action.AcknowledgeAlertAction -import org.opensearch.alerting.action.DeleteMonitorAction import org.opensearch.alerting.action.ExecuteMonitorAction -import org.opensearch.alerting.action.GetAlertsAction import org.opensearch.alerting.action.GetDestinationsAction import org.opensearch.alerting.action.GetEmailAccountAction import org.opensearch.alerting.action.GetEmailGroupAction -import org.opensearch.alerting.action.GetFindingsAction import org.opensearch.alerting.action.GetMonitorAction -import org.opensearch.alerting.action.IndexMonitorAction import org.opensearch.alerting.action.SearchEmailAccountAction import org.opensearch.alerting.action.SearchEmailGroupAction import org.opensearch.alerting.action.SearchMonitorAction -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.core.JobSweeper import org.opensearch.alerting.core.ScheduledJobIndices import org.opensearch.alerting.core.action.node.ScheduledJobsStatsAction import org.opensearch.alerting.core.action.node.ScheduledJobsStatsTransportAction -import org.opensearch.alerting.core.model.ClusterMetricsInput -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.core.resthandler.RestScheduledJobStatsHandler import org.opensearch.alerting.core.schedule.JobScheduler import org.opensearch.alerting.core.settings.LegacyOpenDistroScheduledJobSettings import org.opensearch.alerting.core.settings.ScheduledJobSettings -import org.opensearch.alerting.model.BucketLevelTrigger -import org.opensearch.alerting.model.DocumentLevelTrigger -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction import org.opensearch.alerting.resthandler.RestDeleteMonitorAction import org.opensearch.alerting.resthandler.RestExecuteMonitorAction @@ -84,6 +71,16 @@ import org.opensearch.common.settings.Settings import org.opensearch.common.settings.SettingsFilter import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentParser +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.ClusterMetricsInput +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.env.Environment import org.opensearch.env.NodeEnvironment import org.opensearch.index.IndexModule @@ -172,19 +169,19 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R override fun getActions(): List> { return listOf( ActionPlugin.ActionHandler(ScheduledJobsStatsAction.INSTANCE, ScheduledJobsStatsTransportAction::class.java), - ActionPlugin.ActionHandler(IndexMonitorAction.INSTANCE, TransportIndexMonitorAction::class.java), + 
ActionPlugin.ActionHandler(AlertingActions.INDEX_MONITOR_ACTION_TYPE, TransportIndexMonitorAction::class.java), ActionPlugin.ActionHandler(GetMonitorAction.INSTANCE, TransportGetMonitorAction::class.java), ActionPlugin.ActionHandler(ExecuteMonitorAction.INSTANCE, TransportExecuteMonitorAction::class.java), ActionPlugin.ActionHandler(SearchMonitorAction.INSTANCE, TransportSearchMonitorAction::class.java), - ActionPlugin.ActionHandler(DeleteMonitorAction.INSTANCE, TransportDeleteMonitorAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.DELETE_MONITOR_ACTION_TYPE, TransportDeleteMonitorAction::class.java), ActionPlugin.ActionHandler(AcknowledgeAlertAction.INSTANCE, TransportAcknowledgeAlertAction::class.java), ActionPlugin.ActionHandler(GetEmailAccountAction.INSTANCE, TransportGetEmailAccountAction::class.java), ActionPlugin.ActionHandler(SearchEmailAccountAction.INSTANCE, TransportSearchEmailAccountAction::class.java), ActionPlugin.ActionHandler(GetEmailGroupAction.INSTANCE, TransportGetEmailGroupAction::class.java), ActionPlugin.ActionHandler(SearchEmailGroupAction.INSTANCE, TransportSearchEmailGroupAction::class.java), ActionPlugin.ActionHandler(GetDestinationsAction.INSTANCE, TransportGetDestinationsAction::class.java), - ActionPlugin.ActionHandler(GetAlertsAction.INSTANCE, TransportGetAlertsAction::class.java), - ActionPlugin.ActionHandler(GetFindingsAction.INSTANCE, TransportGetFindingsSearchAction::class.java) + ActionPlugin.ActionHandler(AlertingActions.GET_ALERTS_ACTION_TYPE, TransportGetAlertsAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java) ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 32856ac53..c96f4ed57 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -7,15 +7,9 @@ package org.opensearch.alerting import org.apache.logging.log4j.LogManager import org.opensearch.alerting.model.ActionRunResult -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger import org.opensearch.alerting.model.BucketLevelTriggerRunResult import org.opensearch.alerting.model.InputRunResults -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.action.AlertCategory -import org.opensearch.alerting.model.action.PerAlertActionScope -import org.opensearch.alerting.model.action.PerExecutionActionScope import org.opensearch.alerting.opensearchapi.InjectorContextElement import org.opensearch.alerting.opensearchapi.withClosableContext import org.opensearch.alerting.script.BucketLevelTriggerExecutionContext @@ -23,6 +17,12 @@ import org.opensearch.alerting.util.defaultToPerExecutionAction import org.opensearch.alerting.util.getActionExecutionPolicy import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.alerting.util.getCombinedTriggerRunResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.action.AlertCategory +import org.opensearch.commons.alerting.model.action.PerAlertActionScope +import 
org.opensearch.commons.alerting.model.action.PerExecutionActionScope import java.time.Instant object BucketLevelMonitorRunner : MonitorRunner() { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index 39ce76dd1..fb529f0db 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -14,20 +14,11 @@ import org.opensearch.action.search.SearchAction import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.WriteRequest -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.model.ActionExecutionResult -import org.opensearch.alerting.model.Alert import org.opensearch.alerting.model.AlertingConfigAccessor.Companion.getMonitorMetadata import org.opensearch.alerting.model.DocumentExecutionContext -import org.opensearch.alerting.model.DocumentLevelTrigger import org.opensearch.alerting.model.DocumentLevelTriggerRunResult -import org.opensearch.alerting.model.Finding import org.opensearch.alerting.model.InputRunResults -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.action.PerAlertActionScope -import org.opensearch.alerting.opensearchapi.string import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.script.DocumentLevelTriggerExecutionContext import org.opensearch.alerting.util.AlertingException @@ -42,6 +33,15 @@ import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.action.PerAlertActionScope +import org.opensearch.commons.alerting.util.string import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.QueryBuilders import org.opensearch.percolator.PercolateQueryBuilderExt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index a5916a73c..b7e86ee90 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -8,10 +8,7 @@ package org.opensearch.alerting import org.apache.logging.log4j.LogManager import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse -import org.opensearch.alerting.core.model.ClusterMetricsInput -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.model.InputRunResults -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.TriggerAfterKey import org.opensearch.alerting.opensearchapi.convertToMap import 
org.opensearch.alerting.opensearchapi.suspendUntil @@ -26,6 +23,9 @@ import org.opensearch.common.io.stream.NamedWriteableRegistry import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ClusterMetricsInput +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.script.Script import org.opensearch.script.ScriptService import org.opensearch.script.ScriptType diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt index fd89acf8f..c7887e466 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt @@ -10,11 +10,8 @@ import org.opensearch.alerting.action.GetDestinationsAction import org.opensearch.alerting.action.GetDestinationsRequest import org.opensearch.alerting.action.GetDestinationsResponse import org.opensearch.alerting.model.ActionRunResult -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorMetadata import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.Table -import org.opensearch.alerting.model.action.Action import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.opensearchapi.InjectorContextElement import org.opensearch.alerting.opensearchapi.suspendUntil @@ -31,6 +28,9 @@ import org.opensearch.alerting.util.isAllowed import org.opensearch.alerting.util.isTestAction import org.opensearch.client.node.NodeClient import org.opensearch.common.Strings +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.Table +import org.opensearch.commons.alerting.model.action.Action import org.opensearch.commons.notifications.model.NotificationConfigInfo import java.time.Instant @@ -57,7 +57,7 @@ abstract class MonitorRunner { } val actionOutput = mutableMapOf() actionOutput[Action.SUBJECT] = if (action.subjectTemplate != null) - MonitorRunnerService.compileTemplate(action.subjectTemplate, ctx) + MonitorRunnerService.compileTemplate(action.subjectTemplate!!, ctx) else "" actionOutput[Action.MESSAGE] = MonitorRunnerService.compileTemplate(action.messageTemplate, ctx) if (Strings.isNullOrEmpty(actionOutput[Action.MESSAGE])) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt index 7b2ec5116..bbeb7e99f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt @@ -15,11 +15,7 @@ import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.alerts.moveAlerts import org.opensearch.alerting.core.JobRunner -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.action.Action import org.opensearch.alerting.model.destination.DestinationContextFactory import org.opensearch.alerting.opensearchapi.retry import org.opensearch.alerting.script.TriggerExecutionContext @@ -33,13 +29,17 
@@ import org.opensearch.alerting.settings.DestinationSettings.Companion.ALLOW_LIST import org.opensearch.alerting.settings.DestinationSettings.Companion.HOST_DENY_LIST import org.opensearch.alerting.settings.DestinationSettings.Companion.loadDestinationSettings import org.opensearch.alerting.util.DocLevelMonitorQueries -import org.opensearch.alerting.util.isBucketLevelMonitor import org.opensearch.alerting.util.isDocLevelMonitor import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.component.AbstractLifecycleComponent import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.util.isBucketLevelMonitor import org.opensearch.script.Script import org.opensearch.script.ScriptService import org.opensearch.script.TemplateScript @@ -244,7 +244,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon // TODO: Remove "AmazonES_all_access" role? monitorCtx.settings!!.getAsList("", listOf("all_access", "AmazonES_all_access")) } else { - monitor.user.roles + monitor.user!!.roles } } @@ -259,7 +259,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon if (action.throttleEnabled) { val result = alert.actionExecutionResults.firstOrNull { r -> r.actionId == action.id } val lastExecutionTime: Instant? = result?.lastExecutionTime - val throttledTimeBound = currentTime().minus(action.throttle.value.toLong(), action.throttle.unit) + val throttledTimeBound = currentTime().minus(action.throttle!!.value.toLong(), action.throttle!!.unit) return (lastExecutionTime == null || lastExecutionTime.isBefore(throttledTimeBound)) } return true diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt index 2d9bd1d07..9864d4a9d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt @@ -6,15 +6,15 @@ package org.opensearch.alerting import org.apache.logging.log4j.LogManager -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.model.QueryLevelTriggerRunResult import org.opensearch.alerting.opensearchapi.InjectorContextElement import org.opensearch.alerting.opensearchapi.withClosableContext import org.opensearch.alerting.script.QueryLevelTriggerExecutionContext import org.opensearch.alerting.util.isADMonitor +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger import java.time.Instant object QueryLevelMonitorRunner : MonitorRunner() { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt index 282ebda59..eaf087a63 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt @@ -6,23 +6,23 @@ package 
org.opensearch.alerting import org.apache.logging.log4j.LogManager -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorIndices.Fields.BUCKET_INDICES -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorIndices.Fields.PARENT_BUCKET_PATH -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.model.AggregationResultBucket -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger import org.opensearch.alerting.model.BucketLevelTriggerRunResult -import org.opensearch.alerting.model.DocumentLevelTrigger import org.opensearch.alerting.model.DocumentLevelTriggerRunResult -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.model.QueryLevelTriggerRunResult import org.opensearch.alerting.script.BucketLevelTriggerExecutionContext import org.opensearch.alerting.script.QueryLevelTriggerExecutionContext import org.opensearch.alerting.script.TriggerScript import org.opensearch.alerting.triggercondition.parsers.TriggerExpressionParser import org.opensearch.alerting.util.getBucketKeysHash +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorIndices.Fields.BUCKET_INDICES +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorIndices.Fields.PARENT_BUCKET_PATH +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger import org.opensearch.script.Script import org.opensearch.script.ScriptService import org.opensearch.search.aggregations.Aggregation diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponse.kt index ce8c3a2f0..ad1a990d9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponse.kt @@ -6,12 +6,12 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionResponse -import org.opensearch.alerting.model.Alert import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.ToXContentObject import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.model.Alert import java.io.IOException import java.util.Collections diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/DeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/DeleteMonitorAction.kt deleted file mode 100644 index 402b95f41..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/DeleteMonitorAction.kt +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionType -import org.opensearch.action.delete.DeleteResponse - -class DeleteMonitorAction private constructor() : ActionType(NAME, ::DeleteResponse) { 
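Note: this deletion, together with the IndexMonitorAction, GetAlertsAction, and GetFindingsAction deletions elsewhere in this patch, pairs with the getActions() change in AlertingPlugin above: the plugin stops defining its own ActionType singletons and instead registers the shared constants from common-utils' AlertingActions. Below is a minimal sketch of reaching the same transport action through the shared constant, assuming the request class moved to org.opensearch.commons.alerting.action alongside it; the deleteMonitor helper and the client value are illustrative, not part of this patch.

```kotlin
// Sketch only: DELETE_MONITOR_ACTION_TYPE is the constant registered in getActions() above;
// DeleteMonitorRequest is assumed to be the common-utils replacement for the plugin-local
// request class deleted later in this patch.
import org.opensearch.action.support.WriteRequest
import org.opensearch.client.node.NodeClient
import org.opensearch.commons.alerting.action.AlertingActions
import org.opensearch.commons.alerting.action.DeleteMonitorRequest

fun deleteMonitor(client: NodeClient, monitorId: String) {
    val request = DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE)
    // Same transport action as before; only the ActionType lookup changed.
    client.execute(AlertingActions.DELETE_MONITOR_ACTION_TYPE, request) // returns an ActionFuture; ignored here
}
```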
- companion object { - val INSTANCE = DeleteMonitorAction() - const val NAME = "cluster:admin/opendistro/alerting/monitor/delete" - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/DeleteMonitorRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/DeleteMonitorRequest.kt deleted file mode 100644 index 6ea08bee2..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/DeleteMonitorRequest.kt +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionRequestValidationException -import org.opensearch.action.support.WriteRequest -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import java.io.IOException - -class DeleteMonitorRequest : ActionRequest { - - val monitorId: String - val refreshPolicy: WriteRequest.RefreshPolicy - - constructor(monitorId: String, refreshPolicy: WriteRequest.RefreshPolicy) : super() { - this.monitorId = monitorId - this.refreshPolicy = refreshPolicy - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : super() { - monitorId = sin.readString() - refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin) - } - - override fun validate(): ActionRequestValidationException? { - return null - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(monitorId) - refreshPolicy.writeTo(out) - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt index d5c077cb0..f82994fe6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt @@ -7,10 +7,10 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException -import org.opensearch.alerting.model.Monitor import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.alerting.model.Monitor import java.io.IOException class ExecuteMonitorRequest : ActionRequest { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsAction.kt deleted file mode 100644 index 649993565..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsAction.kt +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionType - -class GetAlertsAction private constructor() : ActionType(NAME, ::GetAlertsResponse) { - companion object { - val INSTANCE = GetAlertsAction() - const val NAME = "cluster:admin/opendistro/alerting/alerts/get" - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsRequest.kt deleted file mode 100644 index a6f77190b..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsRequest.kt +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * 
SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionRequestValidationException -import org.opensearch.alerting.model.Table -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import java.io.IOException - -class GetAlertsRequest : ActionRequest { - val table: Table - val severityLevel: String - val alertState: String - val monitorId: String? - val alertIndex: String? - - constructor( - table: Table, - severityLevel: String, - alertState: String, - monitorId: String?, - alertIndex: String?, - ) : super() { - this.table = table - this.severityLevel = severityLevel - this.alertState = alertState - this.monitorId = monitorId - this.alertIndex = alertIndex - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - table = Table.readFrom(sin), - severityLevel = sin.readString(), - alertState = sin.readString(), - monitorId = sin.readOptionalString(), - alertIndex = sin.readOptionalString() - ) - - override fun validate(): ActionRequestValidationException? { - return null - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - table.writeTo(out) - out.writeString(severityLevel) - out.writeString(alertState) - out.writeOptionalString(monitorId) - out.writeOptionalString(alertIndex) - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsResponse.kt deleted file mode 100644 index 71dc6565e..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetAlertsResponse.kt +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionResponse -import org.opensearch.alerting.model.Alert -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import java.io.IOException -import java.util.Collections - -class GetAlertsResponse : ActionResponse, ToXContentObject { - val alerts: List - // totalAlerts is not the same as the size of alerts because there can be 30 alerts from the request, but - // the request only asked for 5 alerts, so totalAlerts will be 30, but alerts will only contain 5 alerts - val totalAlerts: Int? - - constructor( - alerts: List, - totalAlerts: Int? 
- ) : super() { - this.alerts = alerts - this.totalAlerts = totalAlerts - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - alerts = Collections.unmodifiableList(sin.readList(::Alert)), - totalAlerts = sin.readOptionalInt() - ) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeCollection(alerts) - out.writeOptionalInt(totalAlerts) - } - - @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .field("alerts", alerts) - .field("totalAlerts", totalAlerts) - - return builder.endObject() - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt index 63c3ebd9d..73b30c535 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt @@ -7,9 +7,9 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException -import org.opensearch.alerting.model.Table import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput +import org.opensearch.commons.alerting.model.Table import org.opensearch.search.fetch.subphase.FetchSourceContext import java.io.IOException diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt index 0c14b8964..69201f1c0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt @@ -7,15 +7,15 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionResponse import org.opensearch.alerting.model.destination.email.EmailAccount -import org.opensearch.alerting.util._ID -import org.opensearch.alerting.util._PRIMARY_TERM -import org.opensearch.alerting.util._SEQ_NO -import org.opensearch.alerting.util._VERSION import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.ToXContentObject import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION import org.opensearch.rest.RestStatus import java.io.IOException diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt index b41394a39..8c21c6743 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt @@ -7,15 +7,15 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionResponse import org.opensearch.alerting.model.destination.email.EmailGroup -import org.opensearch.alerting.util._ID -import org.opensearch.alerting.util._PRIMARY_TERM -import org.opensearch.alerting.util._SEQ_NO 
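Note: the GetAlertsRequest and GetAlertsResponse deletions above follow the same pattern: the payload classes now ship in common-utils, and TransportGetAlertsAction is registered against AlertingActions.GET_ALERTS_ACTION_TYPE in AlertingPlugin. The sketch below issues the request through the shared types; the constructor is assumed to mirror the deleted class (table, severityLevel, alertState, monitorId, alertIndex), and the "ALL"/"ACTIVE" values, client, and table arguments are placeholders.

```kotlin
// Sketch only: assumes the common-utils GetAlertsRequest keeps the same fields as the
// deleted plugin-local class shown above.
import org.opensearch.client.node.NodeClient
import org.opensearch.commons.alerting.action.AlertingActions
import org.opensearch.commons.alerting.action.GetAlertsRequest
import org.opensearch.commons.alerting.model.Table

fun getActiveAlerts(client: NodeClient, table: Table, monitorId: String?) {
    val request = GetAlertsRequest(
        table = table,
        severityLevel = "ALL",   // placeholder filter values
        alertState = "ACTIVE",
        monitorId = monitorId,
        alertIndex = null        // null falls back to the default alert index
    )
    client.execute(AlertingActions.GET_ALERTS_ACTION_TYPE, request)
}
```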
-import org.opensearch.alerting.util._VERSION import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.ToXContentObject import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION import org.opensearch.rest.RestStatus import java.io.IOException diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsAction.kt deleted file mode 100644 index 03d2be9c9..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsAction.kt +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionType - -class GetFindingsAction private constructor() : ActionType(NAME, ::GetFindingsResponse) { - companion object { - val INSTANCE = GetFindingsAction() - const val NAME = "cluster:admin/opensearch/alerting/findings/get" - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsRequest.kt deleted file mode 100644 index f40954cf7..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsRequest.kt +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionRequestValidationException -import org.opensearch.alerting.model.Table -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import java.io.IOException - -class GetFindingsRequest : ActionRequest { - val findingId: String? - val table: Table - val monitorId: String? - val findingIndex: String? - - constructor( - findingId: String?, - table: Table, - monitorId: String? = null, - findingIndexName: String? = null - ) : super() { - this.findingId = findingId - this.table = table - this.monitorId = monitorId - this.findingIndex = findingIndexName - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - findingId = sin.readOptionalString(), - table = Table.readFrom(sin), - monitorId = sin.readOptionalString(), - findingIndexName = sin.readOptionalString() - ) - - override fun validate(): ActionRequestValidationException? 
{ - return null - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeOptionalString(findingId) - table.writeTo(out) - out.writeOptionalString(monitorId) - out.writeOptionalString(findingIndex) - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsResponse.kt deleted file mode 100644 index 66943e318..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetFindingsResponse.kt +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionResponse -import org.opensearch.alerting.model.FindingWithDocs -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.rest.RestStatus -import java.io.IOException - -class GetFindingsResponse : ActionResponse, ToXContentObject { - var status: RestStatus - var totalFindings: Int? - var findings: List - - constructor( - status: RestStatus, - totalFindings: Int?, - findings: List - ) : super() { - this.status = status - this.totalFindings = totalFindings - this.findings = findings - } - - @Throws(IOException::class) - constructor(sin: StreamInput) { - this.status = sin.readEnum(RestStatus::class.java) - val findings = mutableListOf() - this.totalFindings = sin.readOptionalInt() - var currentSize = sin.readInt() - for (i in 0 until currentSize) { - findings.add(FindingWithDocs.readFrom(sin)) - } - this.findings = findings - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeEnum(status) - out.writeOptionalInt(totalFindings) - out.writeInt(findings.size) - for (finding in findings) { - finding.writeTo(out) - } - } - - @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .field("total_findings", totalFindings) - .field("findings", findings) - - return builder.endObject() - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetMonitorResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetMonitorResponse.kt index 819168812..1fc6454be 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetMonitorResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetMonitorResponse.kt @@ -6,16 +6,16 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionResponse -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.util._ID -import org.opensearch.alerting.util._PRIMARY_TERM -import org.opensearch.alerting.util._SEQ_NO -import org.opensearch.alerting.util._VERSION import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.ToXContentObject import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO 
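Note: the same import swap repeats across the remaining response classes in this patch: the _ID, _VERSION, _SEQ_NO, and _PRIMARY_TERM field-name constants are now read from IndexUtils.Companion in common-utils rather than org.opensearch.alerting.util. A minimal usage sketch follows; buildEnvelope is a made-up helper for illustration, while the imports and XContentBuilder calls follow the diff.

```kotlin
// Sketch of the new home of the response-envelope field names.
import org.opensearch.common.xcontent.XContentBuilder
import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID
import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO
import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION

fun buildEnvelope(builder: XContentBuilder, id: String, version: Long, seqNo: Long): XContentBuilder =
    builder.startObject()
        .field(_ID, id)
        .field(_VERSION, version)
        .field(_SEQ_NO, seqNo)
        .endObject()
```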
+import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION import org.opensearch.rest.RestStatus import java.io.IOException diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorAction.kt deleted file mode 100644 index 4c3c8dacb..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorAction.kt +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionType - -class IndexMonitorAction private constructor() : ActionType(NAME, ::IndexMonitorResponse) { - companion object { - val INSTANCE = IndexMonitorAction() - const val NAME = "cluster:admin/opendistro/alerting/monitor/write" - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorRequest.kt deleted file mode 100644 index 1e5b24551..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorRequest.kt +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionRequestValidationException -import org.opensearch.action.support.WriteRequest -import org.opensearch.alerting.model.Monitor -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.rest.RestRequest -import java.io.IOException - -class IndexMonitorRequest : ActionRequest { - val monitorId: String - val seqNo: Long - val primaryTerm: Long - val refreshPolicy: WriteRequest.RefreshPolicy - val method: RestRequest.Method - var monitor: Monitor - - constructor( - monitorId: String, - seqNo: Long, - primaryTerm: Long, - refreshPolicy: WriteRequest.RefreshPolicy, - method: RestRequest.Method, - monitor: Monitor - ) : super() { - this.monitorId = monitorId - this.seqNo = seqNo - this.primaryTerm = primaryTerm - this.refreshPolicy = refreshPolicy - this.method = method - this.monitor = monitor - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - monitorId = sin.readString(), - seqNo = sin.readLong(), - primaryTerm = sin.readLong(), - refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin), - method = sin.readEnum(RestRequest.Method::class.java), - monitor = Monitor.readFrom(sin) as Monitor - ) - - override fun validate(): ActionRequestValidationException? 
{ - return null - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(monitorId) - out.writeLong(seqNo) - out.writeLong(primaryTerm) - refreshPolicy.writeTo(out) - out.writeEnum(method) - monitor.writeTo(out) - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorResponse.kt deleted file mode 100644 index 5990bd680..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/IndexMonitorResponse.kt +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.ActionResponse -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.util._ID -import org.opensearch.alerting.util._PRIMARY_TERM -import org.opensearch.alerting.util._SEQ_NO -import org.opensearch.alerting.util._VERSION -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.rest.RestStatus -import java.io.IOException - -class IndexMonitorResponse : ActionResponse, ToXContentObject { - var id: String - var version: Long - var seqNo: Long - var primaryTerm: Long - var status: RestStatus - var monitor: Monitor - - constructor( - id: String, - version: Long, - seqNo: Long, - primaryTerm: Long, - status: RestStatus, - monitor: Monitor - ) : super() { - this.id = id - this.version = version - this.seqNo = seqNo - this.primaryTerm = primaryTerm - this.status = status - this.monitor = monitor - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // id - sin.readLong(), // version - sin.readLong(), // seqNo - sin.readLong(), // primaryTerm - sin.readEnum(RestStatus::class.java), // status - Monitor.readFrom(sin) as Monitor // monitor - ) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeLong(version) - out.writeLong(seqNo) - out.writeLong(primaryTerm) - out.writeEnum(status) - monitor.writeTo(out) - } - - @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .field(_ID, id) - .field(_VERSION, version) - .field(_SEQ_NO, seqNo) - .field(_PRIMARY_TERM, primaryTerm) - .field("monitor", monitor) - .endObject() - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilder.kt b/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilder.kt deleted file mode 100644 index bb356a4a3..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilder.kt +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.aggregation.bucketselectorext - -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter.Companion.BUCKET_SELECTOR_COMPOSITE_AGG_FILTER -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter.Companion.BUCKET_SELECTOR_FILTER -import 
org.opensearch.common.ParseField -import org.opensearch.common.ParsingException -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.ToXContent.Params -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.script.Script -import org.opensearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder -import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy -import org.opensearch.search.aggregations.pipeline.PipelineAggregator -import java.io.IOException -import java.util.Objects - -class BucketSelectorExtAggregationBuilder : - AbstractPipelineAggregationBuilder { - private val bucketsPathsMap: Map - val parentBucketPath: String - val script: Script - val filter: BucketSelectorExtFilter? - private var gapPolicy = GapPolicy.SKIP - - constructor( - name: String, - bucketsPathsMap: Map, - script: Script, - parentBucketPath: String, - filter: BucketSelectorExtFilter? - ) : super(name, NAME.preferredName, listOf(parentBucketPath).toTypedArray()) { - this.bucketsPathsMap = bucketsPathsMap - this.script = script - this.parentBucketPath = parentBucketPath - this.filter = filter - } - - @Throws(IOException::class) - @Suppress("UNCHECKED_CAST") - constructor(sin: StreamInput) : super(sin, NAME.preferredName) { - bucketsPathsMap = sin.readMap() as MutableMap - script = Script(sin) - gapPolicy = GapPolicy.readFrom(sin) - parentBucketPath = sin.readString() - filter = if (sin.readBoolean()) { - BucketSelectorExtFilter(sin) - } else { - null - } - } - - @Throws(IOException::class) - override fun doWriteTo(out: StreamOutput) { - out.writeMap(bucketsPathsMap) - script.writeTo(out) - gapPolicy.writeTo(out) - out.writeString(parentBucketPath) - if (filter != null) { - out.writeBoolean(true) - filter.writeTo(out) - } else { - out.writeBoolean(false) - } - } - - /** - * Sets the gap policy to use for this aggregation. - */ - fun gapPolicy(gapPolicy: GapPolicy?): BucketSelectorExtAggregationBuilder { - requireNotNull(gapPolicy) { "[gapPolicy] must not be null: [$name]" } - this.gapPolicy = gapPolicy - return this - } - - override fun createInternal(metaData: Map?): PipelineAggregator { - return BucketSelectorExtAggregator(name, bucketsPathsMap, parentBucketPath, script, gapPolicy, filter, metaData) - } - - @Throws(IOException::class) - public override fun internalXContent(builder: XContentBuilder, params: Params): XContentBuilder { - builder.field(PipelineAggregator.Parser.BUCKETS_PATH.preferredName, bucketsPathsMap as Map?) 
- .field(PARENT_BUCKET_PATH.preferredName, parentBucketPath) - .field(Script.SCRIPT_PARSE_FIELD.preferredName, script) - .field(PipelineAggregator.Parser.GAP_POLICY.preferredName, gapPolicy.getName()) - if (filter != null) { - if (filter.isCompositeAggregation) { - builder.startObject(BUCKET_SELECTOR_COMPOSITE_AGG_FILTER.preferredName) - .value(filter) - .endObject() - } else { - builder.startObject(BUCKET_SELECTOR_FILTER.preferredName) - .value(filter) - .endObject() - } - } - return builder - } - - override fun overrideBucketsPath(): Boolean { - return true - } - - override fun validate(context: ValidationContext) { - // Nothing to check - } - - override fun hashCode(): Int { - return Objects.hash(super.hashCode(), bucketsPathsMap, script, gapPolicy) - } - - override fun equals(other: Any?): Boolean { - if (this === other) return true - if (other == null || javaClass != other.javaClass) return false - if (!super.equals(other)) return false - val otherCast = other as BucketSelectorExtAggregationBuilder - return ( - bucketsPathsMap == otherCast.bucketsPathsMap && - script == otherCast.script && - gapPolicy == otherCast.gapPolicy - ) - } - - override fun getWriteableName(): String { - return NAME.preferredName - } - - companion object { - val NAME = ParseField("bucket_selector_ext") - val PARENT_BUCKET_PATH = ParseField("parent_bucket_path") - - @Throws(IOException::class) - fun parse(reducerName: String, parser: XContentParser): BucketSelectorExtAggregationBuilder { - var token: XContentParser.Token - var script: Script? = null - var currentFieldName: String? = null - var bucketsPathsMap: MutableMap? = null - var gapPolicy: GapPolicy? = null - var parentBucketPath: String? = null - var filter: BucketSelectorExtFilter? = null - while (parser.nextToken().also { token = it } !== XContentParser.Token.END_OBJECT) { - if (token === XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName() - } else if (token === XContentParser.Token.VALUE_STRING) { - when { - PipelineAggregator.Parser.BUCKETS_PATH.match(currentFieldName, parser.deprecationHandler) -> { - bucketsPathsMap = HashMap() - bucketsPathsMap["_value"] = parser.text() - } - PipelineAggregator.Parser.GAP_POLICY.match(currentFieldName, parser.deprecationHandler) -> { - gapPolicy = GapPolicy.parse(parser.text(), parser.tokenLocation) - } - Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.deprecationHandler) -> { - script = Script.parse(parser) - } - PARENT_BUCKET_PATH.match(currentFieldName, parser.deprecationHandler) -> { - parentBucketPath = parser.text() - } - else -> { - throw ParsingException( - parser.tokenLocation, - "Unknown key for a $token in [$reducerName]: [$currentFieldName]." - ) - } - } - } else if (token === XContentParser.Token.START_ARRAY) { - if (PipelineAggregator.Parser.BUCKETS_PATH.match(currentFieldName, parser.deprecationHandler)) { - val paths: MutableList = ArrayList() - while (parser.nextToken().also { token = it } !== XContentParser.Token.END_ARRAY) { - val path = parser.text() - paths.add(path) - } - bucketsPathsMap = HashMap() - for (i in paths.indices) { - bucketsPathsMap["_value$i"] = paths[i] - } - } else { - throw ParsingException( - parser.tokenLocation, - "Unknown key for a $token in [$reducerName]: [$currentFieldName]." 
- ) - } - } else if (token === XContentParser.Token.START_OBJECT) { - when { - Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.deprecationHandler) -> { - script = Script.parse(parser) - } - PipelineAggregator.Parser.BUCKETS_PATH.match(currentFieldName, parser.deprecationHandler) -> { - val map = parser.map() - bucketsPathsMap = HashMap() - for ((key, value) in map) { - bucketsPathsMap[key] = value.toString() - } - } - BUCKET_SELECTOR_FILTER.match(currentFieldName, parser.deprecationHandler) -> { - filter = BucketSelectorExtFilter.parse(reducerName, false, parser) - } - BUCKET_SELECTOR_COMPOSITE_AGG_FILTER.match( - currentFieldName, - parser.deprecationHandler - ) -> { - filter = BucketSelectorExtFilter.parse(reducerName, true, parser) - } - else -> { - throw ParsingException( - parser.tokenLocation, - "Unknown key for a $token in [$reducerName]: [$currentFieldName]." - ) - } - } - } else { - throw ParsingException(parser.tokenLocation, "Unexpected token $token in [$reducerName].") - } - } - if (bucketsPathsMap == null) { - throw ParsingException( - parser.tokenLocation, - "Missing required field [" + PipelineAggregator.Parser.BUCKETS_PATH.preferredName + - "] for bucket_selector aggregation [" + reducerName + "]" - ) - } - if (script == null) { - throw ParsingException( - parser.tokenLocation, - "Missing required field [" + Script.SCRIPT_PARSE_FIELD.preferredName + - "] for bucket_selector aggregation [" + reducerName + "]" - ) - } - - if (parentBucketPath == null) { - throw ParsingException( - parser.tokenLocation, - "Missing required field [" + PARENT_BUCKET_PATH + - "] for bucket_selector aggregation [" + reducerName + "]" - ) - } - val factory = BucketSelectorExtAggregationBuilder(reducerName, bucketsPathsMap, script, parentBucketPath, filter) - if (gapPolicy != null) { - factory.gapPolicy(gapPolicy) - } - return factory - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregator.kt b/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregator.kt deleted file mode 100644 index f121138b7..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregator.kt +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.aggregation.bucketselectorext - -import org.apache.lucene.util.BytesRef -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder.Companion.NAME -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.script.BucketAggregationSelectorScript -import org.opensearch.script.Script -import org.opensearch.search.DocValueFormat -import org.opensearch.search.aggregations.Aggregations -import org.opensearch.search.aggregations.InternalAggregation -import org.opensearch.search.aggregations.InternalAggregation.ReduceContext -import org.opensearch.search.aggregations.InternalMultiBucketAggregation -import org.opensearch.search.aggregations.bucket.SingleBucketAggregation -import org.opensearch.search.aggregations.bucket.composite.InternalComposite -import org.opensearch.search.aggregations.bucket.terms.IncludeExclude -import org.opensearch.search.aggregations.pipeline.BucketHelpers -import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy -import 
org.opensearch.search.aggregations.pipeline.SiblingPipelineAggregator -import org.opensearch.search.aggregations.support.AggregationPath -import java.io.IOException - -class BucketSelectorExtAggregator : SiblingPipelineAggregator { - private var name: String? = null - private var bucketsPathsMap: Map - private var parentBucketPath: String - private var script: Script - private var gapPolicy: GapPolicy - private var bucketSelectorExtFilter: BucketSelectorExtFilter? = null - - constructor( - name: String?, - bucketsPathsMap: Map, - parentBucketPath: String, - script: Script, - gapPolicy: GapPolicy, - filter: BucketSelectorExtFilter?, - metadata: Map? - ) : super(name, bucketsPathsMap.values.toTypedArray(), metadata) { - this.bucketsPathsMap = bucketsPathsMap - this.parentBucketPath = parentBucketPath - this.script = script - this.gapPolicy = gapPolicy - this.bucketSelectorExtFilter = filter - } - - /** - * Read from a stream. - */ - @Suppress("UNCHECKED_CAST") - @Throws(IOException::class) - constructor(sin: StreamInput) : super(sin.readString(), null, null) { - script = Script(sin) - gapPolicy = GapPolicy.readFrom(sin) - bucketsPathsMap = sin.readMap() as Map - parentBucketPath = sin.readString() - if (sin.readBoolean()) { - bucketSelectorExtFilter = BucketSelectorExtFilter(sin) - } else { - bucketSelectorExtFilter = null - } - } - - @Throws(IOException::class) - override fun doWriteTo(out: StreamOutput) { - out.writeString(name) - script.writeTo(out) - gapPolicy.writeTo(out) - out.writeGenericValue(bucketsPathsMap) - out.writeString(parentBucketPath) - if (bucketSelectorExtFilter != null) { - out.writeBoolean(true) - bucketSelectorExtFilter!!.writeTo(out) - } else { - out.writeBoolean(false) - } - } - - override fun getWriteableName(): String { - return NAME.preferredName - } - - override fun doReduce(aggregations: Aggregations, reduceContext: ReduceContext): InternalAggregation { - val parentBucketPathList = AggregationPath.parse(parentBucketPath).pathElementsAsStringList - var subAggregations: Aggregations = aggregations - for (i in 0 until parentBucketPathList.size - 1) { - subAggregations = subAggregations.get(parentBucketPathList[0]).aggregations - } - val originalAgg = subAggregations.get(parentBucketPathList.last()) as InternalMultiBucketAggregation<*, *> - val buckets = originalAgg.buckets - val factory = reduceContext.scriptService().compile(script, BucketAggregationSelectorScript.CONTEXT) - val selectedBucketsIndex: MutableList = ArrayList() - for (i in buckets.indices) { - val bucket = buckets[i] - if (bucketSelectorExtFilter != null) { - var accepted = true - if (bucketSelectorExtFilter!!.isCompositeAggregation) { - val compBucketKeyObj = (bucket as InternalComposite.InternalBucket).key - val filtersMap: HashMap? 
= bucketSelectorExtFilter!!.filtersMap - for (sourceKey in compBucketKeyObj.keys) { - if (filtersMap != null) { - if (filtersMap.containsKey(sourceKey)) { - val obj = compBucketKeyObj[sourceKey] - accepted = isAccepted(obj!!, filtersMap[sourceKey]) - if (!accepted) break - } else { - accepted = false - break - } - } - } - } else { - accepted = isAccepted(bucket.key, bucketSelectorExtFilter!!.filters) - } - if (!accepted) continue - } - - val vars: MutableMap = HashMap() - if (script.params != null) { - vars.putAll(script.params) - } - for ((varName, bucketsPath) in bucketsPathsMap) { - val value = BucketHelpers.resolveBucketValue(originalAgg, bucket, bucketsPath, gapPolicy) - vars[varName] = value - } - val executableScript = factory.newInstance(vars) - // TODO: can we use one instance of the script for all buckets? it should be stateless? - if (executableScript.execute()) { - selectedBucketsIndex.add(i) - } - } - - return BucketSelectorIndices( - name(), parentBucketPath, selectedBucketsIndex, originalAgg.metadata - ) - } - - private fun isAccepted(obj: Any, filter: IncludeExclude?): Boolean { - return when (obj.javaClass) { - String::class.java -> { - val stringFilter = filter!!.convertToStringFilter(DocValueFormat.RAW) - stringFilter.accept(BytesRef(obj as String)) - } - java.lang.Long::class.java, Long::class.java -> { - val longFilter = filter!!.convertToLongFilter(DocValueFormat.RAW) - longFilter.accept(obj as Long) - } - java.lang.Double::class.java, Double::class.java -> { - val doubleFilter = filter!!.convertToDoubleFilter() - doubleFilter.accept(obj as Long) - } - else -> { - throw IllegalStateException("Object is not comparable. Please use one of String, Long or Double type.") - } - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtFilter.kt b/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtFilter.kt deleted file mode 100644 index c1f7d159d..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtFilter.kt +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.aggregation.bucketselectorext - -import org.opensearch.common.ParseField -import org.opensearch.common.ParsingException -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent.Params -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.search.aggregations.bucket.terms.IncludeExclude -import java.io.IOException - -class BucketSelectorExtFilter : ToXContentObject, Writeable { - // used for composite aggregations - val filtersMap: HashMap? - // used for filtering string term aggregation - val filters: IncludeExclude? - - constructor(filters: IncludeExclude?) { - filtersMap = null - this.filters = filters - } - - constructor(filtersMap: HashMap?) 
{ - this.filtersMap = filtersMap - filters = null - } - - constructor(sin: StreamInput) { - if (sin.readBoolean()) { - val size: Int = sin.readVInt() - filtersMap = java.util.HashMap() - for (i in 0 until size) { - filtersMap[sin.readString()] = IncludeExclude(sin) - } - filters = null - } else { - filters = IncludeExclude(sin) - filtersMap = null - } - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - val isCompAgg = isCompositeAggregation - out.writeBoolean(isCompAgg) - if (isCompAgg) { - out.writeVInt(filtersMap!!.size) - for ((key, value) in filtersMap) { - out.writeString(key) - value.writeTo(out) - } - } else { - filters!!.writeTo(out) - } - } - - @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: Params): XContentBuilder { - if (isCompositeAggregation) { - for ((key, filter) in filtersMap!!) { - builder.startObject(key) - filter.toXContent(builder, params) - builder.endObject() - } - } else { - filters!!.toXContent(builder, params) - } - return builder - } - - val isCompositeAggregation: Boolean - get() = if (filtersMap != null && filters == null) { - true - } else if (filtersMap == null && filters != null) { - false - } else { - throw IllegalStateException("Type of selector cannot be determined") - } - - companion object { - const val NAME = "filter" - var BUCKET_SELECTOR_FILTER = ParseField("filter") - var BUCKET_SELECTOR_COMPOSITE_AGG_FILTER = ParseField("composite_agg_filter") - - @Throws(IOException::class) - fun parse(reducerName: String, isCompositeAggregation: Boolean, parser: XContentParser): BucketSelectorExtFilter { - var token: XContentParser.Token - return if (isCompositeAggregation) { - val filtersMap = HashMap() - while (parser.nextToken().also { token = it } !== XContentParser.Token.END_OBJECT) { - if (token === XContentParser.Token.FIELD_NAME) { - val sourceKey = parser.currentName() - token = parser.nextToken() - filtersMap[sourceKey] = parseIncludeExclude(reducerName, parser) - } else { - throw ParsingException( - parser.tokenLocation, - "Unknown key for a " + token + " in [" + reducerName + "]: [" + parser.currentName() + "]." - ) - } - } - BucketSelectorExtFilter(filtersMap) - } else { - BucketSelectorExtFilter(parseIncludeExclude(reducerName, parser)) - } - } - - @Throws(IOException::class) - private fun parseIncludeExclude(reducerName: String, parser: XContentParser): IncludeExclude { - var token: XContentParser.Token - var include: IncludeExclude? = null - var exclude: IncludeExclude? = null - while (parser.nextToken().also { token = it } !== XContentParser.Token.END_OBJECT) { - val fieldName = parser.currentName() - when { - IncludeExclude.INCLUDE_FIELD.match(fieldName, parser.deprecationHandler) -> { - parser.nextToken() - include = IncludeExclude.parseInclude(parser) - } - IncludeExclude.EXCLUDE_FIELD.match(fieldName, parser.deprecationHandler) -> { - parser.nextToken() - exclude = IncludeExclude.parseExclude(parser) - } - else -> { - throw ParsingException( - parser.tokenLocation, - "Unknown key for a $token in [$reducerName]: [$fieldName]." 
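The parse routines above accept either a plain terms-style include/exclude block under "filter" or, for composite aggregations, a map from source key to include/exclude block under "composite_agg_filter". A minimal Kotlin sketch of driving BucketSelectorExtFilter.parse directly with such a block; the JSON shape and the "example_trigger_id" value are illustrative assumptions, and the class is used exactly as declared in the file removed above (its post-move package is not asserted here):

// Hypothetical usage sketch; not part of this change.
// import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter  // pre-move location (removed in this diff)
import org.opensearch.common.xcontent.DeprecationHandler
import org.opensearch.common.xcontent.NamedXContentRegistry
import org.opensearch.common.xcontent.XContentType

fun parseCompositeFilterSketch(): BucketSelectorExtFilter {
    // One composite source key ("service") restricted to two bucket values; shape inferred from parseIncludeExclude above.
    val json = """{ "service": { "include": ["payments", "checkout"] } }"""
    val parser = XContentType.JSON.xContent().createParser(
        NamedXContentRegistry.EMPTY,
        DeprecationHandler.THROW_UNSUPPORTED_OPERATIONS,
        json
    )
    parser.nextToken() // position on START_OBJECT, mirroring how the aggregation builder's parse() hands the parser over
    return BucketSelectorExtFilter.parse(
        reducerName = "example_trigger_id", // placeholder; the builder passes the trigger id here
        isCompositeAggregation = true,
        parser = parser
    )
}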
- ) - } - } - } - return IncludeExclude.merge(include, exclude) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorIndices.kt b/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorIndices.kt deleted file mode 100644 index b0ca6ab15..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorIndices.kt +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.aggregation.bucketselectorext - -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.ToXContent.Params -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.search.aggregations.InternalAggregation -import java.io.IOException -import java.util.Objects - -open class BucketSelectorIndices( - name: String?, - private var parentBucketPath: String, - var bucketIndices: List, - metaData: Map? -) : InternalAggregation(name, metaData) { - - @Throws(IOException::class) - override fun doWriteTo(out: StreamOutput) { - out.writeString(parentBucketPath) - out.writeIntArray(bucketIndices.stream().mapToInt { i: Int? -> i!! }.toArray()) - } - - override fun getWriteableName(): String { - return name - } - - override fun reduce(aggregations: List, reduceContext: ReduceContext): BucketSelectorIndices { - throw UnsupportedOperationException("Not supported") - } - - override fun mustReduceOnSingleInternalAgg(): Boolean { - return false - } - - override fun getProperty(path: MutableList?): Any { - throw UnsupportedOperationException("Not supported") - } - - internal object Fields { - const val PARENT_BUCKET_PATH = "parent_bucket_path" - const val BUCKET_INDICES = "bucket_indices" - } - - @Throws(IOException::class) - override fun doXContentBody(builder: XContentBuilder, params: Params): XContentBuilder { - builder.field(Fields.PARENT_BUCKET_PATH, parentBucketPath) - builder.field(Fields.BUCKET_INDICES, bucketIndices) - otherStatsToXContent(builder) - return builder - } - - @Throws(IOException::class) - protected fun otherStatsToXContent(builder: XContentBuilder): XContentBuilder { - return builder - } - - override fun hashCode(): Int { - return Objects.hash(super.hashCode(), parentBucketPath) - } - - override fun equals(other: Any?): Boolean { - if (this === other) return true - if (other == null || javaClass != other.javaClass) return false - if (!super.equals(other)) return false - val otherCast = other as BucketSelectorIndices - return name == otherCast.name && parentBucketPath == otherCast.parentBucketPath - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertError.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertError.kt deleted file mode 100644 index 72d788684..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertError.kt +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.alerts - -import org.opensearch.alerting.opensearchapi.instant -import org.opensearch.alerting.opensearchapi.optionalTimeField -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import 
org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.time.Instant - -data class AlertError(val timestamp: Instant, val message: String) : Writeable, ToXContent { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readInstant(), // timestamp - sin.readString() // message - ) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeInstant(timestamp) - out.writeString(message) - } - companion object { - - const val TIMESTAMP_FIELD = "timestamp" - const val MESSAGE_FIELD = "message" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): AlertError { - - lateinit var timestamp: Instant - lateinit var message: String - - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - TIMESTAMP_FIELD -> timestamp = requireNotNull(xcp.instant()) - MESSAGE_FIELD -> message = xcp.text() - } - } - return AlertError(timestamp = timestamp, message = message) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): AlertError { - return AlertError(sin) - } - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .optionalTimeField(TIMESTAMP_FIELD, timestamp) - .field(MESSAGE_FIELD, message) - .endObject() - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt index cd7348be2..e813e8d0d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt @@ -23,7 +23,6 @@ import org.opensearch.action.support.IndicesOptions import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.alerting.alerts.AlertIndices.Companion.ALERT_HISTORY_WRITE_INDEX import org.opensearch.alerting.alerts.AlertIndices.Companion.ALERT_INDEX -import org.opensearch.alerting.model.DataSources import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.settings.AlertingSettings.Companion.ALERT_HISTORY_ENABLED @@ -46,6 +45,7 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.DataSources import org.opensearch.threadpool.Scheduler.Cancellable import org.opensearch.threadpool.ThreadPool import java.time.Instant diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt index a9c704958..100281a87 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt @@ -13,8 +13,6 @@ import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.alerts.AlertIndices.Companion.ALERT_HISTORY_WRITE_INDEX import org.opensearch.alerting.alerts.AlertIndices.Companion.ALERT_INDEX -import org.opensearch.alerting.model.Alert -import 
org.opensearch.alerting.model.Monitor import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.client.Client import org.opensearch.common.bytes.BytesReference @@ -25,6 +23,8 @@ import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.index.VersionType import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestStatus diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/ActionExecutionResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/ActionExecutionResult.kt deleted file mode 100644 index ecdbd8ea4..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/ActionExecutionResult.kt +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.opensearchapi.instant -import org.opensearch.alerting.opensearchapi.optionalTimeField -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import java.io.IOException -import java.time.Instant - -/** - * When an alert triggered, the trigger's actions will be executed. - * Action execution result records action throttle result and is a part of Alert. - */ -data class ActionExecutionResult( - val actionId: String, - val lastExecutionTime: Instant?, - val throttledCount: Int = 0 -) : Writeable, ToXContentObject { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // actionId - sin.readOptionalInstant(), // lastExecutionTime - sin.readInt() // throttledCount - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .field(ACTION_ID_FIELD, actionId) - .optionalTimeField(LAST_EXECUTION_TIME_FIELD, lastExecutionTime) - .field(THROTTLED_COUNT_FIELD, throttledCount) - .endObject() - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(actionId) - out.writeOptionalInstant(lastExecutionTime) - out.writeInt(throttledCount) - } - - companion object { - const val ACTION_ID_FIELD = "action_id" - const val LAST_EXECUTION_TIME_FIELD = "last_execution_time" - const val THROTTLED_COUNT_FIELD = "throttled_count" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): ActionExecutionResult { - lateinit var actionId: String - var throttledCount: Int = 0 - var lastExecutionTime: Instant? 
= null - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - when (fieldName) { - ACTION_ID_FIELD -> actionId = xcp.text() - THROTTLED_COUNT_FIELD -> throttledCount = xcp.intValue() - LAST_EXECUTION_TIME_FIELD -> lastExecutionTime = xcp.instant() - - else -> { - throw IllegalStateException("Unexpected field: $fieldName, while parsing action") - } - } - } - - requireNotNull(actionId) { "Must set action id" } - return ActionExecutionResult(actionId, lastExecutionTime, throttledCount) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): ActionExecutionResult { - return ActionExecutionResult(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/AggregationResultBucket.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/AggregationResultBucket.kt deleted file mode 100644 index 9ba7f0291..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/AggregationResultBucket.kt +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.common.ParsingException -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.util.Locale - -data class AggregationResultBucket( - val parentBucketPath: String?, - val bucketKeys: List, - val bucket: Map? // TODO: Should reduce contents to only top-level to not include sub-aggs here -) : Writeable, ToXContentObject { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this(sin.readString(), sin.readStringList(), sin.readMap()) - - override fun writeTo(out: StreamOutput) { - out.writeString(parentBucketPath) - out.writeStringCollection(bucketKeys) - out.writeMap(bucket) - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - innerXContent(builder) - return builder.endObject() - } - - fun innerXContent(builder: XContentBuilder): XContentBuilder { - builder.startObject(CONFIG_NAME) - .field(PARENTS_BUCKET_PATH, parentBucketPath) - .field(BUCKET_KEYS, bucketKeys.toTypedArray()) - .field(BUCKET, bucket) - .endObject() - return builder - } - - companion object { - const val CONFIG_NAME = "agg_alert_content" - const val PARENTS_BUCKET_PATH = "parent_bucket_path" - const val BUCKET_KEYS = "bucket_keys" - private const val BUCKET = "bucket" - - fun parse(xcp: XContentParser): AggregationResultBucket { - var parentBucketPath: String? = null - var bucketKeys = mutableListOf() - var bucket: MutableMap? 
= null - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - - if (CONFIG_NAME != xcp.currentName()) { - throw ParsingException( - xcp.tokenLocation, - String.format( - Locale.ROOT, "Failed to parse object: expecting token with name [%s] but found [%s]", - CONFIG_NAME, xcp.currentName() - ) - ) - } - while (xcp.nextToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - when (fieldName) { - PARENTS_BUCKET_PATH -> parentBucketPath = xcp.text() - BUCKET_KEYS -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - bucketKeys.add(xcp.text()) - } - } - BUCKET -> bucket = xcp.map() - } - } - return AggregationResultBucket(parentBucketPath, bucketKeys, bucket) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Alert.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Alert.kt deleted file mode 100644 index 51e617f44..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Alert.kt +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.alerts.AlertError -import org.opensearch.alerting.opensearchapi.instant -import org.opensearch.alerting.opensearchapi.optionalTimeField -import org.opensearch.alerting.opensearchapi.optionalUserField -import org.opensearch.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.lucene.uid.Versions -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User -import java.io.IOException -import java.time.Instant - -data class Alert( - val id: String = NO_ID, - val version: Long = NO_VERSION, - val schemaVersion: Int = NO_SCHEMA_VERSION, - val monitorId: String, - val monitorName: String, - val monitorVersion: Long, - val monitorUser: User?, - val triggerId: String, - val triggerName: String, - val findingIds: List, - val relatedDocIds: List, - val state: State, - val startTime: Instant, - val endTime: Instant? = null, - val lastNotificationTime: Instant? = null, - val acknowledgedTime: Instant? = null, - val errorMessage: String? = null, - val errorHistory: List, - val severity: String, - val actionExecutionResults: List, - val aggregationResultBucket: AggregationResultBucket? = null -) : Writeable, ToXContent { - - init { - if (errorMessage != null) require(state == State.DELETED || state == State.ERROR) { - "Attempt to create an alert with an error in state: $state" - } - } - - constructor( - monitor: Monitor, - trigger: QueryLevelTrigger, - startTime: Instant, - lastNotificationTime: Instant?, - state: State = State.ACTIVE, - errorMessage: String? 
= null, - errorHistory: List = mutableListOf(), - actionExecutionResults: List = mutableListOf(), - schemaVersion: Int = NO_SCHEMA_VERSION - ) : this( - monitorId = monitor.id, monitorName = monitor.name, monitorVersion = monitor.version, monitorUser = monitor.user, - triggerId = trigger.id, triggerName = trigger.name, state = state, startTime = startTime, - lastNotificationTime = lastNotificationTime, errorMessage = errorMessage, errorHistory = errorHistory, - severity = trigger.severity, actionExecutionResults = actionExecutionResults, schemaVersion = schemaVersion, - aggregationResultBucket = null, findingIds = emptyList(), relatedDocIds = emptyList() - ) - - constructor( - monitor: Monitor, - trigger: BucketLevelTrigger, - startTime: Instant, - lastNotificationTime: Instant?, - state: State = State.ACTIVE, - errorMessage: String? = null, - errorHistory: List = mutableListOf(), - actionExecutionResults: List = mutableListOf(), - schemaVersion: Int = NO_SCHEMA_VERSION - ) : this( - monitorId = monitor.id, monitorName = monitor.name, monitorVersion = monitor.version, monitorUser = monitor.user, - triggerId = trigger.id, triggerName = trigger.name, state = state, startTime = startTime, - lastNotificationTime = lastNotificationTime, errorMessage = errorMessage, errorHistory = errorHistory, - severity = trigger.severity, actionExecutionResults = actionExecutionResults, schemaVersion = schemaVersion, - aggregationResultBucket = null, findingIds = emptyList(), relatedDocIds = emptyList() - ) - - constructor( - monitor: Monitor, - trigger: BucketLevelTrigger, - startTime: Instant, - lastNotificationTime: Instant?, - state: State = State.ACTIVE, - errorMessage: String? = null, - errorHistory: List = mutableListOf(), - actionExecutionResults: List = mutableListOf(), - schemaVersion: Int = NO_SCHEMA_VERSION, - aggregationResultBucket: AggregationResultBucket - ) : this( - monitorId = monitor.id, monitorName = monitor.name, monitorVersion = monitor.version, monitorUser = monitor.user, - triggerId = trigger.id, triggerName = trigger.name, state = state, startTime = startTime, - lastNotificationTime = lastNotificationTime, errorMessage = errorMessage, errorHistory = errorHistory, - severity = trigger.severity, actionExecutionResults = actionExecutionResults, schemaVersion = schemaVersion, - aggregationResultBucket = aggregationResultBucket, findingIds = emptyList(), relatedDocIds = emptyList() - ) - - constructor( - id: String = NO_ID, - monitor: Monitor, - trigger: DocumentLevelTrigger, - findingIds: List, - relatedDocIds: List, - startTime: Instant, - lastNotificationTime: Instant?, - state: State = State.ACTIVE, - errorMessage: String? 
= null, - errorHistory: List = mutableListOf(), - actionExecutionResults: List = mutableListOf(), - schemaVersion: Int = NO_SCHEMA_VERSION - ) : this( - id = id, monitorId = monitor.id, monitorName = monitor.name, monitorVersion = monitor.version, monitorUser = monitor.user, - triggerId = trigger.id, triggerName = trigger.name, state = state, startTime = startTime, - lastNotificationTime = lastNotificationTime, errorMessage = errorMessage, errorHistory = errorHistory, - severity = trigger.severity, actionExecutionResults = actionExecutionResults, schemaVersion = schemaVersion, - aggregationResultBucket = null, findingIds = findingIds, relatedDocIds = relatedDocIds - ) - - enum class State { - ACTIVE, ACKNOWLEDGED, COMPLETED, ERROR, DELETED - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - id = sin.readString(), - version = sin.readLong(), - schemaVersion = sin.readInt(), - monitorId = sin.readString(), - monitorName = sin.readString(), - monitorVersion = sin.readLong(), - monitorUser = if (sin.readBoolean()) { - User(sin) - } else null, - triggerId = sin.readString(), - triggerName = sin.readString(), - findingIds = sin.readStringList(), - relatedDocIds = sin.readStringList(), - state = sin.readEnum(State::class.java), - startTime = sin.readInstant(), - endTime = sin.readOptionalInstant(), - lastNotificationTime = sin.readOptionalInstant(), - acknowledgedTime = sin.readOptionalInstant(), - errorMessage = sin.readOptionalString(), - errorHistory = sin.readList(::AlertError), - severity = sin.readString(), - actionExecutionResults = sin.readList(::ActionExecutionResult), - aggregationResultBucket = if (sin.readBoolean()) AggregationResultBucket(sin) else null - ) - - fun isAcknowledged(): Boolean = (state == State.ACKNOWLEDGED) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeLong(version) - out.writeInt(schemaVersion) - out.writeString(monitorId) - out.writeString(monitorName) - out.writeLong(monitorVersion) - out.writeBoolean(monitorUser != null) - monitorUser?.writeTo(out) - out.writeString(triggerId) - out.writeString(triggerName) - out.writeStringCollection(findingIds) - out.writeStringCollection(relatedDocIds) - out.writeEnum(state) - out.writeInstant(startTime) - out.writeOptionalInstant(endTime) - out.writeOptionalInstant(lastNotificationTime) - out.writeOptionalInstant(acknowledgedTime) - out.writeOptionalString(errorMessage) - out.writeCollection(errorHistory) - out.writeString(severity) - out.writeCollection(actionExecutionResults) - if (aggregationResultBucket != null) { - out.writeBoolean(true) - aggregationResultBucket.writeTo(out) - } else { - out.writeBoolean(false) - } - } - - companion object { - - const val ALERT_ID_FIELD = "id" - const val SCHEMA_VERSION_FIELD = "schema_version" - const val ALERT_VERSION_FIELD = "version" - const val MONITOR_ID_FIELD = "monitor_id" - const val MONITOR_VERSION_FIELD = "monitor_version" - const val MONITOR_NAME_FIELD = "monitor_name" - const val MONITOR_USER_FIELD = "monitor_user" - const val TRIGGER_ID_FIELD = "trigger_id" - const val TRIGGER_NAME_FIELD = "trigger_name" - const val FINDING_IDS = "finding_ids" - const val RELATED_DOC_IDS = "related_doc_ids" - const val STATE_FIELD = "state" - const val START_TIME_FIELD = "start_time" - const val LAST_NOTIFICATION_TIME_FIELD = "last_notification_time" - const val END_TIME_FIELD = "end_time" - const val ACKNOWLEDGED_TIME_FIELD = "acknowledged_time" - const val ERROR_MESSAGE_FIELD = "error_message" - const 
val ALERT_HISTORY_FIELD = "alert_history" - const val SEVERITY_FIELD = "severity" - const val ACTION_EXECUTION_RESULTS_FIELD = "action_execution_results" - const val BUCKET_KEYS = AggregationResultBucket.BUCKET_KEYS - const val PARENTS_BUCKET_PATH = AggregationResultBucket.PARENTS_BUCKET_PATH - const val NO_ID = "" - const val NO_VERSION = Versions.NOT_FOUND - - @JvmStatic @JvmOverloads - @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): Alert { - - lateinit var monitorId: String - var schemaVersion = NO_SCHEMA_VERSION - lateinit var monitorName: String - var monitorVersion: Long = Versions.NOT_FOUND - var monitorUser: User? = null - lateinit var triggerId: String - lateinit var triggerName: String - val findingIds = mutableListOf() - val relatedDocIds = mutableListOf() - lateinit var state: State - lateinit var startTime: Instant - lateinit var severity: String - var endTime: Instant? = null - var lastNotificationTime: Instant? = null - var acknowledgedTime: Instant? = null - var errorMessage: String? = null - val errorHistory: MutableList = mutableListOf() - val actionExecutionResults: MutableList = mutableListOf() - var aggAlertBucket: AggregationResultBucket? = null - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - MONITOR_ID_FIELD -> monitorId = xcp.text() - SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() - MONITOR_NAME_FIELD -> monitorName = xcp.text() - MONITOR_VERSION_FIELD -> monitorVersion = xcp.longValue() - MONITOR_USER_FIELD -> monitorUser = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) - TRIGGER_ID_FIELD -> triggerId = xcp.text() - FINDING_IDS -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - findingIds.add(xcp.text()) - } - } - RELATED_DOC_IDS -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - relatedDocIds.add(xcp.text()) - } - } - STATE_FIELD -> state = State.valueOf(xcp.text()) - TRIGGER_NAME_FIELD -> triggerName = xcp.text() - START_TIME_FIELD -> startTime = requireNotNull(xcp.instant()) - END_TIME_FIELD -> endTime = xcp.instant() - LAST_NOTIFICATION_TIME_FIELD -> lastNotificationTime = xcp.instant() - ACKNOWLEDGED_TIME_FIELD -> acknowledgedTime = xcp.instant() - ERROR_MESSAGE_FIELD -> errorMessage = xcp.textOrNull() - ALERT_HISTORY_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - errorHistory.add(AlertError.parse(xcp)) - } - } - SEVERITY_FIELD -> severity = xcp.text() - ACTION_EXECUTION_RESULTS_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - actionExecutionResults.add(ActionExecutionResult.parse(xcp)) - } - } - AggregationResultBucket.CONFIG_NAME -> { - // If an Alert with aggAlertBucket contents is indexed into the alerts index first, then - // that field will be added to the mappings. 
- // In this case, that field will default to null when it isn't present for Alerts created by Query-Level Monitors - // (even though the toXContent doesn't output the field) so null is being accounted for here. - aggAlertBucket = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { - null - } else { - AggregationResultBucket.parse(xcp) - } - } - } - } - - return Alert( - id = id, version = version, schemaVersion = schemaVersion, monitorId = requireNotNull(monitorId), - monitorName = requireNotNull(monitorName), monitorVersion = monitorVersion, monitorUser = monitorUser, - triggerId = requireNotNull(triggerId), triggerName = requireNotNull(triggerName), - state = requireNotNull(state), startTime = requireNotNull(startTime), endTime = endTime, - lastNotificationTime = lastNotificationTime, acknowledgedTime = acknowledgedTime, - errorMessage = errorMessage, errorHistory = errorHistory, severity = severity, - actionExecutionResults = actionExecutionResults, aggregationResultBucket = aggAlertBucket, findingIds = findingIds, - relatedDocIds = relatedDocIds - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Alert { - return Alert(sin) - } - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, true) - } - - fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { - return createXContentBuilder(builder, false) - } - private fun createXContentBuilder(builder: XContentBuilder, secure: Boolean): XContentBuilder { - builder.startObject() - .field(ALERT_ID_FIELD, id) - .field(ALERT_VERSION_FIELD, version) - .field(MONITOR_ID_FIELD, monitorId) - .field(SCHEMA_VERSION_FIELD, schemaVersion) - .field(MONITOR_VERSION_FIELD, monitorVersion) - .field(MONITOR_NAME_FIELD, monitorName) - - if (!secure) { - builder.optionalUserField(MONITOR_USER_FIELD, monitorUser) - } - - builder.field(TRIGGER_ID_FIELD, triggerId) - .field(TRIGGER_NAME_FIELD, triggerName) - .field(FINDING_IDS, findingIds.toTypedArray()) - .field(RELATED_DOC_IDS, relatedDocIds.toTypedArray()) - .field(STATE_FIELD, state) - .field(ERROR_MESSAGE_FIELD, errorMessage) - .field(ALERT_HISTORY_FIELD, errorHistory.toTypedArray()) - .field(SEVERITY_FIELD, severity) - .field(ACTION_EXECUTION_RESULTS_FIELD, actionExecutionResults.toTypedArray()) - .optionalTimeField(START_TIME_FIELD, startTime) - .optionalTimeField(LAST_NOTIFICATION_TIME_FIELD, lastNotificationTime) - .optionalTimeField(END_TIME_FIELD, endTime) - .optionalTimeField(ACKNOWLEDGED_TIME_FIELD, acknowledgedTime) - aggregationResultBucket?.innerXContent(builder) - builder.endObject() - return builder - } - - fun asTemplateArg(): Map { - return mapOf( - ACKNOWLEDGED_TIME_FIELD to acknowledgedTime?.toEpochMilli(), - ALERT_ID_FIELD to id, - ALERT_VERSION_FIELD to version, - END_TIME_FIELD to endTime?.toEpochMilli(), - ERROR_MESSAGE_FIELD to errorMessage, - LAST_NOTIFICATION_TIME_FIELD to lastNotificationTime?.toEpochMilli(), - SEVERITY_FIELD to severity, - START_TIME_FIELD to startTime.toEpochMilli(), - STATE_FIELD to state.toString(), - // Converting bucket keys to comma separated String to avoid manipulation in Action mustache templates - BUCKET_KEYS to aggregationResultBucket?.bucketKeys?.joinToString(","), - PARENTS_BUCKET_PATH to aggregationResultBucket?.parentBucketPath, - FINDING_IDS to findingIds.joinToString(","), - RELATED_DOC_IDS to relatedDocIds.joinToString(",") - ) - } -} diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt
index d1d5411f6..7a96d2a44 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt
@@ -9,7 +9,6 @@ import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
 import org.opensearch.action.get.GetRequest
 import org.opensearch.action.get.GetResponse
-import org.opensearch.alerting.core.model.ScheduledJob
 import org.opensearch.alerting.model.destination.email.EmailAccount
 import org.opensearch.alerting.model.destination.email.EmailGroup
 import org.opensearch.alerting.opensearchapi.suspendUntil
@@ -21,6 +20,7 @@ import org.opensearch.common.xcontent.XContentHelper
 import org.opensearch.common.xcontent.XContentParser
 import org.opensearch.common.xcontent.XContentParserUtils
 import org.opensearch.common.xcontent.XContentType
+import org.opensearch.commons.alerting.model.ScheduledJob
 import org.opensearch.index.IndexNotFoundException
 
 /**
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTrigger.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTrigger.kt
deleted file mode 100644
index a77944a11..000000000
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTrigger.kt
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright OpenSearch Contributors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package org.opensearch.alerting.model
-
-import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder
-import org.opensearch.alerting.model.Trigger.Companion.ACTIONS_FIELD
-import org.opensearch.alerting.model.Trigger.Companion.ID_FIELD
-import org.opensearch.alerting.model.Trigger.Companion.NAME_FIELD
-import org.opensearch.alerting.model.Trigger.Companion.SEVERITY_FIELD
-import org.opensearch.alerting.model.action.Action
-import org.opensearch.common.CheckedFunction
-import org.opensearch.common.ParseField
-import org.opensearch.common.UUIDs
-import org.opensearch.common.io.stream.StreamInput
-import org.opensearch.common.io.stream.StreamOutput
-import org.opensearch.common.xcontent.NamedXContentRegistry
-import org.opensearch.common.xcontent.ToXContent
-import org.opensearch.common.xcontent.XContentBuilder
-import org.opensearch.common.xcontent.XContentParser
-import org.opensearch.common.xcontent.XContentParser.Token
-import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken
-import java.io.IOException
-
-/**
- * A multi-alert Trigger available with Bucket-Level Monitors that filters aggregation buckets via a pipeline
- * aggregator.
- */ -data class BucketLevelTrigger( - override val id: String = UUIDs.base64UUID(), - override val name: String, - override val severity: String, - val bucketSelector: BucketSelectorExtAggregationBuilder, - override val actions: List -) : Trigger { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // id - sin.readString(), // name - sin.readString(), // severity - BucketSelectorExtAggregationBuilder(sin), // condition - sin.readList(::Action) // actions - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(BUCKET_LEVEL_TRIGGER_FIELD) - .field(ID_FIELD, id) - .field(NAME_FIELD, name) - .field(SEVERITY_FIELD, severity) - .startObject(CONDITION_FIELD) - bucketSelector.internalXContent(builder, params) - builder.endObject() - .field(ACTIONS_FIELD, actions.toTypedArray()) - .endObject() - .endObject() - return builder - } - - override fun name(): String { - return BUCKET_LEVEL_TRIGGER_FIELD - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeString(name) - out.writeString(severity) - bucketSelector.writeTo(out) - out.writeCollection(actions) - } - - fun asTemplateArg(): Map { - return mapOf( - ID_FIELD to id, - NAME_FIELD to name, - SEVERITY_FIELD to severity, - ACTIONS_FIELD to actions.map { it.asTemplateArg() }, - PARENT_BUCKET_PATH to getParentBucketPath() - ) - } - - fun getParentBucketPath(): String { - return bucketSelector.parentBucketPath - } - - companion object { - const val BUCKET_LEVEL_TRIGGER_FIELD = "bucket_level_trigger" - const val CONDITION_FIELD = "condition" - const val PARENT_BUCKET_PATH = "parentBucketPath" - - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - Trigger::class.java, ParseField(BUCKET_LEVEL_TRIGGER_FIELD), - CheckedFunction { parseInner(it) } - ) - - @JvmStatic - @Throws(IOException::class) - fun parseInner(xcp: XContentParser): BucketLevelTrigger { - var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified - lateinit var name: String - lateinit var severity: String - val actions: MutableList = mutableListOf() - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - lateinit var bucketSelector: BucketSelectorExtAggregationBuilder - - while (xcp.nextToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - - xcp.nextToken() - when (fieldName) { - ID_FIELD -> id = xcp.text() - NAME_FIELD -> name = xcp.text() - SEVERITY_FIELD -> severity = xcp.text() - CONDITION_FIELD -> { - // Using the trigger id as the name in the bucket selector since it is validated for uniqueness within Monitors. - // The contents of the trigger definition are round-tripped through parse and toXContent during Monitor creation - // ensuring that the id is available here in the version of the Monitor object that will be executed, even if the - // user submitted a custom trigger id after the condition definition. - bucketSelector = BucketSelectorExtAggregationBuilder.parse(id, xcp) - } - ACTIONS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - actions.add(Action.parse(xcp)) - } - } - } - } - - return BucketLevelTrigger( - id = requireNotNull(id) { "Trigger id is null." 
}, - name = requireNotNull(name) { "Trigger name is null" }, - severity = requireNotNull(severity) { "Trigger severity is null" }, - bucketSelector = requireNotNull(bucketSelector) { "Trigger condition is null" }, - actions = requireNotNull(actions) { "Trigger actions are null" } - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): BucketLevelTrigger { - return BucketLevelTrigger(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTriggerRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTriggerRunResult.kt index 63131d835..b996032ce 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTriggerRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/BucketLevelTriggerRunResult.kt @@ -9,6 +9,7 @@ import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.model.AggregationResultBucket import java.io.IOException data class BucketLevelTriggerRunResult( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/DataSources.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/DataSources.kt deleted file mode 100644 index bb9f71d46..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/DataSources.kt +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import java.io.IOException - -data class DataSources( - /** Configures a custom query index name for the monitor. Creates a new index if index with given name not present.*/ - val queryIndex: String = ScheduledJob.DOC_LEVEL_QUERIES_INDEX, - - /** Configures a custom index to store findings for a monitor. Creates a new index if index with given name not present. - * If index is pre-existing, mapping is updated*/ - val findingsIndex: String = AlertIndices.FINDING_HISTORY_WRITE_INDEX, - - /** Configures a custom index to store alerts for a monitor. Creates a new index if index with given name not present. - * If index is pre-existing, mapping is updated. */ - val alertsIndex: String = AlertIndices.ALERT_INDEX, - - /** Configures custom mappings by field type for query index. - * Custom query index mappings are configurable, only if a custom query index is configured too. 
*/ - val queryIndexMappingsByType: Map> = mapOf() - -) : Writeable, ToXContentObject { - - init { - require(queryIndex.isNotEmpty()) { - "Query index cannot be empty" - } - require(findingsIndex.isNotEmpty()) { - "Findings index cannot be empty" - } - require(alertsIndex.isNotEmpty()) { - "Alerts index cannot be empty" - } - if (queryIndexMappingsByType.isNotEmpty()) { - require(queryIndex != ScheduledJob.DOC_LEVEL_QUERIES_INDEX) { - "Custom query index mappings are configurable only if a custom query index is configured too." - } - require( - queryIndexMappingsByType.size == 1 && - queryIndexMappingsByType.containsKey("text") && - queryIndexMappingsByType.get("text")?.size == 1 && - queryIndexMappingsByType.get("text")!!.containsKey("analyzer") - ) { - "Custom query index mappings are currently configurable only for 'text' fields and mapping parameter can only be 'analyzer'" - } - } - } - - @Throws(IOException::class) - @Suppress("UNCHECKED_CAST") - constructor(sin: StreamInput) : this( - queryIndex = sin.readString(), - findingsIndex = sin.readString(), - alertsIndex = sin.readString(), - queryIndexMappingsByType = sin.readMap() as Map> - ) - - @Suppress("UNCHECKED_CAST") - fun asTemplateArg(): Map { - return mapOf( - QUERY_INDEX_FIELD to queryIndex, - FINDINGS_INDEX_FIELD to findingsIndex, - ALERTS_INDEX_FIELD to alertsIndex, - QUERY_INDEX_MAPPINGS_BY_TYPE to queryIndexMappingsByType - ) as Map - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - builder.field(QUERY_INDEX_FIELD, queryIndex) - builder.field(FINDINGS_INDEX_FIELD, findingsIndex) - builder.field(ALERTS_INDEX_FIELD, alertsIndex) - builder.field(QUERY_INDEX_MAPPINGS_BY_TYPE, queryIndexMappingsByType as Map) - builder.endObject() - return builder - } - - companion object { - const val QUERY_INDEX_FIELD = "query_index" - const val FINDINGS_INDEX_FIELD = "findings_index" - const val ALERTS_INDEX_FIELD = "alerts_index" - const val QUERY_INDEX_MAPPINGS_BY_TYPE = "query_index_mappings_by_type" - - @JvmStatic - @Throws(IOException::class) - @Suppress("UNCHECKED_CAST") - fun parse(xcp: XContentParser): DataSources { - var queryIndex = "" - var findingsIndex = "" - var alertsIndex = "" - var queryIndexMappingsByType: Map> = mapOf() - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - QUERY_INDEX_FIELD -> queryIndex = xcp.text() - FINDINGS_INDEX_FIELD -> findingsIndex = xcp.text() - ALERTS_INDEX_FIELD -> alertsIndex = xcp.text() - QUERY_INDEX_MAPPINGS_BY_TYPE -> queryIndexMappingsByType = xcp.map() as Map> - } - } - return DataSources( - queryIndex = queryIndex, - findingsIndex = findingsIndex, - alertsIndex = alertsIndex, - queryIndexMappingsByType = queryIndexMappingsByType - ) - } - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(queryIndex) - out.writeString(findingsIndex) - out.writeString(alertsIndex) - out.writeMap(queryIndexMappingsByType as Map) - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentExecutionContext.kt index a6acd027a..0caad1f4a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentExecutionContext.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentExecutionContext.kt @@ -5,7 +5,7 @@ package org.opensearch.alerting.model -import org.opensearch.alerting.core.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocLevelQuery data class DocumentExecutionContext( val queries: List, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentLevelTrigger.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentLevelTrigger.kt deleted file mode 100644 index 003d738fa..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/DocumentLevelTrigger.kt +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.model.Trigger.Companion.ACTIONS_FIELD -import org.opensearch.alerting.model.Trigger.Companion.ID_FIELD -import org.opensearch.alerting.model.Trigger.Companion.NAME_FIELD -import org.opensearch.alerting.model.Trigger.Companion.SEVERITY_FIELD -import org.opensearch.alerting.model.action.Action -import org.opensearch.common.CheckedFunction -import org.opensearch.common.ParseField -import org.opensearch.common.UUIDs -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.script.Script -import java.io.IOException - -/** - * A single-alert Trigger that uses Painless scripts which execute on the response of the Monitor input query to define - * alerting conditions. - */ -data class DocumentLevelTrigger( - override val id: String = UUIDs.base64UUID(), - override val name: String, - override val severity: String, - override val actions: List, - val condition: Script -) : Trigger { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // id - sin.readString(), // name - sin.readString(), // severity - sin.readList(::Action), // actions - Script(sin) - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(DOCUMENT_LEVEL_TRIGGER_FIELD) - .field(ID_FIELD, id) - .field(NAME_FIELD, name) - .field(SEVERITY_FIELD, severity) - .startObject(CONDITION_FIELD) - .field(SCRIPT_FIELD, condition) - .endObject() - .field(ACTIONS_FIELD, actions.toTypedArray()) - .endObject() - .endObject() - return builder - } - - override fun name(): String { - return DOCUMENT_LEVEL_TRIGGER_FIELD - } - - /** Returns a representation of the trigger suitable for passing into painless and mustache scripts. 
*/ - fun asTemplateArg(): Map { - return mapOf( - ID_FIELD to id, - NAME_FIELD to name, - SEVERITY_FIELD to severity, - ACTIONS_FIELD to actions.map { it.asTemplateArg() } - ) - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeString(name) - out.writeString(severity) - out.writeCollection(actions) - condition.writeTo(out) - } - - companion object { - const val DOCUMENT_LEVEL_TRIGGER_FIELD = "document_level_trigger" - const val CONDITION_FIELD = "condition" - const val SCRIPT_FIELD = "script" - const val QUERY_IDS_FIELD = "query_ids" - - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - Trigger::class.java, ParseField(DOCUMENT_LEVEL_TRIGGER_FIELD), - CheckedFunction { parseInner(it) } - ) - - @JvmStatic @Throws(IOException::class) - fun parseInner(xcp: XContentParser): DocumentLevelTrigger { - var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified - lateinit var name: String - lateinit var severity: String - lateinit var condition: Script - val queryIds: MutableList = mutableListOf() - val actions: MutableList = mutableListOf() - - if (xcp.currentToken() != Token.START_OBJECT && xcp.currentToken() != Token.FIELD_NAME) { - XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) - } - - // If the parser began on START_OBJECT, move to the next token so that the while loop enters on - // the fieldName (or END_OBJECT if it's empty). - if (xcp.currentToken() == Token.START_OBJECT) xcp.nextToken() - - while (xcp.currentToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - - xcp.nextToken() - when (fieldName) { - ID_FIELD -> id = xcp.text() - NAME_FIELD -> name = xcp.text() - SEVERITY_FIELD -> severity = xcp.text() - CONDITION_FIELD -> { - xcp.nextToken() - condition = Script.parse(xcp) - require(condition.lang == Script.DEFAULT_SCRIPT_LANG) { - "Invalid script language. Allowed languages are [${Script.DEFAULT_SCRIPT_LANG}]" - } - xcp.nextToken() - } - QUERY_IDS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - queryIds.add(xcp.text()) - } - } - ACTIONS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - actions.add(Action.parse(xcp)) - } - } - } - xcp.nextToken() - } - - return DocumentLevelTrigger( - name = requireNotNull(name) { "Trigger name is null" }, - severity = requireNotNull(severity) { "Trigger severity is null" }, - condition = requireNotNull(condition) { "Trigger condition is null" }, - actions = requireNotNull(actions) { "Trigger actions are null" }, - id = requireNotNull(id) { "Trigger id is null." 
} - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): DocumentLevelTrigger { - return DocumentLevelTrigger(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt deleted file mode 100644 index 1e8a186ff..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Finding.kt +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.opensearchapi.instant -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.time.Instant - -/** - * A wrapper of the log event that enriches the event by also including information about the monitor it triggered. - */ -class Finding( - val id: String = NO_ID, - val relatedDocIds: List, - val monitorId: String, - val monitorName: String, - val index: String, - val docLevelQueries: List, - val timestamp: Instant -) : Writeable, ToXContent { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - id = sin.readString(), - relatedDocIds = sin.readStringList(), - monitorId = sin.readString(), - monitorName = sin.readString(), - index = sin.readString(), - docLevelQueries = sin.readList((DocLevelQuery)::readFrom), - timestamp = sin.readInstant() - ) - - fun asTemplateArg(): Map { - return mapOf( - FINDING_ID_FIELD to id, - RELATED_DOC_IDS_FIELD to relatedDocIds, - MONITOR_ID_FIELD to monitorId, - MONITOR_NAME_FIELD to monitorName, - INDEX_FIELD to index, - QUERIES_FIELD to docLevelQueries, - TIMESTAMP_FIELD to timestamp.toEpochMilli() - ) - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .field(FINDING_ID_FIELD, id) - .field(RELATED_DOC_IDS_FIELD, relatedDocIds) - .field(MONITOR_ID_FIELD, monitorId) - .field(MONITOR_NAME_FIELD, monitorName) - .field(INDEX_FIELD, index) - .field(QUERIES_FIELD, docLevelQueries.toTypedArray()) - .field(TIMESTAMP_FIELD, timestamp.toEpochMilli()) - builder.endObject() - return builder - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeStringCollection(relatedDocIds) - out.writeString(monitorId) - out.writeString(monitorName) - out.writeString(index) - out.writeCollection(docLevelQueries) - out.writeInstant(timestamp) - } - - companion object { - const val FINDING_ID_FIELD = "id" - const val RELATED_DOC_IDS_FIELD = "related_doc_ids" - const val MONITOR_ID_FIELD = "monitor_id" - const val MONITOR_NAME_FIELD = "monitor_name" - const val INDEX_FIELD = "index" - const val QUERIES_FIELD = "queries" - const val TIMESTAMP_FIELD = "timestamp" - const val NO_ID = "" - - @JvmStatic @JvmOverloads - @Throws(IOException::class) - fun parse(xcp: XContentParser): Finding { - var id: String = NO_ID - val relatedDocIds: MutableList = mutableListOf() - lateinit var monitorId: String - lateinit var monitorName: String - lateinit var index: String - val queries: MutableList = mutableListOf() 
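Finding, as defined above, packages the matched document ids with the monitor and query metadata that produced them. A minimal sketch of building one and rendering it through toXContent, assuming the constructor keeps the shape shown in this removed file after the model leaves this module; every literal below is a placeholder, and docLevelQueries is left empty to avoid assuming the DocLevelQuery constructor signature:

// Hypothetical usage sketch; not part of this change.
// import org.opensearch.alerting.model.Finding  // pre-move location (removed in this diff)
import org.opensearch.common.Strings
import org.opensearch.common.xcontent.ToXContent
import org.opensearch.common.xcontent.XContentFactory
import java.time.Instant

fun findingToJsonSketch(): String {
    val finding = Finding(
        id = "finding-1",                          // placeholder id
        relatedDocIds = listOf("doc-1", "doc-2"),  // placeholder matched document ids
        monitorId = "monitor-1",
        monitorName = "example-monitor",
        index = "logs-app",
        docLevelQueries = emptyList(),             // skipped so the sketch does not assume DocLevelQuery's constructor
        timestamp = Instant.now()
    )
    val builder = finding.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)
    return Strings.toString(builder) // JSON with the id/related_doc_ids/monitor_*/index/queries/timestamp fields above
}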
- lateinit var timestamp: Instant - - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - FINDING_ID_FIELD -> id = xcp.text() - RELATED_DOC_IDS_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - relatedDocIds.add(xcp.text()) - } - } - MONITOR_ID_FIELD -> monitorId = xcp.text() - MONITOR_NAME_FIELD -> monitorName = xcp.text() - INDEX_FIELD -> index = xcp.text() - QUERIES_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - queries.add(DocLevelQuery.parse(xcp)) - } - } - TIMESTAMP_FIELD -> { - timestamp = requireNotNull(xcp.instant()) - } - } - } - - return Finding( - id = id, - relatedDocIds = relatedDocIds, - monitorId = monitorId, - monitorName = monitorName, - index = index, - docLevelQueries = queries, - timestamp = timestamp - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Finding { - return Finding(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/FindingDocument.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/FindingDocument.kt deleted file mode 100644 index bb6728b35..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/FindingDocument.kt +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.apache.logging.log4j.LogManager -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import java.io.IOException - -private val log = LogManager.getLogger(FindingDocument::class.java) - -class FindingDocument( - val index: String, - val id: String, - val found: Boolean, - val document: String -) : Writeable, ToXContent { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - index = sin.readString(), - id = sin.readString(), - found = sin.readBoolean(), - document = sin.readString() - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .field(INDEX_FIELD, index) - .field(FINDING_DOCUMENT_ID_FIELD, id) - .field(FOUND_FIELD, found) - .field(DOCUMENT_FIELD, document) - .endObject() - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(index) - out.writeString(id) - out.writeBoolean(found) - out.writeString(document) - } - - companion object { - const val INDEX_FIELD = "index" - const val FINDING_DOCUMENT_ID_FIELD = "id" - const val FOUND_FIELD = "found" - const val DOCUMENT_FIELD = "document" - const val NO_ID = "" - const val NO_INDEX = "" - - @JvmStatic @JvmOverloads - @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, index: String = NO_INDEX): FindingDocument { - var found = false - var document: String = "" - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while 
(xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - FOUND_FIELD -> found = xcp.booleanValue() - DOCUMENT_FIELD -> document = xcp.text() - } - } - - return FindingDocument( - index = index, - id = id, - found = found, - document = document - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): FindingDocument { - return FindingDocument(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/FindingWithDocs.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/FindingWithDocs.kt deleted file mode 100644 index 5fbcb98ff..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/FindingWithDocs.kt +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.apache.logging.log4j.LogManager -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import java.io.IOException - -private val log = LogManager.getLogger(Finding::class.java) - -class FindingWithDocs( - val finding: Finding, - val documents: List -) : Writeable, ToXContent { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - finding = Finding.readFrom(sin), - documents = sin.readList((FindingDocument)::readFrom) - ) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - finding.writeTo(out) - documents.forEach { - it.writeTo(out) - } - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .field(FINDING_FIELD, finding) - .field(DOCUMENTS_FIELD, documents) - builder.endObject() - return builder - } - - companion object { - const val FINDING_FIELD = "finding" - const val DOCUMENTS_FIELD = "document_list" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): FindingWithDocs { - lateinit var finding: Finding - val documents: MutableList = mutableListOf() - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - FINDING_FIELD -> finding = Finding.parse(xcp) - DOCUMENTS_FIELD -> { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - documents.add(FindingDocument.parse(xcp)) - } - } - } - } - - return FindingWithDocs( - finding = finding, - documents = documents - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): FindingWithDocs { - return FindingWithDocs(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt deleted file mode 100644 index ddb29be2b..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Monitor.kt +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model 
- -import org.opensearch.alerting.core.model.ClusterMetricsInput -import org.opensearch.alerting.core.model.CronSchedule -import org.opensearch.alerting.core.model.Input -import org.opensearch.alerting.core.model.Schedule -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.opensearchapi.instant -import org.opensearch.alerting.opensearchapi.optionalTimeField -import org.opensearch.alerting.opensearchapi.optionalUserField -import org.opensearch.alerting.settings.AlertingSettings.Companion.MONITOR_MAX_INPUTS -import org.opensearch.alerting.settings.AlertingSettings.Companion.MONITOR_MAX_TRIGGERS -import org.opensearch.alerting.settings.SupportedClusterMetricsSettings -import org.opensearch.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION -import org.opensearch.alerting.util._ID -import org.opensearch.alerting.util._VERSION -import org.opensearch.alerting.util.isBucketLevelMonitor -import org.opensearch.common.CheckedFunction -import org.opensearch.common.ParseField -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User -import java.io.IOException -import java.time.Instant -import java.util.Locale - -/** - * A value object that represents a Monitor. Monitors are used to periodically execute a source query and check the - * results. - */ -data class Monitor( - override val id: String = NO_ID, - override val version: Long = NO_VERSION, - override val name: String, - override val enabled: Boolean, - override val schedule: Schedule, - override val lastUpdateTime: Instant, - override val enabledTime: Instant?, - // TODO: Check how this behaves during rolling upgrade/multi-version cluster - // Can read/write and parsing break if it's done from an old -> new version of the plugin? - val monitorType: MonitorType, - val user: User?, - val schemaVersion: Int = NO_SCHEMA_VERSION, - val inputs: List<Input>, - val triggers: List<Trigger>, - val uiMetadata: Map<String, Any>, - val dataSources: DataSources = DataSources(), - val owner: String? = "alerting" -) : ScheduledJob { - - override val type = MONITOR_TYPE - - init { - // Ensure that trigger ids are unique within a monitor - val triggerIds = mutableSetOf<String>() - triggers.forEach { trigger -> - require(triggerIds.add(trigger.id)) { "Duplicate trigger id: ${trigger.id}. Trigger ids must be unique."
} - // Verify Trigger type based on Monitor type - when (monitorType) { - MonitorType.QUERY_LEVEL_MONITOR -> - require(trigger is QueryLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - MonitorType.BUCKET_LEVEL_MONITOR -> - require(trigger is BucketLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - MonitorType.CLUSTER_METRICS_MONITOR -> - require(trigger is QueryLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - MonitorType.DOC_LEVEL_MONITOR -> - require(trigger is DocumentLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - } - } - if (enabled) { - requireNotNull(enabledTime) - } else { - require(enabledTime == null) - } - require(inputs.size <= MONITOR_MAX_INPUTS) { "Monitors can only have $MONITOR_MAX_INPUTS search input." } - require(triggers.size <= MONITOR_MAX_TRIGGERS) { "Monitors can only support up to $MONITOR_MAX_TRIGGERS triggers." } - if (this.isBucketLevelMonitor()) { - inputs.forEach { input -> - require(input is SearchInput) { "Unsupported input [$input] for Monitor" } - // TODO: Keeping query validation simple for now, only term aggregations have full support for the "group by" on the - // initial release. Should either add tests for other aggregation types or add validation to prevent using them. - require(input.query.aggregations() != null && !input.query.aggregations().aggregatorFactories.isEmpty()) { - "At least one aggregation is required for the input [$input]" - } - } - } - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - id = sin.readString(), - version = sin.readLong(), - name = sin.readString(), - enabled = sin.readBoolean(), - schedule = Schedule.readFrom(sin), - lastUpdateTime = sin.readInstant(), - enabledTime = sin.readOptionalInstant(), - monitorType = sin.readEnum(MonitorType::class.java), - user = if (sin.readBoolean()) { - User(sin) - } else null, - schemaVersion = sin.readInt(), - inputs = sin.readList((Input)::readFrom), - triggers = sin.readList((Trigger)::readFrom), - uiMetadata = suppressWarning(sin.readMap()), - dataSources = if (sin.readBoolean()) { - DataSources(sin) - } else { - DataSources() - }, - owner = sin.readOptionalString() - ) - - // This enum classifies different Monitors - // This is different from 'type' which denotes the Scheduled Job type - enum class MonitorType(val value: String) { - QUERY_LEVEL_MONITOR("query_level_monitor"), - BUCKET_LEVEL_MONITOR("bucket_level_monitor"), - CLUSTER_METRICS_MONITOR("cluster_metrics_monitor"), - DOC_LEVEL_MONITOR("doc_level_monitor"); - - override fun toString(): String { - return value - } - } - - /** Returns a representation of the monitor suitable for passing into painless and mustache scripts. 
*/ - fun asTemplateArg(): Map { - return mapOf(_ID to id, _VERSION to version, NAME_FIELD to name, ENABLED_FIELD to enabled) - } - - fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, params, false) - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, params, true) - } - - private fun createXContentBuilder(builder: XContentBuilder, params: ToXContent.Params, secure: Boolean): XContentBuilder { - builder.startObject() - if (params.paramAsBoolean("with_type", false)) builder.startObject(type) - builder.field(TYPE_FIELD, type) - .field(SCHEMA_VERSION_FIELD, schemaVersion) - .field(NAME_FIELD, name) - .field(MONITOR_TYPE_FIELD, monitorType) - - if (!secure) { - builder.optionalUserField(USER_FIELD, user) - } - - builder.field(ENABLED_FIELD, enabled) - .optionalTimeField(ENABLED_TIME_FIELD, enabledTime) - .field(SCHEDULE_FIELD, schedule) - .field(INPUTS_FIELD, inputs.toTypedArray()) - .field(TRIGGERS_FIELD, triggers.toTypedArray()) - .optionalTimeField(LAST_UPDATE_TIME_FIELD, lastUpdateTime) - if (uiMetadata.isNotEmpty()) builder.field(UI_METADATA_FIELD, uiMetadata) - builder.field(DATA_SOURCES_FIELD, dataSources) - builder.field(OWNER_FIELD, owner) - if (params.paramAsBoolean("with_type", false)) builder.endObject() - return builder.endObject() - } - - override fun fromDocument(id: String, version: Long): Monitor = copy(id = id, version = version) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeLong(version) - out.writeString(name) - out.writeBoolean(enabled) - if (schedule is CronSchedule) { - out.writeEnum(Schedule.TYPE.CRON) - } else { - out.writeEnum(Schedule.TYPE.INTERVAL) - } - schedule.writeTo(out) - out.writeInstant(lastUpdateTime) - out.writeOptionalInstant(enabledTime) - out.writeEnum(monitorType) - out.writeBoolean(user != null) - user?.writeTo(out) - out.writeInt(schemaVersion) - // Outputting type with each Input so that the generic Input.readFrom() can read it - out.writeVInt(inputs.size) - inputs.forEach { - if (it is SearchInput) out.writeEnum(Input.Type.SEARCH_INPUT) - else out.writeEnum(Input.Type.DOCUMENT_LEVEL_INPUT) - it.writeTo(out) - } - // Outputting type with each Trigger so that the generic Trigger.readFrom() can read it - out.writeVInt(triggers.size) - triggers.forEach { - when (it) { - is BucketLevelTrigger -> out.writeEnum(Trigger.Type.BUCKET_LEVEL_TRIGGER) - is DocumentLevelTrigger -> out.writeEnum(Trigger.Type.DOCUMENT_LEVEL_TRIGGER) - else -> out.writeEnum(Trigger.Type.QUERY_LEVEL_TRIGGER) - } - it.writeTo(out) - } - out.writeMap(uiMetadata) - out.writeBoolean(dataSources != null) // for backward compatibility with pre-existing monitors which don't have datasources field - dataSources.writeTo(out) - out.writeOptionalString(owner) - } - - companion object { - const val MONITOR_TYPE = "monitor" - const val TYPE_FIELD = "type" - const val MONITOR_TYPE_FIELD = "monitor_type" - const val SCHEMA_VERSION_FIELD = "schema_version" - const val NAME_FIELD = "name" - const val USER_FIELD = "user" - const val ENABLED_FIELD = "enabled" - const val SCHEDULE_FIELD = "schedule" - const val TRIGGERS_FIELD = "triggers" - const val NO_ID = "" - const val NO_VERSION = 1L - const val INPUTS_FIELD = "inputs" - const val LAST_UPDATE_TIME_FIELD = "last_update_time" - const val UI_METADATA_FIELD = "ui_metadata" - const val DATA_SOURCES_FIELD = 
"data_sources" - const val OWNER_FIELD = "owner" - const val ENABLED_TIME_FIELD = "enabled_time" - - // This is defined here instead of in ScheduledJob to avoid having the ScheduledJob class know about all - // the different subclasses and creating circular dependencies - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - ScheduledJob::class.java, - ParseField(MONITOR_TYPE), - CheckedFunction { parse(it) } - ) - - @JvmStatic - @JvmOverloads - @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): Monitor { - var name: String? = null - // Default to QUERY_LEVEL_MONITOR to cover Monitors that existed before the addition of MonitorType - var monitorType: String = MonitorType.QUERY_LEVEL_MONITOR.toString() - var user: User? = null - var schedule: Schedule? = null - var lastUpdateTime: Instant? = null - var enabledTime: Instant? = null - var uiMetadata: Map = mapOf() - var enabled = true - var schemaVersion = NO_SCHEMA_VERSION - val triggers: MutableList = mutableListOf() - val inputs: MutableList = mutableListOf() - var dataSources = DataSources() - var owner = "alerting" - - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() - NAME_FIELD -> name = xcp.text() - MONITOR_TYPE_FIELD -> { - monitorType = xcp.text() - val allowedTypes = MonitorType.values().map { it.value } - if (!allowedTypes.contains(monitorType)) { - throw IllegalStateException("Monitor type should be one of $allowedTypes") - } - } - USER_FIELD -> user = if (xcp.currentToken() == Token.VALUE_NULL) null else User.parse(xcp) - ENABLED_FIELD -> enabled = xcp.booleanValue() - SCHEDULE_FIELD -> schedule = Schedule.parse(xcp) - INPUTS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - val input = Input.parse(xcp) - if (input is ClusterMetricsInput) - SupportedClusterMetricsSettings.validateApiType(input) - inputs.add(input) - } - } - TRIGGERS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - triggers.add(Trigger.parse(xcp)) - } - } - ENABLED_TIME_FIELD -> enabledTime = xcp.instant() - LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() - UI_METADATA_FIELD -> uiMetadata = xcp.map() - DATA_SOURCES_FIELD -> dataSources = if (xcp.currentToken() == Token.VALUE_NULL) DataSources() - else DataSources.parse(xcp) - OWNER_FIELD -> owner = if (xcp.currentToken() == Token.VALUE_NULL) owner - else xcp.text() - else -> { - xcp.skipChildren() - } - } - } - - if (enabled && enabledTime == null) { - enabledTime = Instant.now() - } else if (!enabled) { - enabledTime = null - } - return Monitor( - id, - version, - requireNotNull(name) { "Monitor name is null" }, - enabled, - requireNotNull(schedule) { "Monitor schedule is null" }, - lastUpdateTime ?: Instant.now(), - enabledTime, - MonitorType.valueOf(monitorType.uppercase(Locale.ROOT)), - user, - schemaVersion, - inputs.toList(), - triggers.toList(), - uiMetadata, - dataSources, - owner - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Monitor? 
{ - return Monitor(sin) - } - - @Suppress("UNCHECKED_CAST") - fun suppressWarning(map: MutableMap?): MutableMap { - return map as MutableMap - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorMetadata.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorMetadata.kt index a78972a33..2007139f1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorMetadata.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorMetadata.kt @@ -5,7 +5,6 @@ package org.opensearch.alerting.model -import org.opensearch.alerting.opensearchapi.instant import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.io.stream.Writeable @@ -13,6 +12,8 @@ import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.util.instant import java.io.IOException import java.time.Instant diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorRunResult.kt index 7e13f9281..38445a0ec 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/MonitorRunResult.kt @@ -7,13 +7,14 @@ package org.opensearch.alerting.model import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchException -import org.opensearch.alerting.alerts.AlertError -import org.opensearch.alerting.opensearchapi.optionalTimeField import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.io.stream.Writeable import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.util.optionalTimeField import org.opensearch.script.ScriptException import java.io.IOException import java.time.Instant diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTrigger.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTrigger.kt deleted file mode 100644 index d8b00442d..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTrigger.kt +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.model.Trigger.Companion.ACTIONS_FIELD -import org.opensearch.alerting.model.Trigger.Companion.ID_FIELD -import org.opensearch.alerting.model.Trigger.Companion.NAME_FIELD -import org.opensearch.alerting.model.Trigger.Companion.SEVERITY_FIELD -import org.opensearch.alerting.model.action.Action -import org.opensearch.common.CheckedFunction -import org.opensearch.common.ParseField -import org.opensearch.common.UUIDs -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import 
org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.script.Script -import java.io.IOException - -/** - * A single-alert Trigger that uses Painless scripts which execute on the response of the Monitor input query to define - * alerting conditions. - */ -data class QueryLevelTrigger( - override val id: String = UUIDs.base64UUID(), - override val name: String, - override val severity: String, - override val actions: List, - val condition: Script -) : Trigger { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // id - sin.readString(), // name - sin.readString(), // severity - sin.readList(::Action), // actions - Script(sin) // condition - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(QUERY_LEVEL_TRIGGER_FIELD) - .field(ID_FIELD, id) - .field(NAME_FIELD, name) - .field(SEVERITY_FIELD, severity) - .startObject(CONDITION_FIELD) - .field(SCRIPT_FIELD, condition) - .endObject() - .field(ACTIONS_FIELD, actions.toTypedArray()) - .endObject() - .endObject() - return builder - } - - override fun name(): String { - return QUERY_LEVEL_TRIGGER_FIELD - } - - /** Returns a representation of the trigger suitable for passing into painless and mustache scripts. */ - fun asTemplateArg(): Map { - return mapOf( - ID_FIELD to id, NAME_FIELD to name, SEVERITY_FIELD to severity, - ACTIONS_FIELD to actions.map { it.asTemplateArg() } - ) - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeString(name) - out.writeString(severity) - out.writeCollection(actions) - condition.writeTo(out) - } - - companion object { - const val QUERY_LEVEL_TRIGGER_FIELD = "query_level_trigger" - const val CONDITION_FIELD = "condition" - const val SCRIPT_FIELD = "script" - - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - Trigger::class.java, ParseField(QUERY_LEVEL_TRIGGER_FIELD), - CheckedFunction { parseInner(it) } - ) - - /** - * This parse method needs to account for both the old and new Trigger format. - * In the old format, only one Trigger existed (which is now QueryLevelTrigger) and it was - * not a named object. - * - * The parse() method in the Trigger interface needs to consume the outer START_OBJECT to be able - * to infer whether it is dealing with the old or new Trigger format. This means that the currentToken at - * the time this parseInner method is called could differ based on which format is being dealt with. - * - * Old Format - * ---------- - * { - * "id": ..., - * ^ - * Current token starts here - * "name" ..., - * ... - * } - * - * New Format - * ---------- - * { - * "query_level_trigger": { - * "id": ..., ^ Current token starts here - * "name": ..., - * ... - * } - * } - * - * It isn't typically conventional but this parse method will account for both START_OBJECT - * and FIELD_NAME as the starting token to cover both cases. 
- */ - @JvmStatic @Throws(IOException::class) - fun parseInner(xcp: XContentParser): QueryLevelTrigger { - var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified - lateinit var name: String - lateinit var severity: String - lateinit var condition: Script - val actions: MutableList = mutableListOf() - - if (xcp.currentToken() != Token.START_OBJECT && xcp.currentToken() != Token.FIELD_NAME) { - XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) - } - - // If the parser began on START_OBJECT, move to the next token so that the while loop enters on - // the fieldName (or END_OBJECT if it's empty). - if (xcp.currentToken() == Token.START_OBJECT) xcp.nextToken() - - while (xcp.currentToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - - xcp.nextToken() - when (fieldName) { - ID_FIELD -> id = xcp.text() - NAME_FIELD -> name = xcp.text() - SEVERITY_FIELD -> severity = xcp.text() - CONDITION_FIELD -> { - xcp.nextToken() - condition = Script.parse(xcp) - require(condition.lang == Script.DEFAULT_SCRIPT_LANG) { - "Invalid script language. Allowed languages are [${Script.DEFAULT_SCRIPT_LANG}]" - } - xcp.nextToken() - } - ACTIONS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - actions.add(Action.parse(xcp)) - } - } - } - xcp.nextToken() - } - - return QueryLevelTrigger( - name = requireNotNull(name) { "Trigger name is null" }, - severity = requireNotNull(severity) { "Trigger severity is null" }, - condition = requireNotNull(condition) { "Trigger condition is null" }, - actions = requireNotNull(actions) { "Trigger actions are null" }, - id = requireNotNull(id) { "Trigger id is null." } - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): QueryLevelTrigger { - return QueryLevelTrigger(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTriggerRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTriggerRunResult.kt index 190df4f3b..a4a90c003 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTriggerRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/QueryLevelTriggerRunResult.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.model -import org.opensearch.alerting.alerts.AlertError import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.alerts.AlertError import org.opensearch.script.ScriptException import java.io.IOException import java.time.Instant diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Table.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Table.kt deleted file mode 100644 index 0a9ff0e4f..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Table.kt +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import java.io.IOException - -data class Table( - val sortOrder: String, - val sortString: String, - val missing: String?, - val size: Int, - val startIndex: Int, - val searchString: String? 
-) : Writeable { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sortOrder = sin.readString(), - sortString = sin.readString(), - missing = sin.readOptionalString(), - size = sin.readInt(), - startIndex = sin.readInt(), - searchString = sin.readOptionalString() - ) - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(sortOrder) - out.writeString(sortString) - out.writeOptionalString(missing) - out.writeInt(size) - out.writeInt(startIndex) - out.writeOptionalString(searchString) - } - - companion object { - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Table { - return Table(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/Trigger.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/Trigger.kt deleted file mode 100644 index e3a9b12ab..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/Trigger.kt +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX -import org.opensearch.alerting.model.action.Action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException - -interface Trigger : Writeable, ToXContentObject { - - enum class Type(val value: String) { - DOCUMENT_LEVEL_TRIGGER(DocumentLevelTrigger.DOCUMENT_LEVEL_TRIGGER_FIELD), - QUERY_LEVEL_TRIGGER(QueryLevelTrigger.QUERY_LEVEL_TRIGGER_FIELD), - BUCKET_LEVEL_TRIGGER(BucketLevelTrigger.BUCKET_LEVEL_TRIGGER_FIELD); - - override fun toString(): String { - return value - } - } - - companion object { - const val ID_FIELD = "id" - const val NAME_FIELD = "name" - const val SEVERITY_FIELD = "severity" - const val ACTIONS_FIELD = "actions" - - @Throws(IOException::class) - fun parse(xcp: XContentParser): Trigger { - val trigger: Trigger - - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - ensureExpectedToken(Token.FIELD_NAME, xcp.nextToken(), xcp) - val triggerTypeNames = Type.values().map { it.toString() } - if (triggerTypeNames.contains(xcp.currentName())) { - ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) - trigger = xcp.namedObject(Trigger::class.java, xcp.currentName(), null) - ensureExpectedToken(Token.END_OBJECT, xcp.nextToken(), xcp) - } else { - // Infer the old Trigger (now called QueryLevelTrigger) when it is not defined as a named - // object to remain backwards compatible when parsing the old format - trigger = QueryLevelTrigger.parseInner(xcp) - ensureExpectedToken(Token.END_OBJECT, xcp.currentToken(), xcp) - } - return trigger - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Trigger { - return when (val type = sin.readEnum(Trigger.Type::class.java)) { - Type.QUERY_LEVEL_TRIGGER -> QueryLevelTrigger(sin) - Type.BUCKET_LEVEL_TRIGGER -> BucketLevelTrigger(sin) - Type.DOCUMENT_LEVEL_TRIGGER -> DocumentLevelTrigger(sin) - // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns - // enum can be null in Java - else -> throw IllegalStateException("Unexpected input [$type] when reading Trigger") - } - 
} - } - - /** The id of the Trigger in the [SCHEDULED_JOBS_INDEX] */ - val id: String - - /** The name of the Trigger */ - val name: String - - /** The severity of the Trigger, used to classify the subsequent Alert */ - val severity: String - - /** The actions executed if the Trigger condition evaluates to true */ - val actions: List - - fun name(): String -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/TriggerRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/TriggerRunResult.kt index 0522076f9..137414dba 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/TriggerRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/TriggerRunResult.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.model -import org.opensearch.alerting.alerts.AlertError import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.io.stream.Writeable import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.alerts.AlertError import java.io.IOException import java.time.Instant diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/action/Action.kt deleted file mode 100644 index 6e38a2b31..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/Action.kt +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model.action - -import org.opensearch.common.UUIDs -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.script.Script -import java.io.IOException - -/** - * This class holds the data and parser logic for Action which is part of a trigger - */ -data class Action( - val name: String, - val destinationId: String, - val subjectTemplate: Script?, - val messageTemplate: Script, - val throttleEnabled: Boolean, - val throttle: Throttle?, - val id: String = UUIDs.base64UUID(), - val actionExecutionPolicy: ActionExecutionPolicy? 
= null -) : Writeable, ToXContentObject { - - init { - if (subjectTemplate != null) { - require(subjectTemplate.lang == MUSTACHE) { "subject_template must be a mustache script" } - } - require(messageTemplate.lang == MUSTACHE) { "message_template must be a mustache script" } - - if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { - require(throttle == null) { "Throttle is currently not supported for per execution action scope" } - } - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // name - sin.readString(), // destinationId - sin.readOptionalWriteable(::Script), // subjectTemplate - Script(sin), // messageTemplate - sin.readBoolean(), // throttleEnabled - sin.readOptionalWriteable(::Throttle), // throttle - sin.readString(), // id - sin.readOptionalWriteable(::ActionExecutionPolicy) // actionExecutionPolicy - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - val xContentBuilder = builder.startObject() - .field(ID_FIELD, id) - .field(NAME_FIELD, name) - .field(DESTINATION_ID_FIELD, destinationId) - .field(MESSAGE_TEMPLATE_FIELD, messageTemplate) - .field(THROTTLE_ENABLED_FIELD, throttleEnabled) - if (subjectTemplate != null) { - xContentBuilder.field(SUBJECT_TEMPLATE_FIELD, subjectTemplate) - } - if (throttle != null) { - xContentBuilder.field(THROTTLE_FIELD, throttle) - } - if (actionExecutionPolicy != null) { - xContentBuilder.field(ACTION_EXECUTION_POLICY_FIELD, actionExecutionPolicy) - } - return xContentBuilder.endObject() - } - - fun asTemplateArg(): Map { - return mapOf(NAME_FIELD to name) - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(name) - out.writeString(destinationId) - if (subjectTemplate != null) { - out.writeBoolean(true) - subjectTemplate.writeTo(out) - } else { - out.writeBoolean(false) - } - messageTemplate.writeTo(out) - out.writeBoolean(throttleEnabled) - if (throttle != null) { - out.writeBoolean(true) - throttle.writeTo(out) - } else { - out.writeBoolean(false) - } - out.writeString(id) - if (actionExecutionPolicy != null) { - out.writeBoolean(true) - actionExecutionPolicy.writeTo(out) - } else { - out.writeBoolean(false) - } - } - - companion object { - const val ID_FIELD = "id" - const val NAME_FIELD = "name" - const val DESTINATION_ID_FIELD = "destination_id" - const val SUBJECT_TEMPLATE_FIELD = "subject_template" - const val MESSAGE_TEMPLATE_FIELD = "message_template" - const val THROTTLE_ENABLED_FIELD = "throttle_enabled" - const val THROTTLE_FIELD = "throttle" - const val ACTION_EXECUTION_POLICY_FIELD = "action_execution_policy" - const val MUSTACHE = "mustache" - const val SUBJECT = "subject" - const val MESSAGE = "message" - const val MESSAGE_ID = "messageId" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): Action { - var id = UUIDs.base64UUID() // assign a default action id if one is not specified - lateinit var name: String - lateinit var destinationId: String - var subjectTemplate: Script? = null // subject template could be null for some destinations - lateinit var messageTemplate: Script - var throttleEnabled = false - var throttle: Throttle? = null - var actionExecutionPolicy: ActionExecutionPolicy? 
= null - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - when (fieldName) { - ID_FIELD -> id = xcp.text() - NAME_FIELD -> name = xcp.textOrNull() - DESTINATION_ID_FIELD -> destinationId = xcp.textOrNull() - SUBJECT_TEMPLATE_FIELD -> { - subjectTemplate = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else - Script.parse(xcp, Script.DEFAULT_TEMPLATE_LANG) - } - MESSAGE_TEMPLATE_FIELD -> messageTemplate = Script.parse(xcp, Script.DEFAULT_TEMPLATE_LANG) - THROTTLE_FIELD -> { - throttle = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else Throttle.parse(xcp) - } - THROTTLE_ENABLED_FIELD -> { - throttleEnabled = xcp.booleanValue() - } - ACTION_EXECUTION_POLICY_FIELD -> { - actionExecutionPolicy = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { - null - } else { - ActionExecutionPolicy.parse(xcp) - } - } - else -> { - throw IllegalStateException("Unexpected field: $fieldName, while parsing action") - } - } - } - - if (throttleEnabled) { - requireNotNull(throttle, { "Action throttle enabled but not set throttle value" }) - } - - return Action( - requireNotNull(name) { "Action name is null" }, - requireNotNull(destinationId) { "Destination id is null" }, - subjectTemplate, - requireNotNull(messageTemplate) { "Action message template is null" }, - throttleEnabled, - throttle, - id = requireNotNull(id), - actionExecutionPolicy = actionExecutionPolicy - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Action { - return Action(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/ActionExecutionPolicy.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/action/ActionExecutionPolicy.kt deleted file mode 100644 index 809f06624..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/ActionExecutionPolicy.kt +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model.action - -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException - -/** - * This class represents the container for various configurations which control Action behavior. 
- */ -data class ActionExecutionPolicy( - val actionExecutionScope: ActionExecutionScope -) : Writeable, ToXContentObject { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this ( - ActionExecutionScope.readFrom(sin) // actionExecutionScope - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .field(ACTION_EXECUTION_SCOPE, actionExecutionScope) - return builder.endObject() - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - if (actionExecutionScope is PerAlertActionScope) { - out.writeEnum(ActionExecutionScope.Type.PER_ALERT) - } else { - out.writeEnum(ActionExecutionScope.Type.PER_EXECUTION) - } - actionExecutionScope.writeTo(out) - } - - companion object { - const val ACTION_EXECUTION_SCOPE = "action_execution_scope" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): ActionExecutionPolicy { - lateinit var actionExecutionScope: ActionExecutionScope - - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - ACTION_EXECUTION_SCOPE -> actionExecutionScope = ActionExecutionScope.parse(xcp) - } - } - - return ActionExecutionPolicy( - requireNotNull(actionExecutionScope) { "Action execution scope is null" } - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): ActionExecutionPolicy { - return ActionExecutionPolicy(sin) - } - - /** - * The default [ActionExecutionPolicy] configuration for Bucket-Level Monitors. - * - * If Query-Level Monitors integrate the use of [ActionExecutionPolicy] then a separate default configuration - * will need to be made depending on the desired behavior. - */ - fun getDefaultConfigurationForBucketLevelMonitor(): ActionExecutionPolicy { - val defaultActionExecutionScope = PerAlertActionScope( - actionableAlerts = setOf(AlertCategory.DEDUPED, AlertCategory.NEW) - ) - return ActionExecutionPolicy(actionExecutionScope = defaultActionExecutionScope) - } - - /** - * The default [ActionExecutionPolicy] configuration for Document-Level Monitors. - * - * If Query-Level Monitors integrate the use of [ActionExecutionPolicy] then a separate default configuration - * will need to be made depending on the desired behavior. 
- */ - fun getDefaultConfigurationForDocumentLevelMonitor(): ActionExecutionPolicy { - val defaultActionExecutionScope = PerAlertActionScope( - actionableAlerts = setOf(AlertCategory.DEDUPED, AlertCategory.NEW) - ) - return ActionExecutionPolicy(actionExecutionScope = defaultActionExecutionScope) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/ActionExecutionScope.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/action/ActionExecutionScope.kt deleted file mode 100644 index 2e0256520..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/ActionExecutionScope.kt +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model.action - -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.lang.IllegalArgumentException - -/** - * This class represents configurations used to control the scope of Action executions when Alerts are created. - */ -sealed class ActionExecutionScope : Writeable, ToXContentObject { - - enum class Type { PER_ALERT, PER_EXECUTION } - - companion object { - const val PER_ALERT_FIELD = "per_alert" - const val PER_EXECUTION_FIELD = "per_execution" - const val ACTIONABLE_ALERTS_FIELD = "actionable_alerts" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): ActionExecutionScope { - var type: Type? = null - var actionExecutionScope: ActionExecutionScope? = null - val alertFilter = mutableSetOf() - - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - // If the type field has already been set, the user has provided more than one type of schedule - if (type != null) { - throw IllegalArgumentException("You can only specify one type of action execution scope.") - } - - when (fieldName) { - PER_ALERT_FIELD -> { - type = Type.PER_ALERT - while (xcp.nextToken() != Token.END_OBJECT) { - val perAlertFieldName = xcp.currentName() - xcp.nextToken() - when (perAlertFieldName) { - ACTIONABLE_ALERTS_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - val allowedCategories = AlertCategory.values().map { it.toString() } - while (xcp.nextToken() != Token.END_ARRAY) { - val alertCategory = xcp.text() - if (!allowedCategories.contains(alertCategory)) { - throw IllegalStateException("Actionable alerts should be one of $allowedCategories") - } - alertFilter.add(AlertCategory.valueOf(alertCategory)) - } - } - else -> throw IllegalArgumentException( - "Invalid field [$perAlertFieldName] found in per alert action execution scope." 
- ) - } - } - } - PER_EXECUTION_FIELD -> { - type = Type.PER_EXECUTION - while (xcp.nextToken() != Token.END_OBJECT) {} - } - else -> throw IllegalArgumentException("Invalid field [$fieldName] found in action execution scope.") - } - } - - if (type == Type.PER_ALERT) { - actionExecutionScope = PerAlertActionScope(alertFilter) - } else if (type == Type.PER_EXECUTION) { - actionExecutionScope = PerExecutionActionScope() - } - - return requireNotNull(actionExecutionScope) { "Action execution scope is null." } - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): ActionExecutionScope { - val type = sin.readEnum(ActionExecutionScope.Type::class.java) - return if (type == Type.PER_ALERT) { - PerAlertActionScope(sin) - } else { - PerExecutionActionScope(sin) - } - } - } - - abstract fun getExecutionScope(): Type -} - -data class PerAlertActionScope( - val actionableAlerts: Set -) : ActionExecutionScope() { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readSet { si -> si.readEnum(AlertCategory::class.java) } // alertFilter - ) - - override fun getExecutionScope(): Type = Type.PER_ALERT - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(PER_ALERT_FIELD) - .field(ACTIONABLE_ALERTS_FIELD, actionableAlerts.toTypedArray()) - .endObject() - return builder.endObject() - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeCollection(actionableAlerts) { o, v -> o.writeEnum(v) } - } - - companion object { - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): PerAlertActionScope { - return PerAlertActionScope(sin) - } - } -} - -class PerExecutionActionScope() : ActionExecutionScope() { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this() - - override fun hashCode(): Int { - return javaClass.hashCode() - } - - // Creating an equals method that just checks class type rather than reference since this is currently stateless. - // Otherwise, it would have been a dataclass which would have handled this. 
- override fun equals(other: Any?): Boolean { - if (this === other) return true - if (other?.javaClass != javaClass) return false - return true - } - - override fun getExecutionScope(): Type = Type.PER_EXECUTION - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(PER_EXECUTION_FIELD) - .endObject() - return builder.endObject() - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) {} - - companion object { - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): PerExecutionActionScope { - return PerExecutionActionScope(sin) - } - } -} - -enum class AlertCategory { DEDUPED, NEW, COMPLETED } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/Throttle.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/action/Throttle.kt deleted file mode 100644 index 177345b44..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/action/Throttle.kt +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model.action - -import org.apache.commons.codec.binary.StringUtils -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import java.io.IOException -import java.time.temporal.ChronoUnit -import java.util.Locale - -data class Throttle( - val value: Int, - val unit: ChronoUnit -) : Writeable, ToXContentObject { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this ( - sin.readInt(), // value - sin.readEnum(ChronoUnit::class.java) // unit - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .field(VALUE_FIELD, value) - .field(UNIT_FIELD, unit.name) - .endObject() - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeInt(value) - out.writeEnum(unit) - } - - companion object { - const val VALUE_FIELD = "value" - const val UNIT_FIELD = "unit" - - @JvmStatic - @Throws(IOException::class) - fun parse(xcp: XContentParser): Throttle { - var value: Int = 0 - var unit: ChronoUnit = ChronoUnit.MINUTES // only support MINUTES throttle unit currently - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - when (fieldName) { - UNIT_FIELD -> { - val unitString = xcp.text().uppercase(Locale.ROOT) - require(StringUtils.equals(unitString, ChronoUnit.MINUTES.name), { "Only support MINUTES throttle unit currently" }) - unit = ChronoUnit.valueOf(unitString) - } - VALUE_FIELD -> { - val currentToken = xcp.currentToken() - require(currentToken != XContentParser.Token.VALUE_NULL, { "Throttle value can't be null" }) - when { - currentToken.isValue -> { - value = xcp.intValue() - require(value > 0, { "Can only set positive throttle period" }) - } - else -> { - XContentParserUtils.throwUnknownToken(currentToken, xcp.tokenLocation) - } - } - } - - else -> { - throw IllegalStateException("Unexpected 
field: $fieldName, while parsing action") - } - } - } - return Throttle(value = value, unit = requireNotNull(unit)) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Throttle { - return Throttle(sin) - } - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt index ce0987514..043c4bcf1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt @@ -8,11 +8,7 @@ package org.opensearch.alerting.model.destination import org.apache.logging.log4j.LogManager import org.opensearch.alerting.model.destination.email.Email import org.opensearch.alerting.opensearchapi.convertToMap -import org.opensearch.alerting.opensearchapi.instant -import org.opensearch.alerting.opensearchapi.optionalTimeField -import org.opensearch.alerting.opensearchapi.optionalUserField import org.opensearch.alerting.util.DestinationType -import org.opensearch.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION import org.opensearch.alerting.util.destinationmigration.DestinationConversionUtils.Companion.convertAlertingToNotificationMethodType import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput @@ -20,6 +16,10 @@ import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.commons.alerting.util.optionalUserField import org.opensearch.commons.authuser.User import org.opensearch.commons.destination.message.LegacyBaseMessage import org.opensearch.commons.destination.message.LegacyChimeMessage diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt index 47d6509e9..968c483fe 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt @@ -5,7 +5,6 @@ package org.opensearch.alerting.model.destination.email -import org.opensearch.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION import org.opensearch.alerting.util.isValidEmail import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput @@ -16,6 +15,7 @@ import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParser.Token import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION import java.io.IOException /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt index f0a547842..2f4790b54 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt @@ -5,7 +5,6 @@ package org.opensearch.alerting.model.destination.email -import org.opensearch.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION import org.opensearch.alerting.util.isValidEmail import org.opensearch.common.Strings import org.opensearch.common.io.stream.StreamInput @@ -16,6 +15,7 @@ import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParser.Token import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION import java.io.IOException /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt index 70c81f121..bf6e63f68 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt @@ -8,11 +8,10 @@ import org.apache.logging.log4j.LogManager import org.apache.logging.log4j.Logger import org.opensearch.action.support.WriteRequest.RefreshPolicy import org.opensearch.alerting.AlertingPlugin -import org.opensearch.alerting.action.DeleteMonitorAction -import org.opensearch.alerting.action.DeleteMonitorRequest -import org.opensearch.alerting.model.Alert import org.opensearch.alerting.util.REFRESH import org.opensearch.client.node.NodeClient +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteMonitorRequest import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import org.opensearch.rest.RestHandler.ReplacedRoute @@ -60,7 +59,7 @@ class RestDeleteMonitorAction : BaseRestHandler() { val deleteMonitorRequest = DeleteMonitorRequest(monitorId, refreshPolicy) return RestChannelConsumer { channel -> - client.execute(DeleteMonitorAction.INSTANCE, deleteMonitorRequest, RestToXContentListener(channel)) + client.execute(AlertingActions.DELETE_MONITOR_ACTION_TYPE, deleteMonitorRequest, RestToXContentListener(channel)) } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt index ee90a5cdf..2096e1ba8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt @@ -9,11 +9,11 @@ import org.apache.logging.log4j.LogManager import org.opensearch.alerting.AlertingPlugin import org.opensearch.alerting.action.ExecuteMonitorAction import org.opensearch.alerting.action.ExecuteMonitorRequest -import org.opensearch.alerting.model.Monitor import org.opensearch.client.node.NodeClient import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.XContentParser.Token.START_OBJECT import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import org.opensearch.rest.RestHandler.ReplacedRoute diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt index 3503a8e86..4ba14d59a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt @@ -7,10 +7,10 @@ package org.opensearch.alerting.resthandler import org.apache.logging.log4j.LogManager import org.opensearch.alerting.AlertingPlugin -import org.opensearch.alerting.action.GetAlertsAction -import org.opensearch.alerting.action.GetAlertsRequest -import org.opensearch.alerting.model.Table import org.opensearch.client.node.NodeClient +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.model.Table import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import org.opensearch.rest.RestHandler.ReplacedRoute @@ -69,7 +69,7 @@ class RestGetAlertsAction : BaseRestHandler() { val getAlertsRequest = GetAlertsRequest(table, severityLevel, alertState, monitorId, null) return RestChannelConsumer { channel -> - client.execute(GetAlertsAction.INSTANCE, getAlertsRequest, RestToXContentListener(channel)) + client.execute(AlertingActions.GET_ALERTS_ACTION_TYPE, getAlertsRequest, RestToXContentListener(channel)) } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt index d6ee82f71..7e5e1530f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt @@ -9,9 +9,9 @@ import org.apache.logging.log4j.LogManager import org.opensearch.alerting.AlertingPlugin import org.opensearch.alerting.action.GetDestinationsAction import org.opensearch.alerting.action.GetDestinationsRequest -import org.opensearch.alerting.model.Table import org.opensearch.alerting.util.context import org.opensearch.client.node.NodeClient +import org.opensearch.commons.alerting.model.Table import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import org.opensearch.rest.RestHandler.ReplacedRoute diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt index 69b65d142..75607a701 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt @@ -7,10 +7,10 @@ package org.opensearch.alerting.resthandler import org.apache.logging.log4j.LogManager import org.opensearch.alerting.AlertingPlugin -import org.opensearch.alerting.action.GetFindingsAction -import org.opensearch.alerting.action.GetFindingsRequest -import org.opensearch.alerting.model.Table import org.opensearch.client.node.NodeClient +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetFindingsRequest +import org.opensearch.commons.alerting.model.Table import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import 
org.opensearch.rest.RestHandler.Route @@ -61,7 +61,7 @@ class RestGetFindingsAction : BaseRestHandler() { ) return RestChannelConsumer { channel -> - client.execute(GetFindingsAction.INSTANCE, getFindingsSearchRequest, RestToXContentListener(channel)) + client.execute(AlertingActions.GET_FINDINGS_ACTION_TYPE, getFindingsSearchRequest, RestToXContentListener(channel)) } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt index d3730e475..7925dff0a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt @@ -7,15 +7,7 @@ package org.opensearch.alerting.resthandler import org.apache.logging.log4j.LogManager import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.AlertingPlugin -import org.opensearch.alerting.action.IndexMonitorAction -import org.opensearch.alerting.action.IndexMonitorRequest -import org.opensearch.alerting.action.IndexMonitorResponse import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.BucketLevelTrigger -import org.opensearch.alerting.model.DocumentLevelTrigger -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.util.IF_PRIMARY_TERM import org.opensearch.alerting.util.IF_SEQ_NO import org.opensearch.alerting.util.REFRESH @@ -23,6 +15,14 @@ import org.opensearch.client.node.NodeClient import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentParser.Token import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.IndexMonitorRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer @@ -85,7 +85,6 @@ class RestIndexMonitorAction : BaseRestHandler() { ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) val monitor = Monitor.parse(xcp, id).copy(lastUpdateTime = Instant.now()) validateDataSources(monitor) - validateOwner(monitor.owner) val monitorType = monitor.monitorType val triggers = monitor.triggers when (monitorType) { @@ -121,13 +120,7 @@ class RestIndexMonitorAction : BaseRestHandler() { val indexMonitorRequest = IndexMonitorRequest(id, seqNo, primaryTerm, refreshPolicy, request.method(), monitor) return RestChannelConsumer { channel -> - client.execute(IndexMonitorAction.INSTANCE, indexMonitorRequest, indexMonitorResponse(channel, request.method())) - } - } - - private fun validateOwner(owner: String?) 
{ - if (owner != "alerting") { - throw IllegalArgumentException("Invalid owner field") + client.execute(AlertingActions.INDEX_MONITOR_ACTION_TYPE, indexMonitorRequest, indexMonitorResponse(channel, request.method())) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt index f979c4c50..de8314f8a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt @@ -9,7 +9,6 @@ import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.AlertingPlugin import org.opensearch.alerting.action.SearchEmailAccountAction -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.alerting.util.context import org.opensearch.client.node.NodeClient @@ -18,6 +17,7 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS import org.opensearch.common.xcontent.XContentFactory.jsonBuilder import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BytesRestResponse diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt index f6fc16f85..cc206b2ee 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt @@ -9,7 +9,6 @@ import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.AlertingPlugin import org.opensearch.alerting.action.SearchEmailGroupAction -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.alerting.model.destination.email.EmailGroup import org.opensearch.alerting.util.context import org.opensearch.client.node.NodeClient @@ -18,6 +17,7 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BytesRestResponse diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt index 3f775ef1b..064748455 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt @@ -12,8 +12,6 @@ import org.opensearch.alerting.AlertingPlugin import org.opensearch.alerting.action.SearchMonitorAction import 
org.opensearch.alerting.action.SearchMonitorRequest import org.opensearch.alerting.alerts.AlertIndices.Companion.ALL_ALERT_INDEX_PATTERN -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.context import org.opensearch.client.node.NodeClient @@ -24,6 +22,10 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS import org.opensearch.common.xcontent.XContentFactory.jsonBuilder import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX +import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import org.opensearch.rest.BytesRestResponse @@ -95,6 +97,14 @@ class RestSearchMonitorAction( searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()) searchSourceBuilder.fetchSource(context(request)) + val queryBuilder = QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + if (index == SCHEDULED_JOBS_INDEX) { + queryBuilder.filter(QueryBuilders.existsQuery(Monitor.MONITOR_TYPE)) + } + + searchSourceBuilder.query(queryBuilder) + .seqNoAndPrimaryTerm(true) + .version(true) val searchRequest = SearchRequest() .source(searchSourceBuilder) .indices(index) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt index 065e0ab80..72518ed48 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.script -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger import org.opensearch.alerting.model.BucketLevelTriggerRunResult -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.Monitor import java.time.Instant data class BucketLevelTriggerExecutionContext( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt index 67938387e..66de731f6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt @@ -5,9 +5,9 @@ package org.opensearch.alerting.script -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.DocumentLevelTrigger -import org.opensearch.alerting.model.Monitor +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Monitor import java.time.Instant data class DocumentLevelTriggerExecutionContext( diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt index 6a492c316..2c7b53097 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.script -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.model.QueryLevelTriggerRunResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger import java.time.Instant data class QueryLevelTriggerExecutionContext( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt index f00033525..7ad1bfc86 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt @@ -5,9 +5,9 @@ package org.opensearch.alerting.script -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.Trigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.Trigger import java.time.Instant abstract class TriggerExecutionContext( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt index 1268703c9..e23d44c5b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt @@ -16,9 +16,6 @@ import java.util.concurrent.TimeUnit class AlertingSettings { companion object { - - const val MONITOR_MAX_INPUTS = 1 - const val MONITOR_MAX_TRIGGERS = 10 const val DEFAULT_MAX_ACTIONABLE_ALERT_COUNT = 50L val ALERTING_MAX_MONITORS = Setting.intSetting( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt index 387b6cec9..84c000150 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt @@ -17,9 +17,6 @@ class LegacyOpenDistroAlertingSettings { companion object { - const val MONITOR_MAX_INPUTS = 1 - const val MONITOR_MAX_TRIGGERS = 10 - val ALERTING_MAX_MONITORS = Setting.intSetting( "opendistro.alerting.monitor.max_monitors", 1000, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt index fe568aee7..c2e7b27a0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt @@ -14,15 +14,16 @@ import 
org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.stats.ClusterStatsRequest import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequest import org.opensearch.action.admin.indices.recovery.RecoveryRequest -import org.opensearch.alerting.core.model.ClusterMetricsInput -import org.opensearch.alerting.core.model.ClusterMetricsInput.ClusterMetricType import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.commons.alerting.model.ClusterMetricsInput +import org.opensearch.commons.alerting.model.ClusterMetricsInput.ClusterMetricType +import org.opensearch.commons.alerting.util.IndexUtils.Companion.supportedClusterMetricsSettings /** * A class that supports storing a unique set of API paths that can be accessed by general users. */ -class SupportedClusterMetricsSettings { +class SupportedClusterMetricsSettings : org.opensearch.commons.alerting.settings.SupportedClusterMetricsSettings { companion object { const val RESOURCE_FILE = "supported_json_payloads.json" @@ -131,9 +132,17 @@ class SupportedClusterMetricsSettings { * @param clusterMetricsInput The [ClusterMetricsInput] to validate. * @throws IllegalArgumentException when supportedApiList does not contain the provided path. */ - fun validateApiType(clusterMetricsInput: ClusterMetricsInput) { + fun validateApiTyped(clusterMetricsInput: ClusterMetricsInput) { if (!supportedApiList.keys.contains(clusterMetricsInput.clusterMetricType.defaultPath)) throw IllegalArgumentException("API path not in supportedApiList.") } } + + constructor() { + supportedClusterMetricsSettings = this + } + + override fun validateApiType(clusterMetricsInput: ClusterMetricsInput) { + validateApiTyped(clusterMetricsInput) + } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt index e0e4be1d9..a0e91cbc2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt @@ -23,8 +23,6 @@ import org.opensearch.alerting.action.AcknowledgeAlertAction import org.opensearch.alerting.action.AcknowledgeAlertRequest import org.opensearch.alerting.action.AcknowledgeAlertResponse import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.opensearchapi.optionalTimeField import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException @@ -39,6 +37,8 @@ import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.util.optionalTimeField import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.tasks.Task diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt index 4f7ed1e45..7fc344a6c 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt @@ -8,16 +8,13 @@ package org.opensearch.alerting.transport import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException import org.opensearch.action.ActionListener +import org.opensearch.action.ActionRequest import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction -import org.opensearch.alerting.action.DeleteMonitorAction -import org.opensearch.alerting.action.DeleteMonitorRequest -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException import org.opensearch.client.Client @@ -28,7 +25,13 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteMonitorRequest +import org.opensearch.commons.alerting.action.DeleteMonitorResponse +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.authuser.User +import org.opensearch.commons.utils.recreateObject import org.opensearch.index.query.QueryBuilders import org.opensearch.index.reindex.BulkByScrollResponse import org.opensearch.index.reindex.DeleteByQueryAction @@ -47,8 +50,8 @@ class TransportDeleteMonitorAction @Inject constructor( val clusterService: ClusterService, settings: Settings, val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - DeleteMonitorAction.NAME, transportService, actionFilters, ::DeleteMonitorRequest +) : HandledTransportAction( + AlertingActions.DELETE_MONITOR_ACTION_NAME, transportService, actionFilters, ::DeleteMonitorRequest ), SecureTransportAction { @@ -58,22 +61,24 @@ class TransportDeleteMonitorAction @Inject constructor( listenFilterBySettingChange(clusterService) } - override fun doExecute(task: Task, request: DeleteMonitorRequest, actionListener: ActionListener) { + override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { + val transformedRequest = request as? 
DeleteMonitorRequest + ?: recreateObject(request) { DeleteMonitorRequest(it) } val user = readUserFromThreadContext(client) - val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, request.monitorId) - .setRefreshPolicy(request.refreshPolicy) + val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.monitorId) + .setRefreshPolicy(transformedRequest.refreshPolicy) if (!validateUserBackendRoles(user, actionListener)) { return } client.threadPool().threadContext.stashContext().use { - DeleteMonitorHandler(client, actionListener, deleteRequest, user, request.monitorId).resolveUserAndStart() + DeleteMonitorHandler(client, actionListener, deleteRequest, user, transformedRequest.monitorId).resolveUserAndStart() } } inner class DeleteMonitorHandler( private val client: Client, - private val actionListener: ActionListener, + private val actionListener: ActionListener, private val deleteRequest: DeleteRequest, private val user: User?, private val monitorId: String @@ -137,7 +142,7 @@ class TransportDeleteMonitorAction @Inject constructor( } deleteMetadata() - actionListener.onResponse(response) + actionListener.onResponse(DeleteMonitorResponse(response.id, response.version)) } override fun onFailure(t: Exception) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt index 8e8c9e2a4..dd804e980 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt @@ -21,8 +21,6 @@ import org.opensearch.alerting.MonitorRunnerService import org.opensearch.alerting.action.ExecuteMonitorAction import org.opensearch.alerting.action.ExecuteMonitorRequest import org.opensearch.alerting.action.ExecuteMonitorResponse -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.DocLevelMonitorQueries @@ -35,6 +33,8 @@ import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.authuser.User import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt index c4ea894f4..aca172a46 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt @@ -11,18 +11,15 @@ import kotlinx.coroutines.launch import kotlinx.coroutines.withContext import org.apache.logging.log4j.LogManager import org.opensearch.action.ActionListener +import org.opensearch.action.ActionRequest import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction -import 
org.opensearch.alerting.action.GetAlertsAction -import org.opensearch.alerting.action.GetAlertsRequest -import org.opensearch.alerting.action.GetAlertsResponse import org.opensearch.alerting.action.GetMonitorAction import org.opensearch.alerting.action.GetMonitorRequest import org.opensearch.alerting.action.GetMonitorResponse import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.model.Alert import org.opensearch.alerting.opensearchapi.addFilter import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings @@ -37,7 +34,12 @@ import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.action.GetAlertsResponse +import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.authuser.User +import org.opensearch.commons.utils.recreateObject import org.opensearch.index.query.Operator import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestRequest @@ -60,12 +62,13 @@ class TransportGetAlertsAction @Inject constructor( val settings: Settings, val xContentRegistry: NamedXContentRegistry, val transportGetMonitorAction: TransportGetMonitorAction -) : HandledTransportAction( - GetAlertsAction.NAME, transportService, actionFilters, ::GetAlertsRequest +) : HandledTransportAction( + AlertingActions.GET_ALERTS_ACTION_NAME, transportService, actionFilters, ::GetAlertsRequest ), SecureTransportAction { - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { listenFilterBySettingChange(clusterService) @@ -73,9 +76,11 @@ class TransportGetAlertsAction @Inject constructor( override fun doExecute( task: Task, - getAlertsRequest: GetAlertsRequest, + request: ActionRequest, actionListener: ActionListener ) { + val getAlertsRequest = request as? GetAlertsRequest + ?: recreateObject(request) { GetAlertsRequest(it) } val user = readUserFromThreadContext(client) val tableProp = getAlertsRequest.table @@ -140,11 +145,11 @@ class TransportGetAlertsAction @Inject constructor( suspend fun resolveAlertsIndexName(getAlertsRequest: GetAlertsRequest): String { var alertIndex = AlertIndices.ALL_ALERT_INDEX_PATTERN if (getAlertsRequest.alertIndex.isNullOrEmpty() == false) { - alertIndex = getAlertsRequest.alertIndex + alertIndex = getAlertsRequest.alertIndex!! 
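
Note on the transport-action hunks above and below: the handlers now declare plain `ActionRequest` as their request type and, when the incoming instance is not the locally loaded request class, rebuild it from its serialized form via `org.opensearch.commons.utils.recreateObject` and the request's stream constructor. A minimal, self-contained Kotlin sketch of that downcast-or-recreate fallback follows; `StreamWriteable`, `FakeActionRequest`, `FakeGetAlertsRequest`, and this local `recreateObject` are simplified stand-ins for illustration, not the OpenSearch or common-utils classes.

```kotlin
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.DataInputStream
import java.io.DataOutputStream

// Simplified stand-ins for OpenSearch's Writeable/StreamInput and ActionRequest.
interface StreamWriteable {
    fun writeTo(out: DataOutputStream)
}

open class FakeActionRequest : StreamWriteable {
    override fun writeTo(out: DataOutputStream) { /* nothing to write by default */ }
}

// Concrete request with a stream constructor, mirroring GetAlertsRequest(StreamInput).
class FakeGetAlertsRequest(val monitorId: String?) : FakeActionRequest() {
    constructor(sin: DataInputStream) : this(sin.readUTF().takeIf { it.isNotEmpty() })
    override fun writeTo(out: DataOutputStream) {
        out.writeUTF(monitorId ?: "")
    }
}

// Stand-in for commons' recreateObject: round-trip the request through bytes so the
// local request class can be rebuilt even if the sender constructed a different class.
fun <T> recreateObject(writeable: StreamWriteable, block: (DataInputStream) -> T): T {
    val bytes = ByteArrayOutputStream()
    DataOutputStream(bytes).use { writeable.writeTo(it) }
    return DataInputStream(ByteArrayInputStream(bytes.toByteArray())).use(block)
}

// The doExecute-style fallback: downcast if possible, otherwise recreate from the stream.
fun handle(request: FakeActionRequest): String {
    val transformed = request as? FakeGetAlertsRequest
        ?: recreateObject(request) { FakeGetAlertsRequest(it) }
    return transformed.monitorId ?: "all monitors"
}

fun main() {
    println(handle(FakeGetAlertsRequest("monitor-1"))) // monitor-1
}
```

The real handlers apply the same shape, passing the transformed request on to their existing resolve/validate logic.
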
} else if (getAlertsRequest.monitorId.isNullOrEmpty() == false) withContext(Dispatchers.IO) { val getMonitorRequest = GetMonitorRequest( - getAlertsRequest.monitorId, + getAlertsRequest.monitorId!!, -3L, RestRequest.Method.GET, FetchSourceContext.FETCH_SOURCE diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt index 245a1bd87..dac609783 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt @@ -14,7 +14,6 @@ import org.opensearch.action.support.HandledTransportAction import org.opensearch.alerting.action.GetDestinationsAction import org.opensearch.alerting.action.GetDestinationsRequest import org.opensearch.alerting.action.GetDestinationsResponse -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.opensearchapi.addFilter import org.opensearch.alerting.settings.AlertingSettings @@ -30,6 +29,7 @@ import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.authuser.User import org.opensearch.index.query.Operator import org.opensearch.index.query.QueryBuilders diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt index 9d771d203..f89f56f78 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt @@ -15,7 +15,6 @@ import org.opensearch.action.support.HandledTransportAction import org.opensearch.alerting.action.GetEmailAccountAction import org.opensearch.alerting.action.GetEmailAccountRequest import org.opensearch.alerting.action.GetEmailAccountResponse -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.alerting.settings.DestinationSettings.Companion.ALLOW_LIST import org.opensearch.alerting.util.AlertingException @@ -28,6 +27,7 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt index 00b6d3e6b..b9a9f15ae 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt @@ -15,7 +15,6 @@ import org.opensearch.action.support.HandledTransportAction 
import org.opensearch.alerting.action.GetEmailGroupAction import org.opensearch.alerting.action.GetEmailGroupRequest import org.opensearch.alerting.action.GetEmailGroupResponse -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.alerting.model.destination.email.EmailGroup import org.opensearch.alerting.settings.DestinationSettings.Companion.ALLOW_LIST import org.opensearch.alerting.util.AlertingException @@ -28,6 +27,7 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt index 682058d56..9b49968bc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt @@ -12,22 +12,17 @@ import kotlinx.coroutines.withContext import org.apache.logging.log4j.LogManager import org.apache.lucene.search.join.ScoreMode import org.opensearch.action.ActionListener +import org.opensearch.action.ActionRequest import org.opensearch.action.get.MultiGetRequest import org.opensearch.action.get.MultiGetResponse import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction -import org.opensearch.alerting.action.GetFindingsAction -import org.opensearch.alerting.action.GetFindingsRequest -import org.opensearch.alerting.action.GetFindingsResponse import org.opensearch.alerting.action.GetMonitorAction import org.opensearch.alerting.action.GetMonitorRequest import org.opensearch.alerting.action.GetMonitorResponse import org.opensearch.alerting.alerts.AlertIndices.Companion.ALL_FINDING_INDEX_PATTERN -import org.opensearch.alerting.model.Finding -import org.opensearch.alerting.model.FindingDocument -import org.opensearch.alerting.model.FindingWithDocs import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException @@ -42,6 +37,13 @@ import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetFindingsRequest +import org.opensearch.commons.alerting.action.GetFindingsResponse +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.FindingDocument +import org.opensearch.commons.alerting.model.FindingWithDocs +import org.opensearch.commons.utils.recreateObject import org.opensearch.index.query.Operator import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestRequest @@ -62,8 +64,8 @@ class TransportGetFindingsSearchAction @Inject constructor( actionFilters: ActionFilters, val settings: 
Settings, val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction ( - GetFindingsAction.NAME, transportService, actionFilters, ::GetFindingsRequest +) : HandledTransportAction ( + AlertingActions.GET_FINDINGS_ACTION_NAME, transportService, actionFilters, ::GetFindingsRequest ), SecureTransportAction { @@ -75,9 +77,11 @@ class TransportGetFindingsSearchAction @Inject constructor( override fun doExecute( task: Task, - getFindingsRequest: GetFindingsRequest, + request: ActionRequest, actionListener: ActionListener ) { + val getFindingsRequest = request as? GetFindingsRequest + ?: recreateObject(request) { GetFindingsRequest(it) } val tableProp = getFindingsRequest.table val sortBuilder = SortBuilders @@ -144,13 +148,13 @@ class TransportGetFindingsSearchAction @Inject constructor( if (findingsRequest.findingIndex.isNullOrEmpty() == false) { // findingIndex has highest priority, so use that if available - indexName = findingsRequest.findingIndex + indexName = findingsRequest.findingIndex!! } else if (findingsRequest.monitorId.isNullOrEmpty() == false) { // second best is monitorId. // We will use it to fetch monitor and then read indexName from dataSources field of monitor withContext(Dispatchers.IO) { val getMonitorRequest = GetMonitorRequest( - findingsRequest.monitorId, + findingsRequest.monitorId!!, -3L, RestRequest.Method.GET, FetchSourceContext.FETCH_SOURCE diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt index b2be635d9..e6a6c28a0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt @@ -15,8 +15,6 @@ import org.opensearch.action.support.HandledTransportAction import org.opensearch.alerting.action.GetMonitorAction import org.opensearch.alerting.action.GetMonitorRequest import org.opensearch.alerting.action.GetMonitorResponse -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException import org.opensearch.client.Client @@ -27,6 +25,8 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt index 2c0186848..06c6b0e26 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchSecurityException import org.opensearch.OpenSearchStatusException import org.opensearch.action.ActionListener +import org.opensearch.action.ActionRequest import org.opensearch.action.admin.indices.create.CreateIndexResponse import 
org.opensearch.action.admin.indices.get.GetIndexRequest import org.opensearch.action.admin.indices.get.GetIndexResponse @@ -26,17 +27,8 @@ import org.opensearch.action.support.HandledTransportAction import org.opensearch.action.support.WriteRequest.RefreshPolicy import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.alerting.DocumentLevelMonitorRunner -import org.opensearch.alerting.action.IndexMonitorAction -import org.opensearch.alerting.action.IndexMonitorRequest -import org.opensearch.alerting.action.IndexMonitorResponse import org.opensearch.alerting.core.ScheduledJobIndices -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.model.AlertingConfigAccessor.Companion.getMonitorMetadata -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorMetadata import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings @@ -61,7 +53,17 @@ import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory.jsonBuilder import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.IndexMonitorRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.commons.authuser.User +import org.opensearch.commons.utils.recreateObject import org.opensearch.index.query.QueryBuilders import org.opensearch.index.reindex.BulkByScrollResponse import org.opensearch.index.reindex.DeleteByQueryAction @@ -86,8 +88,8 @@ class TransportIndexMonitorAction @Inject constructor( val clusterService: ClusterService, val settings: Settings, val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - IndexMonitorAction.NAME, transportService, actionFilters, ::IndexMonitorRequest +) : HandledTransportAction( + AlertingActions.INDEX_MONITOR_ACTION_NAME, transportService, actionFilters, ::IndexMonitorRequest ), SecureTransportAction { @@ -107,18 +109,20 @@ class TransportIndexMonitorAction @Inject constructor( listenFilterBySettingChange(clusterService) } - override fun doExecute(task: Task, request: IndexMonitorRequest, actionListener: ActionListener) { + override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { + val transformedRequest = request as? 
IndexMonitorRequest + ?: recreateObject(request) { IndexMonitorRequest(it) } val user = readUserFromThreadContext(client) if (!validateUserBackendRoles(user, actionListener)) { return } - if (!isADMonitor(request.monitor)) { - checkIndicesAndExecute(client, actionListener, request, user) + if (!isADMonitor(transformedRequest.monitor)) { + checkIndicesAndExecute(client, actionListener, transformedRequest, user) } else { // check if user has access to any anomaly detector for AD monitor - checkAnomalyDetectorAndExecute(client, actionListener, request, user) + checkAnomalyDetectorAndExecute(client, actionListener, transformedRequest, user) } } @@ -323,12 +327,12 @@ class TransportIndexMonitorAction @Inject constructor( trigger.actions.forEach { action -> if (action.throttle != null) { require( - TimeValue(Duration.of(action.throttle.value.toLong(), action.throttle.unit).toMillis()) + TimeValue(Duration.of(action.throttle!!.value.toLong(), action.throttle!!.unit).toMillis()) .compareTo(maxValue) <= 0, { "Can only set throttle period less than or equal to $maxValue" } ) require( - TimeValue(Duration.of(action.throttle.value.toLong(), action.throttle.unit).toMillis()) + TimeValue(Duration.of(action.throttle!!.value.toLong(), action.throttle!!.unit).toMillis()) .compareTo(minValue) >= 0, { "Can only set throttle period greater than or equal to $minValue" } ) @@ -343,7 +347,7 @@ class TransportIndexMonitorAction @Inject constructor( private fun onSearchResponse(response: SearchResponse) { val totalHits = response.hits.totalHits?.value if (totalHits != null && totalHits >= maxMonitors) { - log.error("This request would create more than the allowed monitors [$maxMonitors].") + log.info("This request would create more than the allowed monitors [$maxMonitors].") actionListener.onFailure( AlertingException.wrap( IllegalArgumentException( @@ -364,7 +368,7 @@ class TransportIndexMonitorAction @Inject constructor( prepareMonitorIndexing() IndexUtils.scheduledJobIndexUpdated() } else { - log.error("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") + log.info("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( @@ -381,7 +385,7 @@ class TransportIndexMonitorAction @Inject constructor( IndexUtils.scheduledJobIndexUpdated() prepareMonitorIndexing() } else { - log.error("Update ${ScheduledJob.SCHEDULED_JOBS_INDEX} mappings call not acknowledged.") + log.info("Update ${ScheduledJob.SCHEDULED_JOBS_INDEX} mappings call not acknowledged.") actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( @@ -407,6 +411,7 @@ class TransportIndexMonitorAction @Inject constructor( val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } val failureReasons = checkShardsFailure(indexResponse) if (failureReasons != null) { + log.info(failureReasons.toString()) actionListener.onFailure( AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())) ) @@ -432,7 +437,7 @@ class TransportIndexMonitorAction @Inject constructor( actionListener.onResponse( IndexMonitorResponse( indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, RestStatus.CREATED, request.monitor + indexResponse.primaryTerm, request.monitor ) ) } catch (t: Exception) { @@ -545,7 +550,7 @@ class TransportIndexMonitorAction @Inject constructor( actionListener.onResponse( IndexMonitorResponse( indexResponse.id, indexResponse.version, 
indexResponse.seqNo, - indexResponse.primaryTerm, RestStatus.CREATED, request.monitor + indexResponse.primaryTerm, request.monitor ) ) } catch (t: Exception) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt index 01e126c4c..13ed9c9cb 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt @@ -13,7 +13,6 @@ import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction import org.opensearch.alerting.action.SearchMonitorAction import org.opensearch.alerting.action.SearchMonitorRequest -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.opensearchapi.addFilter import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException @@ -21,6 +20,7 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.authuser.User import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt index 749214048..45937c8ab 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.triggercondition.resolvers -import org.opensearch.alerting.core.model.DocLevelQuery import org.opensearch.alerting.triggercondition.tokens.ExpressionToken import org.opensearch.alerting.triggercondition.tokens.TriggerExpressionConstant import org.opensearch.alerting.triggercondition.tokens.TriggerExpressionOperator import org.opensearch.alerting.triggercondition.tokens.TriggerExpressionToken +import org.opensearch.commons.alerting.model.DocLevelQuery import java.util.Optional import java.util.Stack diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionResolver.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionResolver.kt index faeabad08..fea22c356 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionResolver.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionResolver.kt @@ -5,7 +5,7 @@ package org.opensearch.alerting.triggercondition.resolvers -import org.opensearch.alerting.core.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocLevelQuery interface TriggerExpressionResolver { fun evaluate(queryToDocIds: Map>): Set diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt index 066dfa3c2..e1b6675b2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt @@ -6,10 +6,10 @@ package org.opensearch.alerting.util import org.opensearch.action.search.SearchResponse -import org.opensearch.alerting.model.BucketLevelTrigger import org.opensearch.alerting.model.InputRunResults -import org.opensearch.alerting.model.Trigger import org.opensearch.alerting.model.TriggerAfterKey +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.Trigger import org.opensearch.search.aggregations.AggregationBuilder import org.opensearch.search.aggregations.AggregatorFactories import org.opensearch.search.aggregations.bucket.SingleBucketAggregation diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt index fd44d525c..086c1302c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt @@ -9,14 +9,8 @@ import org.apache.logging.log4j.LogManager import org.opensearch.action.index.IndexRequest import org.opensearch.action.index.IndexResponse import org.opensearch.action.support.WriteRequest -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.AggregationResultBucket import org.opensearch.alerting.model.BucketLevelTriggerRunResult -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorMetadata -import org.opensearch.alerting.model.action.Action -import org.opensearch.alerting.model.action.ActionExecutionPolicy -import org.opensearch.alerting.model.action.ActionExecutionScope import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings @@ -25,6 +19,13 @@ import org.opensearch.client.Client import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.ActionExecutionScope +import org.opensearch.commons.alerting.util.isBucketLevelMonitor private val logger = LogManager.getLogger("AlertingUtils") @@ -51,8 +52,6 @@ fun Destination.isAllowed(allowList: List): Boolean = allowList.contains fun Destination.isTestAction(): Boolean = this.type == DestinationType.TEST_ACTION -fun Monitor.isBucketLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.BUCKET_LEVEL_MONITOR - fun Monitor.isDocLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt index e1c7903f7..1196c8f19 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt @@ -6,9 +6,9 @@ package org.opensearch.alerting.util import org.apache.lucene.search.join.ScoreMode -import org.opensearch.alerting.core.model.SearchInput 
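
The AlertingUtils hunk above drops the plugin-local `Monitor.isBucketLevelMonitor()` extension in favour of the one now imported from `org.opensearch.commons.alerting.util`, while `isDocLevelMonitor` stays in the plugin. A small sketch of this extension-function style of monitor-type checks, using a stand-in `SimpleMonitor` and `MonitorType` rather than the commons model:

```kotlin
// Stand-ins for the commons Monitor model, reduced to the field the checks need.
enum class MonitorType { QUERY_LEVEL_MONITOR, BUCKET_LEVEL_MONITOR, DOC_LEVEL_MONITOR }
data class SimpleMonitor(val name: String, val monitorType: MonitorType)

// Commons-style helper that callers would import from the shared library...
fun SimpleMonitor.isBucketLevelMonitor(): Boolean =
    monitorType == MonitorType.BUCKET_LEVEL_MONITOR

// ...and a plugin-local helper kept alongside the other AlertingUtils functions.
fun SimpleMonitor.isDocLevelMonitor(): Boolean =
    monitorType == MonitorType.DOC_LEVEL_MONITOR

fun main() {
    val monitor = SimpleMonitor("errors-per-host", MonitorType.BUCKET_LEVEL_MONITOR)
    println(monitor.isBucketLevelMonitor()) // true
    println(monitor.isDocLevelMonitor())    // false
}
```
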
-import org.opensearch.alerting.model.Monitor import org.opensearch.common.Strings +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.commons.authuser.User import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.NestedQueryBuilder diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt index 60f9ccda5..98693def1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt @@ -17,16 +17,16 @@ import org.opensearch.action.bulk.BulkResponse import org.opensearch.action.index.IndexRequest import org.opensearch.action.support.WriteRequest.RefreshPolicy import org.opensearch.action.support.master.AcknowledgedResponse -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.model.DataSources -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob private val log = LogManager.getLogger(DocLevelMonitorQueries::class.java) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt index 9f299e8c5..b24962aa5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt @@ -17,13 +17,13 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.util.IndexUtils class IndexUtils { companion object { const val _META = "_meta" const val SCHEMA_VERSION = "schema_version" - const val NO_SCHEMA_VERSION = 0 var scheduledJobIndexSchemaVersion: Int private set @@ -90,7 +90,7 @@ class IndexUtils { } xcp.nextToken() } - return NO_SCHEMA_VERSION + return IndexUtils.NO_SCHEMA_VERSION } @JvmStatic @@ -100,7 +100,7 @@ class IndexUtils { @JvmStatic fun shouldUpdateIndex(index: IndexMetadata, mapping: String): Boolean { - var oldVersion = NO_SCHEMA_VERSION + var oldVersion = IndexUtils.NO_SCHEMA_VERSION val newVersion = getSchemaVersion(mapping) val indexMapping = index.mapping()?.sourceAsMap() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt index 70bd9775b..7c72aae9a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt @@ -24,10 +24,6 @@ fun context(request: RestRequest): FetchSourceContext? 
{ } else null } -const val _ID = "_id" -const val _VERSION = "_version" -const val _SEQ_NO = "_seq_no" const val IF_SEQ_NO = "if_seq_no" -const val _PRIMARY_TERM = "_primary_term" const val IF_PRIMARY_TERM = "if_primary_term" const val REFRESH = "refresh" diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/SupportedClusterMetricsSettingsExtensions.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/SupportedClusterMetricsSettingsExtensions.kt index 2e3027991..6623ec483 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/SupportedClusterMetricsSettingsExtensions.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/SupportedClusterMetricsSettingsExtensions.kt @@ -22,14 +22,13 @@ import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequest import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksResponse import org.opensearch.action.admin.indices.recovery.RecoveryRequest import org.opensearch.action.admin.indices.recovery.RecoveryResponse -import org.opensearch.alerting.core.model.ClusterMetricsInput -import org.opensearch.alerting.core.model.ClusterMetricsInput.ClusterMetricType import org.opensearch.alerting.opensearchapi.convertToMap import org.opensearch.alerting.settings.SupportedClusterMetricsSettings import org.opensearch.alerting.settings.SupportedClusterMetricsSettings.Companion.resolveToActionRequest import org.opensearch.client.Client import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.support.XContentMapValues +import org.opensearch.commons.alerting.model.ClusterMetricsInput /** * Calls the appropriate transport action for the API requested in the [clusterMetricsInput]. @@ -40,17 +39,18 @@ import org.opensearch.common.xcontent.support.XContentMapValues fun executeTransportAction(clusterMetricsInput: ClusterMetricsInput, client: Client): ActionResponse { val request = resolveToActionRequest(clusterMetricsInput) return when (clusterMetricsInput.clusterMetricType) { - ClusterMetricType.CAT_PENDING_TASKS -> client.admin().cluster().pendingClusterTasks(request as PendingClusterTasksRequest).get() - ClusterMetricType.CAT_RECOVERY -> client.admin().indices().recoveries(request as RecoveryRequest).get() - ClusterMetricType.CAT_SNAPSHOTS -> client.admin().cluster().getSnapshots(request as GetSnapshotsRequest).get() - ClusterMetricType.CAT_TASKS -> client.admin().cluster().listTasks(request as ListTasksRequest).get() - ClusterMetricType.CLUSTER_HEALTH -> client.admin().cluster().health(request as ClusterHealthRequest).get() - ClusterMetricType.CLUSTER_SETTINGS -> { + ClusterMetricsInput.ClusterMetricType.CAT_PENDING_TASKS -> client.admin().cluster() + .pendingClusterTasks(request as PendingClusterTasksRequest).get() + ClusterMetricsInput.ClusterMetricType.CAT_RECOVERY -> client.admin().indices().recoveries(request as RecoveryRequest).get() + ClusterMetricsInput.ClusterMetricType.CAT_SNAPSHOTS -> client.admin().cluster().getSnapshots(request as GetSnapshotsRequest).get() + ClusterMetricsInput.ClusterMetricType.CAT_TASKS -> client.admin().cluster().listTasks(request as ListTasksRequest).get() + ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH -> client.admin().cluster().health(request as ClusterHealthRequest).get() + ClusterMetricsInput.ClusterMetricType.CLUSTER_SETTINGS -> { val metadata = client.admin().cluster().state(request as ClusterStateRequest).get().state.metadata return ClusterGetSettingsResponse(metadata.persistentSettings(), metadata.transientSettings(), Settings.EMPTY) } 
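In the SupportedClusterMetricsSettingsExtensions.kt hunk here, the direct import of the nested ClusterMetricType enum is dropped and every reference is fully qualified, because the enum now lives inside org.opensearch.commons.alerting.model.ClusterMetricsInput. A minimal sketch of an equivalent, shorter pattern using a Kotlin import alias; the alias name MetricType and the describe function are illustrative assumptions, not part of the patch:

import org.opensearch.commons.alerting.model.ClusterMetricsInput
import org.opensearch.commons.alerting.model.ClusterMetricsInput.ClusterMetricType as MetricType

// An import alias keeps the when-branches short after the enum moved into
// common-utils; the patch itself fully qualifies each branch instead.
fun describe(input: ClusterMetricsInput): String =
    when (input.clusterMetricType) {
        MetricType.CLUSTER_HEALTH -> "cluster health"
        MetricType.CLUSTER_STATS -> "cluster stats"
        else -> "another supported cluster metrics API"
    }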
- ClusterMetricType.CLUSTER_STATS -> client.admin().cluster().clusterStats(request as ClusterStatsRequest).get() - ClusterMetricType.NODES_STATS -> client.admin().cluster().nodesStats(request as NodesStatsRequest).get() + ClusterMetricsInput.ClusterMetricType.CLUSTER_STATS -> client.admin().cluster().clusterStats(request as ClusterStatsRequest).get() + ClusterMetricsInput.ClusterMetricType.NODES_STATS -> client.admin().cluster().nodesStats(request as NodesStatsRequest).get() else -> throw IllegalArgumentException("Unsupported API request type: ${request.javaClass.name}") } } @@ -64,35 +64,35 @@ fun ActionResponse.toMap(): Map { return when (this) { is ClusterHealthResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CLUSTER_HEALTH.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH.defaultPath) ) is ClusterStatsResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CLUSTER_STATS.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_STATS.defaultPath) ) is ClusterGetSettingsResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CLUSTER_SETTINGS.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_SETTINGS.defaultPath) ) is NodesStatsResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.NODES_STATS.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.NODES_STATS.defaultPath) ) is PendingClusterTasksResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CAT_PENDING_TASKS.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_PENDING_TASKS.defaultPath) ) is RecoveryResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CAT_RECOVERY.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_RECOVERY.defaultPath) ) is GetSnapshotsResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CAT_SNAPSHOTS.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_SNAPSHOTS.defaultPath) ) is ListTasksResponse -> redactFieldsFromResponse( this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricType.CAT_TASKS.defaultPath) + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_TASKS.defaultPath) ) else -> throw IllegalArgumentException("Unsupported ActionResponse type: ${this.javaClass.name}") } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt index 898561142..8c0f21c80 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt @@ -12,7 +12,6 @@ import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.WriteRequest -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.alerting.model.destination.email.EmailGroup @@ -30,6 +29,7 @@ import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.notifications.action.CreateNotificationConfigRequest import org.opensearch.commons.notifications.model.NotificationConfig import org.opensearch.commons.notifications.model.NotificationConfigInfo diff --git a/alerting/src/main/resources/org/opensearch/alerting/org.opensearch.alerting.txt b/alerting/src/main/resources/org/opensearch/alerting/org.opensearch.alerting.txt index 78d53e839..bd1f94482 100644 --- a/alerting/src/main/resources/org/opensearch/alerting/org.opensearch.alerting.txt +++ b/alerting/src/main/resources/org/opensearch/alerting/org.opensearch.alerting.txt @@ -1,4 +1,4 @@ -# Copyright OpenSearch Contributors + # Copyright OpenSearch Contributors # SPDX-License-Identifier: Apache-2.0 # Painless definition of classes used by alerting plugin @@ -31,21 +31,21 @@ class org.opensearch.alerting.script.QueryLevelTriggerExecutionContext { Exception getError() } -class org.opensearch.alerting.model.Monitor { +class org.opensearch.commons.alerting.model.Monitor { String getId() long getVersion() String getName() boolean getEnabled() } -class org.opensearch.alerting.model.QueryLevelTrigger { +class org.opensearch.commons.alerting.model.QueryLevelTrigger { String getId() String getName() String getSeverity() List getActions() } -class org.opensearch.alerting.model.Alert { +class org.opensearch.commons.alerting.model.Alert { String getId() long getVersion() boolean isAcknowledged() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt index a82999bfe..6eda9ec30 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt @@ -4,13 +4,13 @@ */ package org.opensearch.alerting -import org.opensearch.alerting.core.model.Input -import org.opensearch.alerting.core.model.IntervalSchedule -import org.opensearch.alerting.core.model.Schedule -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.QueryLevelTrigger -import org.opensearch.alerting.model.Trigger +import org.opensearch.commons.alerting.model.Input +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.Schedule +import org.opensearch.commons.alerting.model.SearchInput +import 
org.opensearch.commons.alerting.model.Trigger import org.opensearch.commons.authuser.User import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.QueryBuilders diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt index 1f208eda4..6fc2055dd 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt @@ -9,11 +9,6 @@ import org.junit.Before import org.mockito.Mockito import org.opensearch.Version import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.model.AggregationResultBucket -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.action.AlertCategory import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.client.Client @@ -23,6 +18,11 @@ import org.opensearch.common.settings.ClusterSettings import org.opensearch.common.settings.Setting import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.action.AlertCategory import org.opensearch.test.ClusterServiceUtils import org.opensearch.test.OpenSearchTestCase import org.opensearch.threadpool.ThreadPool diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt index ecc8abf12..647132a1a 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt @@ -16,28 +16,15 @@ import org.junit.rules.DisableOnDebug import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.AlertingPlugin.Companion.EMAIL_ACCOUNT_BASE_URI import org.opensearch.alerting.AlertingPlugin.Companion.EMAIL_GROUP_BASE_URI -import org.opensearch.alerting.action.GetFindingsResponse import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.alerts.AlertIndices.Companion.FINDING_HISTORY_WRITE_INDEX -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.core.settings.ScheduledJobSettings -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger -import org.opensearch.alerting.model.DocumentLevelTrigger -import org.opensearch.alerting.model.Finding -import org.opensearch.alerting.model.FindingWithDocs -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.model.destination.Chime import org.opensearch.alerting.model.destination.CustomWebhook import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.model.destination.Slack import org.opensearch.alerting.model.destination.email.EmailAccount import 
org.opensearch.alerting.model.destination.email.EmailGroup -import org.opensearch.alerting.opensearchapi.string import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.settings.DestinationSettings import org.opensearch.alerting.util.DestinationType @@ -60,6 +47,19 @@ import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.common.xcontent.json.JsonXContent.jsonXContent +import org.opensearch.commons.alerting.action.GetFindingsResponse +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.FindingWithDocs +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.util.string import org.opensearch.rest.RestStatus import org.opensearch.search.SearchModule import java.net.URLEncoder diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt index b07e6f78f..b53398b5b 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt @@ -7,15 +7,15 @@ package org.opensearch.alerting import org.opensearch.alerting.alerts.AlertIndices.Companion.ALL_ALERT_INDEX_PATTERN import org.opensearch.alerting.alerts.AlertIndices.Companion.ALL_FINDING_INDEX_PATTERN -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.model.DataSources -import org.opensearch.alerting.model.action.ActionExecutionPolicy -import org.opensearch.alerting.model.action.AlertCategory -import org.opensearch.alerting.model.action.PerAlertActionScope -import org.opensearch.alerting.model.action.PerExecutionActionScope import org.opensearch.client.Response import org.opensearch.client.ResponseException +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.AlertCategory +import org.opensearch.commons.alerting.model.action.PerAlertActionScope +import org.opensearch.commons.alerting.model.action.PerExecutionActionScope import org.opensearch.script.Script import java.time.ZonedDateTime import java.time.format.DateTimeFormatter @@ -624,47 +624,6 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { } } - fun `test execute monitor with non-null owner`() { - - val testIndex = createTestIndex() - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) - val testDoc = """{ - "message" : "This is an error from IAD region", - "test_strict_date_time" : "$testTime", - "test_field" : "us-west-2" - }""" - - val docQuery = 
DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3") - val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) - - val alertCategories = AlertCategory.values() - val actionExecutionScope = PerAlertActionScope( - actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet() - ) - val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy - ) - } - - val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) - try { - createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - owner = "owner" - ) - ) - fail("Expected create monitor to fail") - } catch (e: ResponseException) { - assertTrue(e.message!!.contains("illegal_argument_exception")) - } - } - @Suppress("UNCHECKED_CAST") /** helper that returns a field in a json map whose values are all json objects */ private fun Map.objectMap(key: String): Map> { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt index 708ff1ae1..c7929ad90 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt @@ -8,25 +8,19 @@ package org.opensearch.alerting import org.junit.Assert import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.indices.create.CreateIndexRequest -import org.opensearch.action.search.SearchRequest -import org.opensearch.alerting.action.GetAlertsAction -import org.opensearch.alerting.action.GetAlertsRequest -import org.opensearch.alerting.action.SearchMonitorAction -import org.opensearch.alerting.action.SearchMonitorRequest import org.opensearch.alerting.core.ScheduledJobIndices -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX -import org.opensearch.alerting.model.DataSources -import org.opensearch.alerting.model.Table import org.opensearch.alerting.transport.AlertingSingleNodeTestCase import org.opensearch.common.settings.Settings -import org.opensearch.index.query.MatchQueryBuilder -import org.opensearch.test.OpenSearchTestCase +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX +import org.opensearch.commons.alerting.model.Table import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit.MILLIS -import java.util.concurrent.TimeUnit class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { @@ -47,7 +41,6 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { }""" assertFalse(monitorResponse?.id.isNullOrEmpty()) monitor = monitorResponse!!.monitor - Assert.assertEquals(monitor.owner, "alerting") indexDoc(index, "1", testDoc) val id = 
monitorResponse.id val executeMonitorResponse = executeMonitor(monitor, id, true) @@ -56,18 +49,18 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 0) getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 0) try { client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, "wrong_alert_index")) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, "wrong_alert_index")) .get() fail() } catch (e: Exception) { @@ -103,53 +96,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals("Alert saved for test monitor", 1, alerts.size) val table = Table("asc", "id", null, 1, 0, "") var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", id, null)) - .get() - Assert.assertTrue(getAlertsResponse != null) - Assert.assertTrue(getAlertsResponse.alerts.size == 1) - } - - fun `test execute monitor with owner field`() { - val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3") - val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) - val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val customAlertsIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(alertsIndex = customAlertsIndex), - owner = "owner" - ) - val monitorResponse = createMonitor(monitor) - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) - val testDoc = """{ - "message" : "This is an error from IAD region", - "test_strict_date_time" : "$testTime", - "test_field" : "us-west-2" - }""" - assertFalse(monitorResponse?.id.isNullOrEmpty()) - monitor = monitorResponse!!.monitor - Assert.assertEquals(monitor.owner, "owner") - indexDoc(index, "1", testDoc) - val id = monitorResponse.id - val executeMonitorResponse = executeMonitor(monitor, id, false) - Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) - Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) - val alerts = searchAlerts(id, customAlertsIndex) - assertEquals("Alert saved for test monitor", 1, alerts.size) - val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) - .get() - Assert.assertTrue(getAlertsResponse != null) - 
Assert.assertTrue(getAlertsResponse.alerts.size == 1) - getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) @@ -182,25 +134,15 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) - var queryIndexSearchResponse = client().search(SearchRequest(customQueryIndex)).get() - Assert.assertNotNull(queryIndexSearchResponse) - Assert.assertTrue(queryIndexSearchResponse.hits.hits.size > 0) - deleteMonitor(id) - val docDeletedChecker: () -> Boolean = { - queryIndexSearchResponse = client().search(SearchRequest(customQueryIndex)).get() - Assert.assertNotNull(queryIndexSearchResponse) - queryIndexSearchResponse.hits.hits.isEmpty() - } - OpenSearchTestCase.waitUntil(docDeletedChecker, 5, TimeUnit.SECONDS) } fun `test execute monitor with custom query index and custom field mappings`() { @@ -235,17 +177,6 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val clusterStateResponse = client().admin().cluster().state(ClusterStateRequest().indices(customQueryIndex).metadata(true)).get() val mapping = clusterStateResponse.state.metadata.index(customQueryIndex).mapping() Assert.assertTrue(mapping?.source()?.string()?.contains("\"analyzer\":\"$analyzer\"") == true) - val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() - Assert.assertTrue(getAlertsResponse != null) - Assert.assertTrue(getAlertsResponse.alerts.size == 1) - getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", id, null)) - .get() - Assert.assertTrue(getAlertsResponse != null) - Assert.assertTrue(getAlertsResponse.alerts.size == 1) } fun `test execute monitor with custom findings index`() { @@ -278,12 +209,129 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertTrue("Findings saved for test monitor", findings[0].relatedDocIds.contains("1")) val table = Table("asc", "id", null, 1, 0, "") var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .get() + Assert.assertTrue(getAlertsResponse != 
null) + Assert.assertTrue(getAlertsResponse.alerts.size == 1) + } + + fun `test execute pre-existing monitorand update`() { + val request = CreateIndexRequest(SCHEDULED_JOBS_INDEX).mapping(ScheduledJobIndices.scheduledJobMappings()) + .settings(Settings.builder().put("index.hidden", true).build()) + client().admin().indices().create(request) + val monitorStringWithoutName = """ + { + "monitor": { + "type": "monitor", + "schema_version": 0, + "name": "UayEuXpZtb", + "monitor_type": "doc_level_monitor", + "user": { + "name": "", + "backend_roles": [], + "roles": [], + "custom_attribute_names": [], + "user_requested_tenant": null + }, + "enabled": true, + "enabled_time": 1662753436791, + "schedule": { + "period": { + "interval": 5, + "unit": "MINUTES" + } + }, + "inputs": [{ + "doc_level_input": { + "description": "description", + "indices": [ + "$index" + ], + "queries": [{ + "id": "63efdcce-b5a1-49f4-a25f-6b5f9496a755", + "name": "3", + "query": "test_field:\"us-west-2\"", + "tags": [] + }] + } + }], + "triggers": [{ + "document_level_trigger": { + "id": "OGnTI4MBv6qt0ATc9Phk", + "name": "mrbHRMevYI", + "severity": "1", + "condition": { + "script": { + "source": "return true", + "lang": "painless" + } + }, + "actions": [] + } + }], + "last_update_time": 1662753436791 + } + } + """.trimIndent() + val monitorId = "abc" + indexDoc(SCHEDULED_JOBS_INDEX, monitorId, monitorStringWithoutName) + val getMonitorResponse = getMonitorResponse(monitorId) + Assert.assertNotNull(getMonitorResponse) + Assert.assertNotNull(getMonitorResponse.monitor) + val monitor = getMonitorResponse.monitor + + val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) + val testDoc = """{ + "message" : "This is an error from IAD region", + "test_strict_date_time" : "$testTime", + "test_field" : "us-west-2" + }""" + indexDoc(index, "1", testDoc) + var executeMonitorResponse = executeMonitor(monitor!!, monitorId, false) + Assert.assertNotNull(executeMonitorResponse) + if (executeMonitorResponse != null) { + Assert.assertNotNull(executeMonitorResponse.monitorRunResult.monitorName) + } + val alerts = searchAlerts(monitorId) + assertEquals(alerts.size, 1) + + val customAlertsIndex = "custom_alerts_index" + val customQueryIndex = "custom_query_index" + val customFindingsIndex = "custom_findings_index" + val updateMonitorResponse = updateMonitor( + monitor.copy( + id = monitorId, + dataSources = DataSources( + alertsIndex = customAlertsIndex, + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex + ) + ), + monitorId + ) + Assert.assertNotNull(updateMonitorResponse) + indexDoc(index, "2", testDoc) + if (updateMonitorResponse != null) { + executeMonitorResponse = executeMonitor(updateMonitorResponse.monitor, monitorId, false) + } + val findings = searchFindings(monitorId, customFindingsIndex) + assertEquals("Findings saved for test monitor", 1, findings.size) + assertTrue("Findings saved for test monitor", findings[0].relatedDocIds.contains("2")) + val customAlertsIndexAlerts = searchAlerts(monitorId, customAlertsIndex) + assertEquals("Alert saved for test monitor", 1, customAlertsIndexAlerts.size) + val table = Table("asc", "id", null, 1, 0, "") + var getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) + .get() + Assert.assertTrue(getAlertsResponse != null) + Assert.assertTrue(getAlertsResponse.alerts.size == 1) + getAlertsResponse = client() + 
.execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, null)) .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) @@ -444,139 +492,4 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } } } - - fun `test execute pre-existing monitor and update`() { - val request = CreateIndexRequest(SCHEDULED_JOBS_INDEX).mapping(ScheduledJobIndices.scheduledJobMappings()) - .settings(Settings.builder().put("index.hidden", true).build()) - client().admin().indices().create(request) - val monitorStringWithoutName = """ - { - "monitor": { - "type": "monitor", - "schema_version": 0, - "name": "UayEuXpZtb", - "monitor_type": "doc_level_monitor", - "user": { - "name": "", - "backend_roles": [], - "roles": [], - "custom_attribute_names": [], - "user_requested_tenant": null - }, - "enabled": true, - "enabled_time": 1662753436791, - "schedule": { - "period": { - "interval": 5, - "unit": "MINUTES" - } - }, - "inputs": [{ - "doc_level_input": { - "description": "description", - "indices": [ - "$index" - ], - "queries": [{ - "id": "63efdcce-b5a1-49f4-a25f-6b5f9496a755", - "name": "3", - "query": "test_field:\"us-west-2\"", - "tags": [] - }] - } - }], - "triggers": [{ - "document_level_trigger": { - "id": "OGnTI4MBv6qt0ATc9Phk", - "name": "mrbHRMevYI", - "severity": "1", - "condition": { - "script": { - "source": "return true", - "lang": "painless" - } - }, - "actions": [] - } - }], - "last_update_time": 1662753436791 - } - } - """.trimIndent() - val monitorId = "abc" - indexDoc(SCHEDULED_JOBS_INDEX, monitorId, monitorStringWithoutName) - val getMonitorResponse = getMonitorResponse(monitorId) - Assert.assertNotNull(getMonitorResponse) - Assert.assertNotNull(getMonitorResponse.monitor) - val monitor = getMonitorResponse.monitor - val sr = SearchRequest(SCHEDULED_JOBS_INDEX) - val g = - client().execute(SearchMonitorAction.INSTANCE, SearchMonitorRequest(sr)) - .get() - Assert.assertNotNull(g) - Assert.assertEquals(g.hits.hits.size, 1) - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) - val testDoc = """{ - "message" : "This is an error from IAD region", - "test_strict_date_time" : "$testTime", - "test_field" : "us-west-2" - }""" - indexDoc(index, "1", testDoc) - var executeMonitorResponse = executeMonitor(monitor!!, monitorId, false) - Assert.assertNotNull(executeMonitorResponse) - if (executeMonitorResponse != null) { - Assert.assertNotNull(executeMonitorResponse.monitorRunResult.monitorName) - } - val alerts = searchAlerts(monitorId) - assertEquals(alerts.size, 1) - - val customQueryIndex = "custom_query_index" - Assert.assertFalse(client().admin().cluster().state(ClusterStateRequest()).get().state.routingTable.hasIndex(customQueryIndex)) - val customAlertsIndex = "custom_alerts_index" - val customFindingsIndex = "custom_findings_index" - val updateMonitorResponse = updateMonitor( - monitor.copy( - id = monitorId, - owner = "security_analytics_plugin", - dataSources = DataSources( - alertsIndex = customAlertsIndex, - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex - ) - ), - monitorId - ) - Assert.assertNotNull(updateMonitorResponse) - Assert.assertEquals(updateMonitorResponse!!.monitor.owner, "security_analytics_plugin") - indexDoc(index, "2", testDoc) - executeMonitorResponse = executeMonitor(updateMonitorResponse.monitor, monitorId, false) - 
Assert.assertTrue(client().admin().cluster().state(ClusterStateRequest()).get().state.routingTable.hasIndex(customQueryIndex)) - val findings = searchFindings(monitorId, customFindingsIndex) - assertEquals("Findings saved for test monitor", 1, findings.size) - assertTrue("Findings saved for test monitor", findings[0].relatedDocIds.contains("2")) - val customAlertsIndexAlerts = searchAlerts(monitorId, customAlertsIndex) - assertEquals("Alert saved for test monitor", 1, customAlertsIndexAlerts.size) - val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) - .get() - Assert.assertTrue(getAlertsResponse != null) - Assert.assertTrue(getAlertsResponse.alerts.size == 1) - getAlertsResponse = client() - .execute(GetAlertsAction.INSTANCE, GetAlertsRequest(table, "ALL", "ALL", monitorId, null)) - .get() - Assert.assertTrue(getAlertsResponse != null) - Assert.assertTrue(getAlertsResponse.alerts.size == 1) - - val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX) - var searchMonitorResponse = - client().execute(SearchMonitorAction.INSTANCE, SearchMonitorRequest(searchRequest)) - .get() - Assert.assertEquals(searchMonitorResponse.hits.hits.size, 0) - searchRequest.source().query(MatchQueryBuilder("monitor.owner", "security_analytics_plugin")) - searchMonitorResponse = - client().execute(SearchMonitorAction.INSTANCE, SearchMonitorRequest(searchRequest)) - .get() - Assert.assertEquals(searchMonitorResponse.hits.hits.size, 1) - } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt index d56ca4d95..007fcf4b0 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt @@ -6,23 +6,7 @@ package org.opensearch.alerting import org.junit.Assert -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder -import org.opensearch.alerting.alerts.AlertError import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.core.model.IntervalSchedule -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.model.ActionExecutionResult -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.Alert.State.ACKNOWLEDGED -import org.opensearch.alerting.model.Alert.State.ACTIVE -import org.opensearch.alerting.model.Alert.State.COMPLETED -import org.opensearch.alerting.model.Alert.State.ERROR -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.action.ActionExecutionPolicy -import org.opensearch.alerting.model.action.AlertCategory -import org.opensearch.alerting.model.action.PerAlertActionScope -import org.opensearch.alerting.model.action.PerExecutionActionScope -import org.opensearch.alerting.model.action.Throttle import org.opensearch.alerting.model.destination.CustomWebhook import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.model.destination.email.Email @@ -32,6 +16,23 @@ import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.client.ResponseException import org.opensearch.client.WarningFailureException import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import 
org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.Alert.State +import org.opensearch.commons.alerting.model.Alert.State.ACKNOWLEDGED +import org.opensearch.commons.alerting.model.Alert.State.ACTIVE +import org.opensearch.commons.alerting.model.Alert.State.COMPLETED +import org.opensearch.commons.alerting.model.Alert.State.ERROR +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.AlertCategory +import org.opensearch.commons.alerting.model.action.PerAlertActionScope +import org.opensearch.commons.alerting.model.action.PerExecutionActionScope +import org.opensearch.commons.alerting.model.action.Throttle import org.opensearch.commons.authuser.User import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestStatus @@ -665,7 +666,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val monitor = createMonitor( randomQueryLevelMonitor( triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = actions)), - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES) + schedule = IntervalSchedule(interval = 1, unit = MINUTES) ) ) val monitorRunResultNotThrottled = entityAsMap(executeMonitor(monitor.id)) @@ -1738,7 +1739,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { private fun verifyAlert( alert: Alert, monitor: Monitor, - expectedState: Alert.State = ACTIVE, + expectedState: State = ACTIVE, expectNotification: Boolean = true ) { assertNotNull(alert.id) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt index 02631eac9..f6ed78541 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt @@ -5,7 +5,7 @@ package org.opensearch.alerting -import org.opensearch.alerting.model.Trigger +import org.opensearch.commons.alerting.model.Trigger import org.opensearch.test.OpenSearchTestCase import java.lang.IllegalArgumentException import java.time.Instant diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index d09a11d44..551e6d2c7 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -8,42 +8,15 @@ package org.opensearch.alerting import junit.framework.TestCase.assertNull import org.apache.http.Header import org.apache.http.HttpEntity -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder -import org.opensearch.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter -import org.opensearch.alerting.core.model.ClusterMetricsInput -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.core.model.Input -import org.opensearch.alerting.core.model.IntervalSchedule -import org.opensearch.alerting.core.model.Schedule -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.model.ActionExecutionResult 
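In the MonitorRunnerServiceIT.kt hunk above (and the TestHelpers.kt hunk that continues below), the test fixtures switch to the common-utils model classes: IntervalSchedule, Alert.State, SearchInput and the action-scope types are now imported from org.opensearch.commons.alerting.model. A minimal sketch of the construction pattern the tests rely on after the change; the value names are illustrative, not from the patch:

import java.time.temporal.ChronoUnit
import org.opensearch.commons.alerting.model.Alert
import org.opensearch.commons.alerting.model.IntervalSchedule

// Both types now resolve from common-utils rather than the alerting plugin.
val everyMinute = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES)
val initialState: Alert.State = Alert.State.ACTIVE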
import org.opensearch.alerting.model.ActionRunResult -import org.opensearch.alerting.model.AggregationResultBucket -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.BucketLevelTrigger import org.opensearch.alerting.model.BucketLevelTriggerRunResult -import org.opensearch.alerting.model.DataSources -import org.opensearch.alerting.model.DocumentLevelTrigger import org.opensearch.alerting.model.DocumentLevelTriggerRunResult -import org.opensearch.alerting.model.Finding import org.opensearch.alerting.model.InputRunResults -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.model.QueryLevelTriggerRunResult -import org.opensearch.alerting.model.Trigger -import org.opensearch.alerting.model.action.Action -import org.opensearch.alerting.model.action.ActionExecutionPolicy -import org.opensearch.alerting.model.action.ActionExecutionScope -import org.opensearch.alerting.model.action.AlertCategory -import org.opensearch.alerting.model.action.PerAlertActionScope -import org.opensearch.alerting.model.action.PerExecutionActionScope -import org.opensearch.alerting.model.action.Throttle import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.alerting.model.destination.email.EmailEntry import org.opensearch.alerting.model.destination.email.EmailGroup -import org.opensearch.alerting.opensearchapi.string import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.client.Request import org.opensearch.client.RequestOptions @@ -60,6 +33,33 @@ import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.ClusterMetricsInput +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.Input +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.Schedule +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.ActionExecutionScope +import org.opensearch.commons.alerting.model.action.AlertCategory +import org.opensearch.commons.alerting.model.action.PerAlertActionScope +import org.opensearch.commons.alerting.model.action.PerExecutionActionScope +import 
org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.util.string import org.opensearch.commons.authuser.User import org.opensearch.index.query.QueryBuilders import org.opensearch.script.Script @@ -171,26 +171,6 @@ fun randomDocumentLevelMonitor( ) } -fun randomDocumentLevelMonitor( - name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), - user: User? = randomUser(), - inputs: List = listOf(DocLevelMonitorInput("description", listOf("index"), emptyList())), - schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), - enabled: Boolean = randomBoolean(), - triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, - enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, - lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false, - dataSources: DataSources = DataSources(), - owner: String -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), dataSources = dataSources, owner = owner - ) -} - fun randomDocumentLevelMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), user: User? = randomUser(), @@ -568,16 +548,6 @@ fun randomActionRunResult(): ActionRunResult { ) } -fun Monitor.toJsonString(): String { - val builder = XContentFactory.jsonBuilder() - return this.toXContent(builder, ToXContent.EMPTY_PARAMS).string() -} - -fun Monitor.toJsonStringWithUser(): String { - val builder = XContentFactory.jsonBuilder() - return this.toXContentWithUser(builder, ToXContent.EMPTY_PARAMS).string() -} - fun Alert.toJsonString(): String { val builder = XContentFactory.jsonBuilder() return this.toXContent(builder, ToXContent.EMPTY_PARAMS).string() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponseTests.kt index 13d98f599..7928c2ee0 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/AcknowledgeAlertResponseTests.kt @@ -6,12 +6,12 @@ package org.opensearch.alerting.action import org.junit.Assert -import org.opensearch.alerting.alerts.AlertError -import org.opensearch.alerting.model.ActionExecutionResult -import org.opensearch.alerting.model.Alert import org.opensearch.alerting.randomUser import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.StreamInput +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.Alert import org.opensearch.test.OpenSearchTestCase import java.time.Instant diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/DeleteMonitorActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/DeleteMonitorActionTests.kt deleted file mode 100644 index 8db7761cd..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/DeleteMonitorActionTests.kt +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import 
org.junit.Assert -import org.opensearch.test.OpenSearchTestCase - -class DeleteMonitorActionTests : OpenSearchTestCase() { - - fun `test delete monitor action name`() { - Assert.assertNotNull(DeleteMonitorAction.INSTANCE.name()) - Assert.assertEquals(DeleteMonitorAction.INSTANCE.name(), DeleteMonitorAction.NAME) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/DeleteMonitorRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/DeleteMonitorRequestTests.kt deleted file mode 100644 index e66940460..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/DeleteMonitorRequestTests.kt +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.junit.Assert -import org.opensearch.action.support.WriteRequest -import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.test.OpenSearchTestCase - -class DeleteMonitorRequestTests : OpenSearchTestCase() { - - fun `test delete monitor request`() { - - val req = DeleteMonitorRequest("1234", WriteRequest.RefreshPolicy.IMMEDIATE) - Assert.assertNotNull(req) - Assert.assertEquals("1234", req.monitorId) - Assert.assertEquals("true", req.refreshPolicy.value) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = DeleteMonitorRequest(sin) - Assert.assertEquals("1234", newReq.monitorId) - Assert.assertEquals("true", newReq.refreshPolicy.value) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt index ba22c5c46..022b93455 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.action -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.randomQueryLevelMonitor import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsActionTests.kt deleted file mode 100644 index 73ee6a37c..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsActionTests.kt +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.test.OpenSearchTestCase - -class GetAlertsActionTests : OpenSearchTestCase() { - - fun `test get alerts action name`() { - assertNotNull(GetAlertsAction.INSTANCE.name()) - assertEquals(GetAlertsAction.INSTANCE.name(), GetAlertsAction.NAME) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsRequestTests.kt deleted file mode 100644 index 166637457..000000000 --- 
a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsRequestTests.kt +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.alerting.model.Table -import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.test.OpenSearchTestCase - -class GetAlertsRequestTests : OpenSearchTestCase() { - - fun `test get alerts request`() { - - val table = Table("asc", "sortString", null, 1, 0, "") - - val req = GetAlertsRequest(table, "1", "active", null, null) - assertNotNull(req) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = GetAlertsRequest(sin) - - assertEquals("1", newReq.severityLevel) - assertEquals("active", newReq.alertState) - assertNull(newReq.monitorId) - assertEquals(table, newReq.table) - } - - fun `test get alerts request with filter`() { - - val table = Table("asc", "sortString", null, 1, 0, "") - val req = GetAlertsRequest(table, "1", "active", null, null) - assertNotNull(req) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = GetAlertsRequest(sin) - - assertEquals("1", newReq.severityLevel) - assertEquals("active", newReq.alertState) - assertNull(newReq.monitorId) - assertEquals(table, newReq.table) - } - - fun `test validate returns null`() { - val table = Table("asc", "sortString", null, 1, 0, "") - - val req = GetAlertsRequest(table, "1", "active", null, null) - assertNotNull(req) - assertNull(req.validate()) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsResponseTests.kt deleted file mode 100644 index 277e8e9a2..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetAlertsResponseTests.kt +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.junit.Assert -import org.opensearch.alerting.builder -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.opensearchapi.string -import org.opensearch.alerting.randomUser -import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.test.OpenSearchTestCase -import java.time.Instant -import java.util.Collections - -class GetAlertsResponseTests : OpenSearchTestCase() { - - fun `test get alerts response with no alerts`() { - val req = GetAlertsResponse(Collections.emptyList(), 0) - assertNotNull(req) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = GetAlertsResponse(sin) - Assert.assertTrue(newReq.alerts.isEmpty()) - assertEquals(0, newReq.totalAlerts) - } - - fun `test get alerts response with alerts`() { - val alert = Alert( - "id", - 0L, - 0, - "monitorId", - "monitorName", - 0L, - randomUser(), - "triggerId", - "triggerName", - Collections.emptyList(), - Collections.emptyList(), - Alert.State.ACKNOWLEDGED, - Instant.MIN, - null, - null, - null, - null, - Collections.emptyList(), - "severity", - Collections.emptyList(), - null - ) - val req = GetAlertsResponse(listOf(alert), 1) - assertNotNull(req) - - 
val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = GetAlertsResponse(sin) - assertEquals(1, newReq.alerts.size) - assertEquals(alert, newReq.alerts[0]) - assertEquals(1, newReq.totalAlerts) - } - - fun `test toXContent for get alerts response`() { - val now = Instant.now() - - val alert = Alert( - "id", - 0L, - 0, - "monitorId", - "monitorName", - 0L, - null, - "triggerId", - "triggerName", - Collections.emptyList(), - Collections.emptyList(), - Alert.State.ACKNOWLEDGED, - now, - null, - null, - null, - null, - Collections.emptyList(), - "severity", - Collections.emptyList(), - null - ) - val req = GetAlertsResponse(listOf(alert), 1) - var actualXContentString = req.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val expectedXContentString = "{\"alerts\":[{\"id\":\"id\",\"version\":0,\"monitor_id\":\"monitorId\"," + - "\"schema_version\":0,\"monitor_version\":0,\"monitor_name\":\"monitorName\"," + - "\"trigger_id\":\"triggerId\",\"trigger_name\":\"triggerName\"," + - "\"finding_ids\":[],\"related_doc_ids\":[],\"state\":\"ACKNOWLEDGED\",\"error_message\":null,\"alert_history\":[]," + - "\"severity\":\"severity\",\"action_execution_results\":[],\"start_time\":" + now.toEpochMilli() + - ",\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null}],\"totalAlerts\":1}" - assertEquals(expectedXContentString, actualXContentString) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt index 42cf1736a..e8875caca 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt @@ -5,9 +5,9 @@ package org.opensearch.alerting.action -import org.opensearch.alerting.model.Table import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.StreamInput +import org.opensearch.commons.alerting.model.Table import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt index 2cfcbdb40..43da11870 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt @@ -5,9 +5,10 @@ package org.opensearch.alerting.action -import org.opensearch.alerting.model.Table import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.StreamInput +import org.opensearch.commons.alerting.action.GetFindingsRequest +import org.opensearch.commons.alerting.model.Table import org.opensearch.test.OpenSearchTestCase class GetFindingsRequestTests : OpenSearchTestCase() { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetMonitorResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetMonitorResponseTests.kt index 2bd14a45f..ae2e68747 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetMonitorResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetMonitorResponseTests.kt @@ -5,11 +5,11 @@ package org.opensearch.alerting.action -import 
org.opensearch.alerting.core.model.CronSchedule -import org.opensearch.alerting.model.Monitor import org.opensearch.alerting.randomUser import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.StreamInput +import org.opensearch.commons.alerting.model.CronSchedule +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.time.Instant diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorActionTests.kt deleted file mode 100644 index c115aa0cf..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorActionTests.kt +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.test.OpenSearchTestCase - -class IndexMonitorActionTests : OpenSearchTestCase() { - - fun `test index monitor action name`() { - assertNotNull(IndexMonitorAction.INSTANCE.name()) - assertEquals(IndexMonitorAction.INSTANCE.name(), IndexMonitorAction.NAME) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorRequestTests.kt deleted file mode 100644 index c840e130e..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorRequestTests.kt +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.action.support.WriteRequest -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.randomQueryLevelMonitor -import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.rest.RestRequest -import org.opensearch.search.builder.SearchSourceBuilder -import org.opensearch.test.OpenSearchTestCase - -class IndexMonitorRequestTests : OpenSearchTestCase() { - - fun `test index monitor post request`() { - - val req = IndexMonitorRequest( - "1234", 1L, 2L, WriteRequest.RefreshPolicy.IMMEDIATE, RestRequest.Method.POST, - randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) - ) - assertNotNull(req) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = IndexMonitorRequest(sin) - assertEquals("1234", newReq.monitorId) - assertEquals(1L, newReq.seqNo) - assertEquals(2L, newReq.primaryTerm) - assertEquals(RestRequest.Method.POST, newReq.method) - assertNotNull(newReq.monitor) - } - - fun `test index monitor put request`() { - - val req = IndexMonitorRequest( - "1234", 1L, 2L, WriteRequest.RefreshPolicy.IMMEDIATE, RestRequest.Method.PUT, - randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) - ) - assertNotNull(req) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = IndexMonitorRequest(sin) - assertEquals("1234", newReq.monitorId) - assertEquals(1L, newReq.seqNo) - assertEquals(2L, newReq.primaryTerm) - assertEquals(RestRequest.Method.PUT, newReq.method) - assertNotNull(newReq.monitor) - } -} diff --git 
a/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorResponseTests.kt deleted file mode 100644 index 00210dce6..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/IndexMonitorResponseTests.kt +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.action - -import org.opensearch.alerting.core.model.CronSchedule -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.randomUser -import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.rest.RestStatus -import org.opensearch.test.OpenSearchTestCase -import java.time.Instant -import java.time.ZoneId - -class IndexMonitorResponseTests : OpenSearchTestCase() { - - fun `test index monitor response with monitor`() { - val cronExpression = "31 * * * *" // Run at minute 31. - val testInstance = Instant.ofEpochSecond(1538164858L) - - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Kolkata"), testInstance) - val monitor = Monitor( - id = "123", - version = 0L, - name = "test-monitor", - enabled = true, - schedule = cronSchedule, - lastUpdateTime = Instant.now(), - enabledTime = Instant.now(), - monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, - user = randomUser(), - schemaVersion = 0, - inputs = mutableListOf(), - triggers = mutableListOf(), - uiMetadata = mutableMapOf() - ) - val req = IndexMonitorResponse("1234", 1L, 2L, 0L, RestStatus.OK, monitor) - assertNotNull(req) - - val out = BytesStreamOutput() - req.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newReq = IndexMonitorResponse(sin) - assertEquals("1234", newReq.id) - assertEquals(1L, newReq.version) - assertEquals(RestStatus.OK, newReq.status) - assertNotNull(newReq.monitor) - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt index 4b7155bce..60021e20b 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt @@ -6,6 +6,8 @@ package org.opensearch.alerting.aggregation.bucketselectorext import org.opensearch.alerting.AlertingPlugin +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter import org.opensearch.plugins.SearchPlugin import org.opensearch.script.Script import org.opensearch.script.ScriptType diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregatorTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregatorTests.kt index 742cc2069..257a0a705 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregatorTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregatorTests.kt @@ -16,6 +16,9 @@ import org.apache.lucene.util.BytesRef 
import org.hamcrest.CoreMatchers import org.opensearch.common.CheckedConsumer import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorIndices import org.opensearch.index.mapper.KeywordFieldMapper.KeywordFieldType import org.opensearch.index.mapper.MappedFieldType import org.opensearch.index.mapper.NumberFieldMapper diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt index e1b780491..55263b536 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt @@ -11,9 +11,6 @@ import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.ALWAYS_RUN import org.opensearch.alerting.AlertingRestTestCase import org.opensearch.alerting.NEVER_RUN -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.alerting.makeRequest import org.opensearch.alerting.randomDocumentLevelMonitor import org.opensearch.alerting.randomDocumentLevelTrigger @@ -22,6 +19,9 @@ import org.opensearch.alerting.randomQueryLevelTrigger import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent.jsonXContent +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.rest.RestStatus class AlertIndicesIT : AlertingRestTestCase() { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt index 3f41bd522..d9de08d9e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt @@ -10,9 +10,9 @@ import org.apache.http.entity.StringEntity import org.opensearch.alerting.ALERTING_BASE_URI import org.opensearch.alerting.AlertingRestTestCase import org.opensearch.alerting.makeRequest -import org.opensearch.alerting.model.Monitor import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt index 698754055..08fba74cb 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt @@ -8,6 +8,7 @@ package org.opensearch.alerting.model import org.junit.Assert import org.opensearch.alerting.randomAlert import org.opensearch.alerting.randomAlertWithAggregationResultBucket +import org.opensearch.commons.alerting.model.Alert 
import org.opensearch.test.OpenSearchTestCase class AlertTests : OpenSearchTestCase() { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt deleted file mode 100644 index 4c6f1825d..000000000 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/DocLevelMonitorInputTests.kt +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.opensearchapi.string -import org.opensearch.alerting.randomDocLevelMonitorInput -import org.opensearch.alerting.randomDocLevelQuery -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentType -import org.opensearch.test.OpenSearchTestCase -import java.lang.IllegalArgumentException - -class DocLevelMonitorInputTests : OpenSearchTestCase() { - fun `test DocLevelQuery asTemplateArgs`() { - // GIVEN - val query = randomDocLevelQuery() - - // WHEN - val templateArgs = query.asTemplateArg() - - // THEN - assertEquals("Template args 'id' field does not match:", templateArgs[DocLevelQuery.QUERY_ID_FIELD], query.id) - assertEquals("Template args 'query' field does not match:", templateArgs[DocLevelQuery.QUERY_FIELD], query.query) - assertEquals("Template args 'name' field does not match:", templateArgs[DocLevelQuery.NAME_FIELD], query.name) - assertEquals("Template args 'tags' field does not match:", templateArgs[DocLevelQuery.TAGS_FIELD], query.tags) - } - - fun `test create Doc Level Query with invalid characters for name`() { - val badString = "query with space" - try { - randomDocLevelQuery(name = badString) - fail("Expecting an illegal argument exception") - } catch (e: IllegalArgumentException) { - assertEquals( - "They query name or tag, $badString, contains an invalid character: [' ','[',']','{','}','(',')']", - e.message - ) - } - } - - @Throws(IllegalArgumentException::class) - fun `test create Doc Level Query with invalid characters for tags`() { - val badString = "[(){}]" - try { - randomDocLevelQuery(tags = listOf(badString)) - fail("Expecting an illegal argument exception") - } catch (e: IllegalArgumentException) { - assertEquals( - "They query name or tag, $badString, contains an invalid character: [' ','[',']','{','}','(',')']", - e.message - ) - } - } - - fun `test DocLevelMonitorInput asTemplateArgs`() { - // GIVEN - val input = randomDocLevelMonitorInput() - - // test - val inputString = input.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() - // assertEquals("test", inputString) - // test end - // WHEN - val templateArgs = input.asTemplateArg() - - // THEN - assertEquals( - "Template args 'description' field does not match:", - templateArgs[DocLevelMonitorInput.DESCRIPTION_FIELD], - input.description - ) - assertEquals( - "Template args 'indices' field does not match:", - templateArgs[DocLevelMonitorInput.INDICES_FIELD], - input.indices - ) - assertEquals( - "Template args 'queries' field does not contain the expected number of queries:", - input.queries.size, - (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).size - ) - input.queries.forEach { - assertTrue( - "Template args 'queries' field does not match:", 
- (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).contains(it.asTemplateArg()) - ) - } - } -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt index 5078beb2d..f77ca3ddc 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt @@ -6,6 +6,7 @@ package org.opensearch.alerting.model import org.opensearch.alerting.randomFinding +import org.opensearch.commons.alerting.model.Finding import org.opensearch.test.OpenSearchTestCase class FindingTests : OpenSearchTestCase() { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt index 9e01cd09b..16ff7db0a 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt @@ -5,120 +5,26 @@ package org.opensearch.alerting.model -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.model.action.Action -import org.opensearch.alerting.model.action.ActionExecutionPolicy -import org.opensearch.alerting.model.action.Throttle import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.alerting.model.destination.email.EmailGroup -import org.opensearch.alerting.randomAction -import org.opensearch.alerting.randomActionExecutionPolicy import org.opensearch.alerting.randomActionRunResult import org.opensearch.alerting.randomBucketLevelMonitorRunResult -import org.opensearch.alerting.randomBucketLevelTrigger import org.opensearch.alerting.randomBucketLevelTriggerRunResult import org.opensearch.alerting.randomDocumentLevelMonitorRunResult -import org.opensearch.alerting.randomDocumentLevelTrigger import org.opensearch.alerting.randomDocumentLevelTriggerRunResult import org.opensearch.alerting.randomEmailAccount import org.opensearch.alerting.randomEmailGroup import org.opensearch.alerting.randomInputRunResults -import org.opensearch.alerting.randomQueryLevelMonitor import org.opensearch.alerting.randomQueryLevelMonitorRunResult -import org.opensearch.alerting.randomQueryLevelTrigger import org.opensearch.alerting.randomQueryLevelTriggerRunResult -import org.opensearch.alerting.randomThrottle -import org.opensearch.alerting.randomUser -import org.opensearch.alerting.randomUserEmpty import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.StreamInput -import org.opensearch.commons.authuser.User +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase class WriteableTests : OpenSearchTestCase() { - fun `test throttle as stream`() { - val throttle = randomThrottle() - val out = BytesStreamOutput() - throttle.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newThrottle = Throttle(sin) - assertEquals("Round tripping Throttle doesn't work", throttle, newThrottle) - } - - fun `test action as stream`() { - val action = randomAction() - val out = BytesStreamOutput() - action.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newAction = Action(sin) - assertEquals("Round tripping Action doesn't work", action, newAction) - } - - fun `test action as stream with null subject template`() { - val 
action = randomAction().copy(subjectTemplate = null) - val out = BytesStreamOutput() - action.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newAction = Action(sin) - assertEquals("Round tripping Action doesn't work", action, newAction) - } - - fun `test action as stream with null throttle`() { - val action = randomAction().copy(throttle = null) - val out = BytesStreamOutput() - action.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newAction = Action(sin) - assertEquals("Round tripping Action doesn't work", action, newAction) - } - - fun `test action as stream with throttled enabled and null throttle`() { - val action = randomAction().copy(throttle = null).copy(throttleEnabled = true) - val out = BytesStreamOutput() - action.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newAction = Action(sin) - assertEquals("Round tripping Action doesn't work", action, newAction) - } - - fun `test query-level monitor as stream`() { - val monitor = randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) - val out = BytesStreamOutput() - monitor.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newMonitor = Monitor(sin) - assertEquals("Round tripping QueryLevelMonitor doesn't work", monitor, newMonitor) - } - - fun `test query-level trigger as stream`() { - val trigger = randomQueryLevelTrigger() - val out = BytesStreamOutput() - trigger.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newTrigger = QueryLevelTrigger.readFrom(sin) - assertEquals("Round tripping QueryLevelTrigger doesn't work", trigger, newTrigger) - } - - fun `test bucket-level trigger as stream`() { - val trigger = randomBucketLevelTrigger() - val out = BytesStreamOutput() - trigger.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newTrigger = BucketLevelTrigger.readFrom(sin) - assertEquals("Round tripping BucketLevelTrigger doesn't work", trigger, newTrigger) - } - - fun `test doc-level trigger as stream`() { - val trigger = randomDocumentLevelTrigger() - val out = BytesStreamOutput() - trigger.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newTrigger = DocumentLevelTrigger.readFrom(sin) - assertEquals("Round tripping DocumentLevelTrigger doesn't work", trigger, newTrigger) - } - fun `test actionrunresult as stream`() { val actionRunResult = randomActionRunResult() val out = BytesStreamOutput() @@ -200,24 +106,6 @@ class WriteableTests : OpenSearchTestCase() { assertEquals("Round tripping MonitorRunResult doesn't work", input, newInput) } - fun `test user as stream`() { - val user = randomUser() - val out = BytesStreamOutput() - user.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newUser = User(sin) - assertEquals("Round tripping User doesn't work", user, newUser) - } - - fun `test empty user as stream`() { - val user = randomUserEmpty() - val out = BytesStreamOutput() - user.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newUser = User(sin) - assertEquals("Round tripping User doesn't work", user, newUser) - } - fun `test emailaccount as stream`() { val emailAccount = randomEmailAccount() val out = BytesStreamOutput() @@ -235,13 +123,4 @@ class WriteableTests : OpenSearchTestCase() { val newEmailGroup = EmailGroup.readFrom(sin) assertEquals("Round tripping EmailGroup doesn't work", emailGroup, newEmailGroup) } - - 
fun `test action execution policy as stream`() { - val actionExecutionPolicy = randomActionExecutionPolicy() - val out = BytesStreamOutput() - actionExecutionPolicy.writeTo(out) - val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) - val newActionExecutionPolicy = ActionExecutionPolicy.readFrom(sin) - assertEquals("Round tripping ActionExecutionPolicy doesn't work", actionExecutionPolicy, newActionExecutionPolicy) - } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt index 17e772890..c2d49a8f3 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt @@ -6,175 +6,23 @@ package org.opensearch.alerting.model import org.opensearch.alerting.builder -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.model.action.Action -import org.opensearch.alerting.model.action.ActionExecutionPolicy -import org.opensearch.alerting.model.action.PerExecutionActionScope -import org.opensearch.alerting.model.action.Throttle import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.alerting.model.destination.email.EmailGroup -import org.opensearch.alerting.opensearchapi.string import org.opensearch.alerting.parser -import org.opensearch.alerting.randomAction -import org.opensearch.alerting.randomActionExecutionPolicy import org.opensearch.alerting.randomActionExecutionResult -import org.opensearch.alerting.randomActionWithPolicy import org.opensearch.alerting.randomAlert -import org.opensearch.alerting.randomBucketLevelMonitor -import org.opensearch.alerting.randomBucketLevelTrigger import org.opensearch.alerting.randomEmailAccount import org.opensearch.alerting.randomEmailGroup -import org.opensearch.alerting.randomQueryLevelMonitor -import org.opensearch.alerting.randomQueryLevelMonitorWithoutUser -import org.opensearch.alerting.randomQueryLevelTrigger -import org.opensearch.alerting.randomThrottle -import org.opensearch.alerting.randomUser -import org.opensearch.alerting.randomUserEmpty import org.opensearch.alerting.toJsonString -import org.opensearch.alerting.toJsonStringWithUser import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory -import org.opensearch.commons.authuser.User -import org.opensearch.index.query.QueryBuilders -import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.util.string import org.opensearch.test.OpenSearchTestCase -import java.time.temporal.ChronoUnit -import kotlin.test.assertFailsWith class XContentTests : OpenSearchTestCase() { - fun `test action parsing`() { - val action = randomAction() - val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedAction = Action.parse(parser(actionString)) - assertEquals("Round tripping Action doesn't work", action, parsedAction) - } - - fun `test action parsing with null subject template`() { - val action = randomAction().copy(subjectTemplate = null) - val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedAction = Action.parse(parser(actionString)) - assertEquals("Round tripping Action doesn't work", action, parsedAction) - } - - fun `test action parsing with null throttle`() { - 
val action = randomAction().copy(throttle = null) - val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedAction = Action.parse(parser(actionString)) - assertEquals("Round tripping Action doesn't work", action, parsedAction) - } - - fun `test action parsing with throttled enabled and null throttle`() { - val action = randomAction().copy(throttle = null).copy(throttleEnabled = true) - val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - assertFailsWith("Action throttle enabled but not set throttle value") { - Action.parse(parser(actionString)) - } - } - - fun `test action with per execution scope does not support throttling`() { - try { - randomActionWithPolicy().copy( - throttleEnabled = true, - throttle = Throttle(value = 5, unit = ChronoUnit.MINUTES), - actionExecutionPolicy = ActionExecutionPolicy(PerExecutionActionScope()) - ) - fail("Creating an action with per execution scope and throttle enabled did not fail.") - } catch (ignored: IllegalArgumentException) { - } - } - - fun `test throttle parsing`() { - val throttle = randomThrottle() - val throttleString = throttle.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedThrottle = Throttle.parse(parser(throttleString)) - assertEquals("Round tripping Monitor doesn't work", throttle, parsedThrottle) - } - - fun `test throttle parsing with wrong unit`() { - val throttle = randomThrottle() - val throttleString = throttle.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val wrongThrottleString = throttleString.replace("MINUTES", "wrongunit") - - assertFailsWith("Only support MINUTES throttle unit") { Throttle.parse(parser(wrongThrottleString)) } - } - - fun `test throttle parsing with negative value`() { - val throttle = randomThrottle().copy(value = -1) - val throttleString = throttle.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - - assertFailsWith("Can only set positive throttle period") { Throttle.parse(parser(throttleString)) } - } - - fun `test query-level monitor parsing`() { - val monitor = randomQueryLevelMonitor() - - val monitorString = monitor.toJsonStringWithUser() - val parsedMonitor = Monitor.parse(parser(monitorString)) - assertEquals("Round tripping QueryLevelMonitor doesn't work", monitor, parsedMonitor) - } - - fun `test monitor parsing with no name`() { - val monitorStringWithoutName = """ - { - "type": "monitor", - "enabled": false, - "schedule": { - "period": { - "interval": 1, - "unit": "MINUTES" - } - }, - "inputs": [], - "triggers": [] - } - """.trimIndent() - - assertFailsWith("Monitor name is null") { Monitor.parse(parser(monitorStringWithoutName)) } - } - - fun `test monitor parsing with no schedule`() { - val monitorStringWithoutSchedule = """ - { - "type": "monitor", - "name": "asdf", - "enabled": false, - "inputs": [], - "triggers": [] - } - """.trimIndent() - - assertFailsWith("Monitor schedule is null") { - Monitor.parse(parser(monitorStringWithoutSchedule)) - } - } - - fun `test bucket-level monitor parsing`() { - val monitor = randomBucketLevelMonitor() - - val monitorString = monitor.toJsonStringWithUser() - val parsedMonitor = Monitor.parse(parser(monitorString)) - assertEquals("Round tripping BucketLevelMonitor doesn't work", monitor, parsedMonitor) - } - - fun `test query-level trigger parsing`() { - val trigger = randomQueryLevelTrigger() - - val triggerString = trigger.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedTrigger = Trigger.parse(parser(triggerString)) - - 
assertEquals("Round tripping QueryLevelTrigger doesn't work", trigger, parsedTrigger) - } - - fun `test bucket-level trigger parsing`() { - val trigger = randomBucketLevelTrigger() - - val triggerString = trigger.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedTrigger = Trigger.parse(parser(triggerString)) - - assertEquals("Round tripping BucketLevelTrigger doesn't work", trigger, parsedTrigger) - } - fun `test alert parsing`() { val alert = randomAlert() @@ -215,41 +63,6 @@ class XContentTests : OpenSearchTestCase() { assertEquals("Round tripping alert doesn't work", actionExecutionResult, parsedActionExecutionResultString) } - fun `test creating a monitor with duplicate trigger ids fails`() { - try { - val repeatedTrigger = randomQueryLevelTrigger() - randomQueryLevelMonitor().copy(triggers = listOf(repeatedTrigger, repeatedTrigger)) - fail("Creating a monitor with duplicate triggers did not fail.") - } catch (ignored: IllegalArgumentException) { - } - } - - fun `test user parsing`() { - val user = randomUser() - val userString = user.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedUser = User.parse(parser(userString)) - assertEquals("Round tripping user doesn't work", user, parsedUser) - } - - fun `test empty user parsing`() { - val user = randomUserEmpty() - val userString = user.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - - val parsedUser = User.parse(parser(userString)) - assertEquals("Round tripping user doesn't work", user, parsedUser) - assertEquals("", parsedUser.name) - assertEquals(0, parsedUser.roles.size) - } - - fun `test query-level monitor parsing without user`() { - val monitor = randomQueryLevelMonitorWithoutUser() - - val monitorString = monitor.toJsonString() - val parsedMonitor = Monitor.parse(parser(monitorString)) - assertEquals("Round tripping QueryLevelMonitor doesn't work", monitor, parsedMonitor) - assertNull(parsedMonitor.user) - } - fun `test email account parsing`() { val emailAccount = randomEmailAccount() @@ -266,122 +79,6 @@ class XContentTests : OpenSearchTestCase() { assertEquals("Round tripping EmailGroup doesn't work", emailGroup, parsedEmailGroup) } - fun `test old monitor format parsing`() { - val monitorString = """ - { - "type": "monitor", - "schema_version": 3, - "name": "asdf", - "user": { - "name": "admin123", - "backend_roles": [], - "roles": [ - "all_access", - "security_manager" - ], - "custom_attribute_names": [], - "user_requested_tenant": null - }, - "enabled": true, - "enabled_time": 1613530078244, - "schedule": { - "period": { - "interval": 1, - "unit": "MINUTES" - } - }, - "inputs": [ - { - "search": { - "indices": [ - "test_index" - ], - "query": { - "size": 0, - "query": { - "bool": { - "filter": [ - { - "range": { - "order_date": { - "from": "{{period_end}}||-1h", - "to": "{{period_end}}", - "include_lower": true, - "include_upper": true, - "format": "epoch_millis", - "boost": 1.0 - } - } - } - ], - "adjust_pure_negative": true, - "boost": 1.0 - } - }, - "aggregations": {} - } - } - } - ], - "triggers": [ - { - "id": "e_sc0XcB98Q42rHjTh4K", - "name": "abc", - "severity": "1", - "condition": { - "script": { - "source": "ctx.results[0].hits.total.value > 100000", - "lang": "painless" - } - }, - "actions": [] - } - ], - "last_update_time": 1614121489719 - } - """.trimIndent() - val parsedMonitor = Monitor.parse(parser(monitorString)) - assertEquals("Incorrect monitor type", Monitor.MonitorType.QUERY_LEVEL_MONITOR, parsedMonitor.monitorType) - assertEquals("Incorrect trigger count", 
1, parsedMonitor.triggers.size) - val trigger = parsedMonitor.triggers.first() - assertTrue("Incorrect trigger type", trigger is QueryLevelTrigger) - assertEquals("Incorrect name for parsed trigger", "abc", trigger.name) - } - - fun `test creating an query-level monitor with invalid trigger type fails`() { - try { - val bucketLevelTrigger = randomBucketLevelTrigger() - randomQueryLevelMonitor().copy(triggers = listOf(bucketLevelTrigger)) - fail("Creating a query-level monitor with bucket-level triggers did not fail.") - } catch (ignored: IllegalArgumentException) { - } - } - - fun `test creating an bucket-level monitor with invalid trigger type fails`() { - try { - val queryLevelTrigger = randomQueryLevelTrigger() - randomBucketLevelMonitor().copy(triggers = listOf(queryLevelTrigger)) - fail("Creating a bucket-level monitor with query-level triggers did not fail.") - } catch (ignored: IllegalArgumentException) { - } - } - - fun `test creating an bucket-level monitor with invalid input fails`() { - try { - val invalidInput = SearchInput(emptyList(), SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - randomBucketLevelMonitor().copy(inputs = listOf(invalidInput)) - fail("Creating an bucket-level monitor with an invalid input did not fail.") - } catch (ignored: IllegalArgumentException) { - } - } - - fun `test action execution policy`() { - val actionExecutionPolicy = randomActionExecutionPolicy() - val actionExecutionPolicyString = actionExecutionPolicy.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedActionExecutionPolicy = ActionExecutionPolicy.parse(parser(actionExecutionPolicyString)) - assertEquals("Round tripping ActionExecutionPolicy doesn't work", actionExecutionPolicy, parsedActionExecutionPolicy) - } - fun `test MonitorMetadata`() { val monitorMetadata = MonitorMetadata("monitorId-metadata", "monitorId", emptyList(), emptyMap()) val monitorMetadataString = monitorMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt index ebaf45a41..dc61468a8 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt @@ -7,10 +7,10 @@ package org.opensearch.alerting.resthandler import org.opensearch.alerting.ALWAYS_RUN import org.opensearch.alerting.AlertingRestTestCase -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery import org.opensearch.alerting.randomDocumentLevelMonitor import org.opensearch.alerting.randomDocumentLevelTrigger +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.test.junit.annotations.TestLogging @TestLogging("level:DEBUG", reason = "Debug for tests.") diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt index 66804bfb1..02fceaf3a 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt @@ -15,17 +15,8 @@ import org.opensearch.alerting.AlertingRestTestCase import 
org.opensearch.alerting.LEGACY_OPENDISTRO_ALERTING_BASE_URI import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.anomalyDetectorIndexMapping -import org.opensearch.alerting.core.model.CronSchedule -import org.opensearch.alerting.core.model.DocLevelMonitorInput -import org.opensearch.alerting.core.model.DocLevelQuery -import org.opensearch.alerting.core.model.ScheduledJob -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.core.settings.ScheduledJobSettings import org.opensearch.alerting.makeRequest -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.DocumentLevelTrigger -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.QueryLevelTrigger import org.opensearch.alerting.model.destination.Chime import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.randomADMonitor @@ -50,6 +41,15 @@ import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.CronSchedule +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestStatus import org.opensearch.script.Script diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt index 151be8d4d..9888578d0 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt @@ -30,9 +30,7 @@ import org.opensearch.alerting.TEST_HR_INDEX import org.opensearch.alerting.TEST_HR_ROLE import org.opensearch.alerting.TEST_NON_HR_INDEX import org.opensearch.alerting.assertUserNull -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.makeRequest -import org.opensearch.alerting.model.Alert import org.opensearch.alerting.randomAction import org.opensearch.alerting.randomAlert import org.opensearch.alerting.randomQueryLevelMonitor @@ -45,6 +43,8 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.commons.authuser.User import org.opensearch.commons.rest.SecureRestClientBuilder import org.opensearch.index.query.QueryBuilders diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index 54fdabaa3..b1f3da648 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ 
b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -10,28 +10,25 @@ import org.opensearch.action.admin.indices.refresh.RefreshAction import org.opensearch.action.admin.indices.refresh.RefreshRequest import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.AlertingPlugin -import org.opensearch.alerting.action.DeleteMonitorAction -import org.opensearch.alerting.action.DeleteMonitorRequest import org.opensearch.alerting.action.ExecuteMonitorAction import org.opensearch.alerting.action.ExecuteMonitorRequest import org.opensearch.alerting.action.ExecuteMonitorResponse -import org.opensearch.alerting.action.GetFindingsAction -import org.opensearch.alerting.action.GetFindingsRequest -import org.opensearch.alerting.action.GetFindingsResponse import org.opensearch.alerting.action.GetMonitorAction import org.opensearch.alerting.action.GetMonitorRequest -import org.opensearch.alerting.action.IndexMonitorAction -import org.opensearch.alerting.action.IndexMonitorRequest -import org.opensearch.alerting.action.IndexMonitorResponse import org.opensearch.alerting.alerts.AlertIndices -import org.opensearch.alerting.model.Alert -import org.opensearch.alerting.model.Finding -import org.opensearch.alerting.model.Monitor -import org.opensearch.alerting.model.Table import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetFindingsRequest +import org.opensearch.commons.alerting.action.GetFindingsResponse +import org.opensearch.commons.alerting.action.IndexMonitorRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.Table import org.opensearch.index.query.TermQueryBuilder import org.opensearch.index.reindex.ReindexPlugin import org.opensearch.index.seqno.SequenceNumbers @@ -89,7 +86,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { method = RestRequest.Method.POST, monitor = monitor ) - return client().execute(IndexMonitorAction.INSTANCE, request).actionGet() + return client().execute(AlertingActions.INDEX_MONITOR_ACTION_TYPE, request).actionGet() } protected fun updateMonitor(monitor: Monitor, monitorId: String): IndexMonitorResponse? 
{ @@ -101,7 +98,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { method = RestRequest.Method.PUT, monitor = monitor ) - return client().execute(IndexMonitorAction.INSTANCE, request).actionGet() + return client().execute(AlertingActions.INDEX_MONITOR_ACTION_TYPE, request).actionGet() } protected fun searchAlerts(id: String, indices: String = AlertIndices.ALERT_INDEX, refresh: Boolean = true): List<Alert> { @@ -156,7 +153,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { monitorId, findingIndexName ) - val getFindingsResponse: GetFindingsResponse = client().execute(GetFindingsAction.INSTANCE, getFindingsRequest).get() + val getFindingsResponse: GetFindingsResponse = client().execute(AlertingActions.GET_FINDINGS_ACTION_TYPE, getFindingsRequest).get() return getFindingsResponse.findings.map { it.finding }.toList() } @@ -170,13 +167,6 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { GetMonitorRequest(monitorId, version, RestRequest.Method.GET, fetchSourceContext) ).get() - protected fun deleteMonitor( - monitorId: String, - ) = client().execute( - DeleteMonitorAction.INSTANCE, - DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() - override fun getPlugins(): List<Class<out Plugin>> { return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt index 134073485..68f6ea33b 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt @@ -6,8 +6,8 @@ package org.opensearch.alerting.triggeraction import org.junit.Assert -import org.opensearch.alerting.core.model.DocLevelQuery import org.opensearch.alerting.triggercondition.parsers.TriggerExpressionParser +import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.test.OpenSearchTestCase class TriggerExpressionResolverTests : OpenSearchTestCase() { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt index 20ec8f983..a7fededf6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt @@ -8,7 +8,6 @@ package org.opensearch.alerting.util import org.junit.Assert import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.model.InputRunResults -import org.opensearch.alerting.model.Trigger import org.opensearch.alerting.model.TriggerAfterKey import org.opensearch.alerting.randomBucketLevelTrigger import org.opensearch.alerting.randomBucketSelectorExtAggregationBuilder @@ -19,6 +18,7 @@ import org.opensearch.common.ParseField import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.commons.alerting.model.Trigger import org.opensearch.search.aggregations.Aggregation import org.opensearch.search.aggregations.AggregationBuilder import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder diff --git
a/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt index 931a24f35..27b689d37 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt @@ -6,12 +6,12 @@ package org.opensearch.alerting.util import org.opensearch.alerting.ANOMALY_RESULT_INDEX -import org.opensearch.alerting.core.model.Input -import org.opensearch.alerting.core.model.SearchInput import org.opensearch.alerting.randomQueryLevelMonitor import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.model.Input +import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.commons.authuser.User import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt index 8dd942de3..7997f246c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt @@ -6,7 +6,6 @@ package org.opensearch.alerting.util.destinationmigration import org.opensearch.alerting.AlertingRestTestCase -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.alerting.makeRequest import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.model.destination.email.Email @@ -15,9 +14,9 @@ import org.opensearch.alerting.model.destination.email.EmailEntry import org.opensearch.alerting.model.destination.email.EmailGroup import org.opensearch.alerting.model.destination.email.Recipient import org.opensearch.alerting.randomUser -import org.opensearch.alerting.toJsonString import org.opensearch.alerting.util.DestinationType import org.opensearch.client.ResponseException +import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX import org.opensearch.rest.RestStatus import java.time.Instant import java.util.UUID diff --git a/build.gradle b/build.gradle index 5caf41139..6cb2b83e6 100644 --- a/build.gradle +++ b/build.gradle @@ -7,10 +7,10 @@ buildscript { apply from: 'build-tools/repositories.gradle' ext { - opensearch_version = System.getProperty("opensearch.version", "2.2.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.4.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") - // 2.2.0-SNAPSHOT -> 2.2.0.0-SNAPSHOT + // 2.4.0-SNAPSHOT -> 2.4.0.0-SNAPSHOT version_tokens = opensearch_version.tokenize('-') opensearch_build = version_tokens[0] + '.0' plugin_no_snapshot = opensearch_build diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt b/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt index 381b35309..c251c8c6a 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt +++ 
b/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt @@ -5,7 +5,7 @@ package org.opensearch.alerting.core -import org.opensearch.alerting.core.model.ScheduledJob +import org.opensearch.commons.alerting.model.ScheduledJob import java.time.Instant interface JobRunner { diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt b/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt index 1a82cbc0f..a64bfb053 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt @@ -8,7 +8,6 @@ package org.opensearch.alerting.core import org.apache.logging.log4j.LogManager import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.action.search.SearchRequest -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.alerting.core.schedule.JobScheduler import org.opensearch.alerting.core.settings.ScheduledJobSettings.Companion.REQUEST_TIMEOUT import org.opensearch.alerting.core.settings.ScheduledJobSettings.Companion.SWEEPER_ENABLED @@ -38,6 +37,7 @@ import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.index.engine.Engine import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.QueryBuilders diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt b/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt index 621e2361f..b1d9f6d64 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt @@ -8,11 +8,11 @@ package org.opensearch.alerting.core import org.opensearch.action.ActionListener import org.opensearch.action.admin.indices.create.CreateIndexRequest import org.opensearch.action.admin.indices.create.CreateIndexResponse -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.client.AdminClient import org.opensearch.cluster.health.ClusterIndexHealth import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.model.ScheduledJob /** * Initialize the OpenSearch components required to run [ScheduledJobs]. 
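Note on the test-harness hunks above: the plugin-local IndexMonitorAction / GetFindingsAction singletons are removed, and AlertingSingleNodeTestCase now goes through the shared ActionType constants on org.opensearch.commons.alerting.action.AlertingActions, with the request, response, and model classes coming from common-utils. Below is a minimal sketch of the new call pattern, assuming a Client handle and a Monitor built elsewhere, and assuming the commons IndexMonitorRequest keeps the constructor shape of the removed plugin-local request (id, seqNo, primaryTerm, refresh policy, REST method, monitor), which is what the unchanged helper bodies in this diff rely on.

    import org.opensearch.action.support.WriteRequest
    import org.opensearch.client.Client
    import org.opensearch.commons.alerting.action.AlertingActions
    import org.opensearch.commons.alerting.action.IndexMonitorRequest
    import org.opensearch.commons.alerting.action.IndexMonitorResponse
    import org.opensearch.commons.alerting.model.Monitor
    import org.opensearch.index.seqno.SequenceNumbers
    import org.opensearch.rest.RestRequest

    // Index a monitor through the shared action type from common-utils instead of
    // the removed plugin-local IndexMonitorAction.INSTANCE.
    fun indexMonitor(client: Client, monitor: Monitor): IndexMonitorResponse {
        val request = IndexMonitorRequest(
            "",                                      // monitorId: empty for a new monitor (illustrative)
            SequenceNumbers.UNASSIGNED_SEQ_NO,
            SequenceNumbers.UNASSIGNED_PRIMARY_TERM,
            WriteRequest.RefreshPolicy.IMMEDIATE,
            RestRequest.Method.POST,
            monitor
        )
        return client.execute(AlertingActions.INDEX_MONITOR_ACTION_TYPE, request).actionGet()
    }

The same pattern covers findings lookups: client.execute(AlertingActions.GET_FINDINGS_ACTION_TYPE, getFindingsRequest).get(), with GetFindingsRequest and GetFindingsResponse imported from org.opensearch.commons.alerting.action, as the updated imports in this diff show.
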
diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/ClusterMetricsInput.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/ClusterMetricsInput.kt deleted file mode 100644 index 04e0efaeb..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/ClusterMetricsInput.kt +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.apache.commons.validator.routines.UrlValidator -import org.apache.http.client.utils.URIBuilder -import org.opensearch.common.CheckedFunction -import org.opensearch.common.ParseField -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import java.io.IOException -import java.net.URI - -val ILLEGAL_PATH_PARAMETER_CHARACTERS = arrayOf(':', '"', '+', '\\', '|', '?', '#', '>', '<', ' ') - -/** - * This is a data class for a URI type of input for Monitors specifically for local clusters. - */ -data class ClusterMetricsInput( - var path: String, - var pathParams: String = "", - var url: String -) : Input { - val clusterMetricType: ClusterMetricType - val constructedUri: URI - - // Verify parameters are valid during creation - init { - require(validateFields()) { - "The uri.api_type field, uri.path field, or uri.uri field must be defined." - } - - // Create an UrlValidator that only accepts "http" and "https" as valid scheme and allows local URLs. - val urlValidator = UrlValidator(arrayOf("http", "https"), UrlValidator.ALLOW_LOCAL_URLS) - - // Build url field by field if not provided as whole. - constructedUri = toConstructedUri() - - require(urlValidator.isValid(constructedUri.toString())) { - "Invalid URI constructed from the path and path_params inputs, or the url input." - } - - if (url.isNotEmpty() && validateFieldsNotEmpty()) - require(constructedUri == constructUrlFromInputs()) { - "The provided URL and URI fields form different URLs." - } - - require(constructedUri.host.lowercase() == SUPPORTED_HOST) { - "Only host '$SUPPORTED_HOST' is supported." - } - require(constructedUri.port == SUPPORTED_PORT) { - "Only port '$SUPPORTED_PORT' is supported." 
- } - - clusterMetricType = findApiType(constructedUri.path) - this.parseEmptyFields() - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // path - sin.readString(), // path params - sin.readString() // url - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .startObject(URI_FIELD) - .field(API_TYPE_FIELD, clusterMetricType) - .field(PATH_FIELD, path) - .field(PATH_PARAMS_FIELD, pathParams) - .field(URL_FIELD, url) - .endObject() - .endObject() - } - - override fun name(): String { - return URI_FIELD - } - - override fun writeTo(out: StreamOutput) { - out.writeString(clusterMetricType.toString()) - out.writeString(path) - out.writeString(pathParams) - out.writeString(url) - } - - companion object { - const val SUPPORTED_SCHEME = "http" - const val SUPPORTED_HOST = "localhost" - const val SUPPORTED_PORT = 9200 - - const val API_TYPE_FIELD = "api_type" - const val PATH_FIELD = "path" - const val PATH_PARAMS_FIELD = "path_params" - const val URL_FIELD = "url" - const val URI_FIELD = "uri" - - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(Input::class.java, ParseField(URI_FIELD), CheckedFunction { parseInner(it) }) - - /** - * This parse function uses [XContentParser] to parse JSON input and store corresponding fields to create a [ClusterMetricsInput] object - */ - @JvmStatic @Throws(IOException::class) - fun parseInner(xcp: XContentParser): ClusterMetricsInput { - var path = "" - var pathParams = "" - var url = "" - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - when (fieldName) { - PATH_FIELD -> path = xcp.text() - PATH_PARAMS_FIELD -> pathParams = xcp.text() - URL_FIELD -> url = xcp.text() - } - } - return ClusterMetricsInput(path, pathParams, url) - } - } - - /** - * Constructs the [URI] using either the provided [url], or the - * supported scheme, host, and port and provided [path]+[pathParams]. - * @return The [URI] constructed from [url] if it's defined; - * otherwise a [URI] constructed from the provided [URI] fields. - */ - private fun toConstructedUri(): URI { - return if (url.isEmpty()) { - constructUrlFromInputs() - } else { - URIBuilder(url).build() - } - } - - /** - * Isolates just the path parameters from the [ClusterMetricsInput] URI. - * @return The path parameters portion of the [ClusterMetricsInput] URI. - * @throws IllegalArgumentException if the [ClusterMetricType] requires path parameters, but none are supplied; - * or when path parameters are provided for an [ClusterMetricType] that does not use path parameters. - */ - fun parsePathParams(): String { - val path = this.constructedUri.path - val apiType = this.clusterMetricType - - var pathParams: String - if (this.pathParams.isNotEmpty()) { - pathParams = this.pathParams - } else { - val prependPath = if (apiType.supportsPathParams) apiType.prependPath else apiType.defaultPath - pathParams = path.removePrefix(prependPath) - pathParams = pathParams.removeSuffix(apiType.appendPath) - } - - if (pathParams.isNotEmpty()) { - pathParams = pathParams.trim('/') - ILLEGAL_PATH_PARAMETER_CHARACTERS.forEach { character -> - if (pathParams.contains(character)) - throw IllegalArgumentException( - "The provided path parameters contain invalid characters or spaces. 
Please omit: " + - "${ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString(" ")}" - ) - } - } - - if (apiType.requiresPathParams && pathParams.isEmpty()) - throw IllegalArgumentException("The API requires path parameters.") - if (!apiType.supportsPathParams && pathParams.isNotEmpty()) - throw IllegalArgumentException("The API does not use path parameters.") - - return pathParams - } - - /** - * Examines the path of a [ClusterMetricsInput] to determine which API is being called. - * @param uriPath The path to examine. - * @return The [ClusterMetricType] associated with the [ClusterMetricsInput] monitor. - * @throws IllegalArgumentException when the API to call cannot be determined from the URI. - */ - private fun findApiType(uriPath: String): ClusterMetricType { - var apiType = ClusterMetricType.BLANK - ClusterMetricType.values() - .filter { option -> option != ClusterMetricType.BLANK } - .forEach { option -> - if (uriPath.startsWith(option.prependPath) || uriPath.startsWith(option.defaultPath)) - apiType = option - } - if (apiType.isBlank()) - throw IllegalArgumentException("The API could not be determined from the provided URI.") - return apiType - } - - /** - * Constructs a [URI] from the supported scheme, host, and port, and the provided [path], and [pathParams]. - * @return The constructed [URI]. - */ - private fun constructUrlFromInputs(): URI { - val uriBuilder = URIBuilder() - .setScheme(SUPPORTED_SCHEME) - .setHost(SUPPORTED_HOST) - .setPort(SUPPORTED_PORT) - .setPath(path + pathParams) - return uriBuilder.build() - } - - /** - * If [url] field is empty, populates it with [constructedUri]. - * If [path] and [pathParams] are empty, populates them with values from [url]. - */ - private fun parseEmptyFields() { - if (pathParams.isEmpty()) - pathParams = this.parsePathParams() - if (path.isEmpty()) - path = if (pathParams.isEmpty()) clusterMetricType.defaultPath else clusterMetricType.prependPath - if (url.isEmpty()) - url = constructedUri.toString() - } - - /** - * Helper function to confirm at least [url], or required URI component fields are defined. - * @return TRUE if at least either [url] or the other components are provided; otherwise FALSE. - */ - private fun validateFields(): Boolean { - return url.isNotEmpty() || validateFieldsNotEmpty() - } - - /** - * Confirms that required URI component fields are defined. - * Only validating path for now, as that's the only required field. - * @return TRUE if all those fields are defined; otherwise FALSE. - */ - private fun validateFieldsNotEmpty(): Boolean { - return path.isNotEmpty() - } - - /** - * An enum class to quickly reference various supported API. 
- */ - enum class ClusterMetricType( - val defaultPath: String, - val prependPath: String, - val appendPath: String, - val supportsPathParams: Boolean, - val requiresPathParams: Boolean - ) { - BLANK("", "", "", false, false), - CAT_PENDING_TASKS( - "/_cat/pending_tasks", - "/_cat/pending_tasks", - "", - false, - false - ), - CAT_RECOVERY( - "/_cat/recovery", - "/_cat/recovery", - "", - true, - false - ), - CAT_SNAPSHOTS( - "/_cat/snapshots", - "/_cat/snapshots", - "", - true, - true - ), - CAT_TASKS( - "/_cat/tasks", - "/_cat/tasks", - "", - false, - false - ), - CLUSTER_HEALTH( - "/_cluster/health", - "/_cluster/health", - "", - true, - false - ), - CLUSTER_SETTINGS( - "/_cluster/settings", - "/_cluster/settings", - "", - false, - false - ), - CLUSTER_STATS( - "/_cluster/stats", - "/_cluster/stats", - "", - true, - false - ), - NODES_STATS( - "/_nodes/stats", - "/_nodes", - "", - false, - false - ); - - /** - * @return TRUE if the [ClusterMetricType] is [BLANK]; otherwise FALSE. - */ - fun isBlank(): Boolean { - return this === BLANK - } - } -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/DocLevelMonitorInput.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/DocLevelMonitorInput.kt deleted file mode 100644 index fbeba6007..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/DocLevelMonitorInput.kt +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.opensearch.common.CheckedFunction -import org.opensearch.common.ParseField -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException - -data class DocLevelMonitorInput( - val description: String = NO_DESCRIPTION, - val indices: List, - val queries: List -) : Input { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // description - sin.readStringList(), // indices - sin.readList(::DocLevelQuery) // docLevelQueries - ) - - fun asTemplateArg(): Map { - return mapOf( - DESCRIPTION_FIELD to description, - INDICES_FIELD to indices, - QUERIES_FIELD to queries.map { it.asTemplateArg() } - ) - } - - override fun name(): String { - return DOC_LEVEL_INPUT_FIELD - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(description) - out.writeStringCollection(indices) - out.writeCollection(queries) - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(DOC_LEVEL_INPUT_FIELD) - .field(DESCRIPTION_FIELD, description) - .field(INDICES_FIELD, indices.toTypedArray()) - .field(QUERIES_FIELD, queries.toTypedArray()) - .endObject() - .endObject() - return builder - } - - companion object { - const val DESCRIPTION_FIELD = "description" - const val INDICES_FIELD = "indices" - const val DOC_LEVEL_INPUT_FIELD = "doc_level_input" - const val QUERIES_FIELD = "queries" - - const val NO_DESCRIPTION = "" - - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - Input::class.java, - ParseField(DOC_LEVEL_INPUT_FIELD), CheckedFunction { parse(it) } - ) - - 
@JvmStatic @Throws(IOException::class) - fun parse(xcp: XContentParser): DocLevelMonitorInput { - var description: String = NO_DESCRIPTION - val indices: MutableList = mutableListOf() - val docLevelQueries: MutableList = mutableListOf() - - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - DESCRIPTION_FIELD -> description = xcp.text() - INDICES_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - indices.add(xcp.text()) - } - } - QUERIES_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - docLevelQueries.add(DocLevelQuery.parse(xcp)) - } - } - } - } - - return DocLevelMonitorInput(description = description, indices = indices, queries = docLevelQueries) - } - - @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): DocLevelMonitorInput { - return DocLevelMonitorInput(sin) - } - } -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/DocLevelQuery.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/DocLevelQuery.kt deleted file mode 100644 index 06d6c480b..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/DocLevelQuery.kt +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import com.google.common.collect.ImmutableList -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.lang.IllegalArgumentException -import java.util.UUID - -data class DocLevelQuery( - val id: String = UUID.randomUUID().toString(), - val name: String, - val query: String, - val tags: List = mutableListOf() -) : Writeable, ToXContentObject { - - init { - // Ensure the name and tags have valid characters - validateQuery(name) - for (tag in tags) { - validateQuery(tag) - } - } - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // id - sin.readString(), // name - sin.readString(), // query - sin.readStringList() // tags - ) - - fun asTemplateArg(): Map { - return mapOf( - QUERY_ID_FIELD to id, - NAME_FIELD to name, - QUERY_FIELD to query, - TAGS_FIELD to tags - ) - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(id) - out.writeString(name) - out.writeString(query) - out.writeStringCollection(tags) - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .field(QUERY_ID_FIELD, id) - .field(NAME_FIELD, name) - .field(QUERY_FIELD, query) - .field(TAGS_FIELD, tags.toTypedArray()) - .endObject() - return builder - } - - companion object { - const val QUERY_ID_FIELD = "id" - const val NAME_FIELD = "name" - const val QUERY_FIELD = "query" - const val TAGS_FIELD = "tags" - const val NO_ID = "" - 
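As a reading aid (an illustrative sketch, not code from this repository), the document-level input payload that DocLevelMonitorInput.toXContent above emits, and that DocLevelMonitorInput.parse and DocLevelQuery.parse consume, looks roughly like the following; the index, query, and tag values are made up:

// Hypothetical example payload; field names come from the constants above,
// the concrete values are illustrative only.
val exampleDocLevelInput = """
    {
      "doc_level_input": {
        "description": "example doc-level monitor input",
        "indices": ["example-index"],
        "queries": [
          { "id": "example-id", "name": "example_query", "query": "field:value", "tags": ["example_tag"] }
        ]
      }
    }
""".trimIndent()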
val INVALID_CHARACTERS: ImmutableList = ImmutableList.of(" ", "[", "]", "{", "}", "(", ")") - - @JvmStatic @Throws(IOException::class) - fun parse(xcp: XContentParser): DocLevelQuery { - var id: String = UUID.randomUUID().toString() - lateinit var query: String - lateinit var name: String - val tags: MutableList = mutableListOf() - - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - QUERY_ID_FIELD -> id = xcp.text() - NAME_FIELD -> { - name = xcp.text() - validateQuery(name) - } - QUERY_FIELD -> query = xcp.text() - TAGS_FIELD -> { - ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - val tag = xcp.text() - validateQuery(tag) - tags.add(tag) - } - } - } - } - - return DocLevelQuery( - id = id, - name = name, - query = query, - tags = tags - ) - } - - @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): DocLevelQuery { - return DocLevelQuery(sin) - } - - // TODO: add test for this - private fun validateQuery(stringVal: String) { - for (inValidChar in INVALID_CHARACTERS) { - if (stringVal.contains(inValidChar)) { - throw IllegalArgumentException( - "They query name or tag, $stringVal, contains an invalid character: [' ','[',']','{','}','(',')']" - ) - } - } - } - } -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/Input.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/Input.kt deleted file mode 100644 index 06d351fb8..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/Input.kt +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.opensearch.alerting.core.model.ClusterMetricsInput.Companion.URI_FIELD -import org.opensearch.alerting.core.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD -import org.opensearch.alerting.core.model.SearchInput.Companion.SEARCH_FIELD -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException - -interface Input : Writeable, ToXContentObject { - - enum class Type(val value: String) { - DOCUMENT_LEVEL_INPUT(DOC_LEVEL_INPUT_FIELD), - CLUSTER_METRICS_INPUT(URI_FIELD), - SEARCH_INPUT(SEARCH_FIELD); - - override fun toString(): String { - return value - } - } - - companion object { - - @Throws(IOException::class) - fun parse(xcp: XContentParser): Input { - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - ensureExpectedToken(Token.FIELD_NAME, xcp.nextToken(), xcp) - ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) - val input = if (xcp.currentName() == Type.SEARCH_INPUT.value) { - SearchInput.parseInner(xcp) - } else if (xcp.currentName() == Type.CLUSTER_METRICS_INPUT.value) { - ClusterMetricsInput.parseInner(xcp) - } else { - DocLevelMonitorInput.parse(xcp) - } - ensureExpectedToken(Token.END_OBJECT, xcp.nextToken(), xcp) - return input - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Input { - return when (val type = 
sin.readEnum(Input.Type::class.java)) { - Type.DOCUMENT_LEVEL_INPUT -> DocLevelMonitorInput(sin) - Type.CLUSTER_METRICS_INPUT -> ClusterMetricsInput(sin) - Type.SEARCH_INPUT -> SearchInput(sin) - // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns - // enum can be null in Java - else -> throw IllegalStateException("Unexpected input [$type] when reading Trigger") - } - } - } - - fun name(): String -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/Schedule.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/Schedule.kt deleted file mode 100644 index 7867dee07..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/Schedule.kt +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import com.cronutils.model.CronType -import com.cronutils.model.definition.CronDefinitionBuilder -import com.cronutils.model.time.ExecutionTime -import com.cronutils.parser.CronParser -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.time.DateTimeException -import java.time.Duration -import java.time.Instant -import java.time.ZoneId -import java.time.ZonedDateTime -import java.time.temporal.ChronoUnit -import java.time.zone.ZoneRulesException -import java.util.Locale - -sealed class Schedule : Writeable, ToXContentObject { - enum class TYPE { CRON, INTERVAL } - companion object { - const val CRON_FIELD = "cron" - const val EXPRESSION_FIELD = "expression" - const val TIMEZONE_FIELD = "timezone" - const val PERIOD_FIELD = "period" - const val INTERVAL_FIELD = "interval" - const val UNIT_FIELD = "unit" - - val cronParser = CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX)) - - @JvmStatic @Throws(IOException::class) - fun parse(xcp: XContentParser): Schedule { - var expression: String? = null - var timezone: ZoneId? = null - var interval: Int? = null - var unit: ChronoUnit? = null - var schedule: Schedule? = null - var type: TYPE? = null - ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldname = xcp.currentName() - xcp.nextToken() - // If the type field has already been set the customer has provide more than one type of schedule. 
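// Illustrative aside, not part of the original file: the duplicate-type check
// below rejects documents that mix the two mutually exclusive shapes this
// parser accepts. With arbitrary example values they look like:
val exampleCronDoc = """{"cron":{"expression":"0 * * * *","timezone":"Asia/Tokyo"}}"""
val examplePeriodDoc = """{"period":{"interval":1,"unit":"MINUTES"}}"""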
- if (type != null) { - throw IllegalArgumentException("You can only specify one type of schedule.") - } - when (fieldname) { - CRON_FIELD -> { - type = TYPE.CRON - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val cronFieldName = xcp.currentName() - xcp.nextToken() - when (cronFieldName) { - EXPRESSION_FIELD -> expression = xcp.textOrNull() - TIMEZONE_FIELD -> timezone = getTimeZone(xcp.text()) - } - } - } - PERIOD_FIELD -> { - type = TYPE.INTERVAL - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val cronFieldName = xcp.currentName() - xcp.nextToken() - when (cronFieldName) { - INTERVAL_FIELD -> interval = xcp.intValue() - UNIT_FIELD -> unit = ChronoUnit.valueOf(xcp.text().uppercase(Locale.getDefault())) - } - } - } - else -> { - throw IllegalArgumentException("Invalid field: [$fieldname] found in schedule.") - } - } - } - if (type == TYPE.CRON) { - schedule = CronSchedule( - requireNotNull(expression) { "Expression in cron schedule is null." }, - requireNotNull(timezone) { "Timezone in cron schedule is null." } - ) - } else if (type == TYPE.INTERVAL) { - schedule = IntervalSchedule( - requireNotNull(interval) { "Interval in period schedule is null." }, - requireNotNull(unit) { "Unit in period schedule is null." } - ) - } - return requireNotNull(schedule) { "Schedule is null." } - } - - @JvmStatic @Throws(IllegalArgumentException::class) - private fun getTimeZone(timeZone: String): ZoneId { - try { - return ZoneId.of(timeZone) - } catch (zre: ZoneRulesException) { - throw IllegalArgumentException("Timezone $timeZone is not supported") - } catch (dte: DateTimeException) { - throw IllegalArgumentException("Timezone $timeZone is not supported") - } - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): Schedule { - val type = sin.readEnum(Schedule.TYPE::class.java) - if (type == Schedule.TYPE.CRON) - return CronSchedule(sin) - else - return IntervalSchedule(sin) - } - } - - /** - * @param enabledTime is used in IntervalSchedule to calculate next time to execute the schedule. - */ - abstract fun nextTimeToExecute(enabledTime: Instant): Duration? - - /** - * @param expectedPreviousExecutionTime is the calculated previous execution time that should always be correct, - * the first time this is called the value passed in is the enabledTime which acts as the expectedPreviousExecutionTime - */ - abstract fun getExpectedNextExecutionTime(enabledTime: Instant, expectedPreviousExecutionTime: Instant?): Instant? - - /** - * Returns the start and end time for this schedule starting at the given start time (if provided). - * If not, the start time is assumed to be the last time the Schedule would have executed (if it's a Cron schedule) - * or [Instant.now] if it's an interval schedule. - * - * If this is a schedule that runs only once this function will return [Instant.now] for both start and end time. - */ - abstract fun getPeriodStartingAt(startTime: Instant?): Pair - - /** - * Returns the start and end time for this schedule ending at the given end time (if provided). - * If not, the end time is assumed to be the next time the Schedule would have executed (if it's a Cron schedule) - * or [Instant.now] if it's an interval schedule. - * - * If this is a schedule that runs only once this function will return [Instant.now] for both start and end time. 
- */ - abstract fun getPeriodEndingAt(endTime: Instant?): Pair - - abstract fun runningOnTime(lastExecutionTime: Instant?): Boolean -} - -/** - * @param testInstant Normally this not be set and it should only be used in unit test to control time. - */ -data class CronSchedule( - val expression: String, - val timezone: ZoneId, - // visible for testing - @Transient val testInstant: Instant? = null -) : Schedule() { - @Transient - val executionTime: ExecutionTime = ExecutionTime.forCron(cronParser.parse(expression)) - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readString(), // expression - sin.readZoneId() // timezone - ) - - companion object { - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): CronSchedule { - return CronSchedule(sin) - } - } - - /* - * @param enabledTime is not used in CronSchedule. - */ - override fun nextTimeToExecute(enabledTime: Instant): Duration? { - val zonedDateTime = ZonedDateTime.ofInstant(testInstant ?: Instant.now(), timezone) - val timeToNextExecution = executionTime.timeToNextExecution(zonedDateTime) - return timeToNextExecution.orElse(null) - } - - override fun getExpectedNextExecutionTime(enabledTime: Instant, expectedPreviousExecutionTime: Instant?): Instant? { - val zonedDateTime = ZonedDateTime.ofInstant(expectedPreviousExecutionTime ?: testInstant ?: Instant.now(), timezone) - val nextExecution = executionTime.nextExecution(zonedDateTime) - return nextExecution.orElse(null)?.toInstant() - } - - override fun getPeriodStartingAt(startTime: Instant?): Pair { - val realStartTime = if (startTime != null) { - startTime - } else { - // Probably the first time we're running. Try to figure out the last execution time - val lastExecutionTime = executionTime.lastExecution(ZonedDateTime.now(timezone)) - // This shouldn't happen unless the cron is configured to run only once, which our current cron syntax doesn't support - if (!lastExecutionTime.isPresent) { - val currentTime = Instant.now() - return Pair(currentTime, currentTime) - } - lastExecutionTime.get().toInstant() - } - val zonedDateTime = ZonedDateTime.ofInstant(realStartTime, timezone) - val newEndTime = executionTime.nextExecution(zonedDateTime).orElse(null) - return Pair(realStartTime, newEndTime?.toInstant() ?: realStartTime) - } - - override fun getPeriodEndingAt(endTime: Instant?): Pair { - val realEndTime = if (endTime != null) { - endTime - } else { - val nextExecutionTime = executionTime.nextExecution(ZonedDateTime.now(timezone)) - // This shouldn't happen unless the cron is configured to run only once which our current cron syntax doesn't support - if (!nextExecutionTime.isPresent) { - val currentTime = Instant.now() - return Pair(currentTime, currentTime) - } - nextExecutionTime.get().toInstant() - } - val zonedDateTime = ZonedDateTime.ofInstant(realEndTime, timezone) - val newStartTime = executionTime.lastExecution(zonedDateTime).orElse(null) - return Pair(newStartTime?.toInstant() ?: realEndTime, realEndTime) - } - - override fun runningOnTime(lastExecutionTime: Instant?): Boolean { - if (lastExecutionTime == null) { - return true - } - - val zonedDateTime = ZonedDateTime.ofInstant(testInstant ?: Instant.now(), timezone) - val expectedExecutionTime = executionTime.lastExecution(zonedDateTime) - - if (!expectedExecutionTime.isPresent) { - // At this point we know lastExecutionTime is not null, this should never happen. - // If expected execution time is null, we shouldn't have executed the ScheduledJob. 
- return false - } - val actualExecutionTime = ZonedDateTime.ofInstant(lastExecutionTime, timezone) - - return ChronoUnit.SECONDS.between(expectedExecutionTime.get(), actualExecutionTime) == 0L - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(CRON_FIELD) - .field(EXPRESSION_FIELD, expression) - .field(TIMEZONE_FIELD, timezone.id) - .endObject() - .endObject() - return builder - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeString(expression) - out.writeZoneId(timezone) - } -} - -data class IntervalSchedule( - val interval: Int, - val unit: ChronoUnit, - // visible for testing - @Transient val testInstant: Instant? = null -) : Schedule() { - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readInt(), // interval - sin.readEnum(ChronoUnit::class.java) // unit - ) - companion object { - @Transient - private val SUPPORTED_UNIT = listOf(ChronoUnit.MINUTES, ChronoUnit.HOURS, ChronoUnit.DAYS) - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): IntervalSchedule { - return IntervalSchedule(sin) - } - } - - init { - if (!SUPPORTED_UNIT.contains(unit)) { - throw IllegalArgumentException("Timezone $unit is not supported expected $SUPPORTED_UNIT") - } - - if (interval <= 0) { - throw IllegalArgumentException("Interval is not allowed to be 0 or negative") - } - } - - @Transient - private val intervalInMills = Duration.of(interval.toLong(), unit).toMillis() - - override fun nextTimeToExecute(enabledTime: Instant): Duration? { - val enabledTimeEpochMillis = enabledTime.toEpochMilli() - - val currentTime = testInstant ?: Instant.now() - val delta = currentTime.toEpochMilli() - enabledTimeEpochMillis - // Remainder of the Delta time is how much we have already spent waiting. - // We need to subtract remainder of that time from the interval time to get remaining schedule time to wait. - val remainingScheduleTime = intervalInMills - delta.rem(intervalInMills) - return Duration.of(remainingScheduleTime, ChronoUnit.MILLIS) - } - - override fun getExpectedNextExecutionTime(enabledTime: Instant, expectedPreviousExecutionTime: Instant?): Instant? { - val expectedPreviousExecutionTimeEpochMillis = (expectedPreviousExecutionTime ?: enabledTime).toEpochMilli() - // We still need to calculate the delta even when using expectedPreviousExecutionTime because the initial value passed in - // is the enabledTime (which also happens with cluster/node restart) - val currentTime = testInstant ?: Instant.now() - val delta = currentTime.toEpochMilli() - expectedPreviousExecutionTimeEpochMillis - // Remainder of the Delta time is how much we have already spent waiting. - // We need to subtract remainder of that time from the interval time to get remaining schedule time to wait. 
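// Illustrative aside, not part of the original file: a worked example of the
// computation below, assuming interval = 1 and unit = MINUTES, with 90 seconds
// elapsed since the reference time:
//   intervalInMills            = 60_000 ms
//   delta                      = 90_000 ms
//   delta.rem(intervalInMills) = 30_000 ms  (already spent waiting in this window)
//   remainingScheduleTime      = 60_000 - 30_000 = 30_000 ms, i.e. 30 seconds to wait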
- val remainingScheduleTime = intervalInMills - delta.rem(intervalInMills) - return Instant.ofEpochMilli(currentTime.toEpochMilli() + remainingScheduleTime) - } - - override fun getPeriodStartingAt(startTime: Instant?): Pair { - val realStartTime = startTime ?: Instant.now() - val newEndTime = realStartTime.plusMillis(intervalInMills) - return Pair(realStartTime, newEndTime) - } - - override fun getPeriodEndingAt(endTime: Instant?): Pair { - val realEndTime = endTime ?: Instant.now() - val newStartTime = realEndTime.minusMillis(intervalInMills) - return Pair(newStartTime, realEndTime) - } - - override fun runningOnTime(lastExecutionTime: Instant?): Boolean { - if (lastExecutionTime == null) { - return true - } - - // Make sure the lastExecutionTime is less than interval time. - val delta = ChronoUnit.MILLIS.between(lastExecutionTime, testInstant ?: Instant.now()) - return 0 < delta && delta < intervalInMills - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() - .startObject(PERIOD_FIELD) - .field(INTERVAL_FIELD, interval) - .field(UNIT_FIELD, unit.name) - .endObject() - .endObject() - return builder - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeInt(interval) - out.writeEnum(unit) - } -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/ScheduledJob.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/ScheduledJob.kt deleted file mode 100644 index fb595d9f0..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/ScheduledJob.kt +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.opensearch.alerting.core.model.ScheduledJob.Companion.NO_ID -import org.opensearch.alerting.core.model.ScheduledJob.Companion.NO_VERSION -import org.opensearch.alerting.core.model.ScheduledJob.Companion.SCHEDULED_JOBS_INDEX -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import java.io.IOException -import java.time.Instant - -/** - * A job that runs periodically in the ElasticSearch cluster. - * - * All implementations of [ScheduledJob]s are stored in the [SCHEDULED_JOBS_INDEX] index and are scheduled in a - * single global Scheduler running on each node. Each implementation should have its own separate APIs for writing, - * updating and deleting instances of that job type into the [SCHEDULED_JOBS_INDEX] index. The index is periodically - * scanned for updates which are then scheduled or unscheduled with the Scheduler. - * - * Like all documents in OpenSearch [ScheduledJob]s also have an [id] and a [version]. Jobs that have not been - * persisted in the cluster should use the special sentinel values [NO_ID] and [NO_VERSION] for these fields. 
- */ -interface ScheduledJob : Writeable, ToXContentObject { - - fun toXContentWithType(builder: XContentBuilder): XContentBuilder = toXContent(builder, XCONTENT_WITH_TYPE) - - companion object { - /** The name of the ElasticSearch index in which we store jobs */ - const val SCHEDULED_JOBS_INDEX = ".opendistro-alerting-config" - const val DOC_LEVEL_QUERIES_INDEX = ".opensearch-alerting-queries" - - const val NO_ID = "" - - const val NO_VERSION = 1L - - private val XCONTENT_WITH_TYPE = ToXContent.MapParams(mapOf("with_type" to "true")) - - /** - * This function parses the job, delegating to the specific subtype parser registered in the [XContentParser.getXContentRegistry] - * at runtime. Each concrete job subclass is expected to register a parser in this registry. - * The Job's json representation is expected to be of the form: - * { "" : { } } - * - * If the job comes from an OpenSearch index it's [id] and [version] can also be supplied. - */ - @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): ScheduledJob { - ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) - ensureExpectedToken(Token.FIELD_NAME, xcp.nextToken(), xcp) - ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) - val job = xcp.namedObject(ScheduledJob::class.java, xcp.currentName(), null) - ensureExpectedToken(Token.END_OBJECT, xcp.nextToken(), xcp) - return job.fromDocument(id, version) - } - - /** - * This function parses the job, but expects the type to be passed in. This is for the specific - * use case in sweeper where we first want to check if the job is allowed to be swept before - * trying to fully parse it. If you need to parse a job, you most likely want to use - * the above parse function. - */ - @Throws(IOException::class) - fun parse(xcp: XContentParser, type: String, id: String = NO_ID, version: Long = NO_VERSION): ScheduledJob { - ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) - val job = xcp.namedObject(ScheduledJob::class.java, type, null) - ensureExpectedToken(Token.END_OBJECT, xcp.nextToken(), xcp) - return job.fromDocument(id, version) - } - } - - /** The id of the job in the [SCHEDULED_JOBS_INDEX] or [NO_ID] if not persisted */ - val id: String - - /** The version of the job in the [SCHEDULED_JOBS_INDEX] or [NO_VERSION] if not persisted */ - val version: Long - - /** The name of the job */ - val name: String - - /** The type of the job */ - val type: String - - /** Controls whether the job will be scheduled or not */ - val enabled: Boolean - - /** The schedule for running the job */ - val schedule: Schedule - - /** The last time the job was updated */ - val lastUpdateTime: Instant - - /** The time the job was enabled */ - val enabledTime: Instant? 
- - /** Copy constructor for persisted jobs */ - fun fromDocument(id: String, version: Long): ScheduledJob -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/model/SearchInput.kt b/core/src/main/kotlin/org/opensearch/alerting/core/model/SearchInput.kt deleted file mode 100644 index 6e2d075eb..000000000 --- a/core/src/main/kotlin/org/opensearch/alerting/core/model/SearchInput.kt +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.opensearch.common.CheckedFunction -import org.opensearch.common.ParseField -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParser.Token -import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.search.builder.SearchSourceBuilder -import java.io.IOException - -data class SearchInput(val indices: List, val query: SearchSourceBuilder) : Input { - - @Throws(IOException::class) - constructor(sin: StreamInput) : this( - sin.readStringList(), // indices - SearchSourceBuilder(sin) // query - ) - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() - .startObject(SEARCH_FIELD) - .field(INDICES_FIELD, indices.toTypedArray()) - .field(QUERY_FIELD, query) - .endObject() - .endObject() - } - - override fun name(): String { - return SEARCH_FIELD - } - - @Throws(IOException::class) - override fun writeTo(out: StreamOutput) { - out.writeStringCollection(indices) - query.writeTo(out) - } - - companion object { - const val INDICES_FIELD = "indices" - const val QUERY_FIELD = "query" - const val SEARCH_FIELD = "search" - - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(Input::class.java, ParseField("search"), CheckedFunction { parseInner(it) }) - - @JvmStatic @Throws(IOException::class) - fun parseInner(xcp: XContentParser): SearchInput { - val indices = mutableListOf() - lateinit var searchSourceBuilder: SearchSourceBuilder - - ensureExpectedToken(Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - when (fieldName) { - INDICES_FIELD -> { - ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) - while (xcp.nextToken() != Token.END_ARRAY) { - indices.add(xcp.text()) - } - } - QUERY_FIELD -> { - searchSourceBuilder = SearchSourceBuilder.fromXContent(xcp, false) - } - } - } - - return SearchInput( - indices, - requireNotNull(searchSourceBuilder) { "SearchInput query is null" } - ) - } - - @JvmStatic - @Throws(IOException::class) - fun readFrom(sin: StreamInput): SearchInput { - return SearchInput(sin) - } - } -} diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt b/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt index 07ab0dfdb..a4a729121 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt @@ -7,8 +7,8 @@ package org.opensearch.alerting.core.schedule import org.apache.logging.log4j.LogManager import 
org.opensearch.alerting.core.JobRunner -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.threadpool.Scheduler import org.opensearch.threadpool.ThreadPool import java.time.Duration diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt b/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt index 19c8501c4..6bdb18bec 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt @@ -5,7 +5,6 @@ package org.opensearch.alerting.core.settings -import org.opensearch.alerting.core.model.ScheduledJob import org.opensearch.common.settings.Setting /** diff --git a/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt b/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt index a49181292..bf30957d7 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt @@ -16,15 +16,11 @@ import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.action.search.SearchResponse import org.opensearch.action.search.ShardSearchFailure import org.opensearch.client.OpenSearchClient -import org.opensearch.common.bytes.BytesReference import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.common.util.concurrent.ThreadContext.StoredContext import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.InjectSecurity import org.opensearch.commons.authuser.User @@ -36,7 +32,6 @@ import org.opensearch.rest.RestStatus.BAD_GATEWAY import org.opensearch.rest.RestStatus.GATEWAY_TIMEOUT import org.opensearch.rest.RestStatus.SERVICE_UNAVAILABLE import org.opensearch.search.builder.SearchSourceBuilder -import java.time.Instant import kotlin.coroutines.CoroutineContext import kotlin.coroutines.resume import kotlin.coroutines.resumeWithException @@ -142,43 +137,12 @@ fun SearchResponse.firstFailureOrNull(): ShardSearchFailure? { return shardFailures?.getOrNull(0) } -fun XContentParser.instant(): Instant? 
{ - return when { - currentToken() == XContentParser.Token.VALUE_NULL -> null - currentToken().isValue -> Instant.ofEpochMilli(longValue()) - else -> { - XContentParserUtils.throwUnknownToken(currentToken(), tokenLocation) - null // unreachable - } - } -} - -fun XContentBuilder.optionalTimeField(name: String, instant: Instant?): XContentBuilder { - if (instant == null) { - return nullField(name) - } - // second name as readableName should be different than first name - return this.timeField(name, "${name}_in_millis", instant.toEpochMilli()) -} - -fun XContentBuilder.optionalUserField(name: String, user: User?): XContentBuilder { - if (user == null) { - return nullField(name) - } - return this.field(name, user) -} - fun addFilter(user: User, searchSourceBuilder: SearchSourceBuilder, fieldName: String) { val filterBackendRoles = QueryBuilders.termsQuery(fieldName, user.backendRoles) val queryBuilder = searchSourceBuilder.query() as BoolQueryBuilder searchSourceBuilder.query(queryBuilder.filter(filterBackendRoles)) } -/** - * Extension function for ES 6.3 and above that duplicates the ES 6.2 XContentBuilder.string() method. - */ -fun XContentBuilder.string(): String = BytesReference.bytes(this).utf8ToString() - /** * Converts [OpenSearchClient] methods that take a callback into a kotlin suspending function. * diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/XContentTests.kt b/core/src/test/kotlin/org/opensearch/alerting/core/XContentTests.kt deleted file mode 100644 index 610125469..000000000 --- a/core/src/test/kotlin/org/opensearch/alerting/core/XContentTests.kt +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core - -import org.opensearch.alerting.core.model.Input -import org.opensearch.alerting.core.model.SearchInput -import org.opensearch.alerting.core.model.XContentTestBase -import org.opensearch.alerting.opensearchapi.string -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.index.query.QueryBuilders -import org.opensearch.search.builder.SearchSourceBuilder -import kotlin.test.Test -import kotlin.test.assertEquals - -class XContentTests : XContentTestBase { - - @Test - fun `test input parsing`() { - val input = randomInput() - - val inputString = input.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedInput = Input.parse(parser(inputString)) - - assertEquals(input, parsedInput, "Round tripping input doesn't work") - } - - private fun randomInput(): Input { - return SearchInput( - indices = listOf("foo", "bar"), - query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) - } -} diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/model/ClusterMetricsInputTests.kt b/core/src/test/kotlin/org/opensearch/alerting/core/model/ClusterMetricsInputTests.kt deleted file mode 100644 index 50fa27ee1..000000000 --- a/core/src/test/kotlin/org/opensearch/alerting/core/model/ClusterMetricsInputTests.kt +++ /dev/null @@ -1,448 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import kotlin.test.Test -import kotlin.test.assertEquals -import kotlin.test.assertFailsWith - -class ClusterMetricsInputTests { - private var path = "/_cluster/health" - private var pathParams = "" - private var url = "" - - @Test - fun `test valid ClusterMetricsInput creation using HTTP URI component fields`() { - // GIVEN - val testUrl = 
"http://localhost:9200/_cluster/health" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(path, clusterMetricsInput.path) - assertEquals(pathParams, clusterMetricsInput.pathParams) - assertEquals(testUrl, clusterMetricsInput.url) - } - - @Test - fun `test valid ClusterMetricsInput creation using HTTP url field`() { - // GIVEN - path = "" - url = "http://localhost:9200/_cluster/health" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(url, clusterMetricsInput.url) - } - - @Test - fun `test valid ClusterMetricsInput creation using HTTPS url field`() { - // GIVEN - path = "" - url = "https://localhost:9200/_cluster/health" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(url, clusterMetricsInput.url) - } - - @Test - fun `test invalid path`() { - // GIVEN - path = "///" - - // WHEN + THEN - assertFailsWith("Invalid URL.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test invalid url`() { - // GIVEN - url = "///" - - // WHEN + THEN - assertFailsWith("Invalid URL.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test url field and URI component fields create equal URI`() { - // GIVEN - url = "http://localhost:9200/_cluster/health" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(path, clusterMetricsInput.path) - assertEquals(pathParams, clusterMetricsInput.pathParams) - assertEquals(url, clusterMetricsInput.url) - assertEquals(url, clusterMetricsInput.constructedUri.toString()) - } - - @Test - fun `test url field and URI component fields with path params create equal URI`() { - // GIVEN - path = "/_cluster/health/" - pathParams = "index1,index2,index3,index4,index5" - url = "http://localhost:9200/_cluster/health/index1,index2,index3,index4,index5" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(path, clusterMetricsInput.path) - assertEquals(pathParams, clusterMetricsInput.pathParams) - assertEquals(url, clusterMetricsInput.url) - assertEquals(url, clusterMetricsInput.constructedUri.toString()) - } - - @Test - fun `test url field and URI component fields create different URI`() { - // GIVEN - url = "http://localhost:9200/_cluster/stats" - - // WHEN + THEN - assertFailsWith("The provided URL and URI fields form different URLs.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test url field and URI component fields with path params create different URI`() { - // GIVEN - pathParams = "index1,index2,index3,index4,index5" - url = "http://localhost:9200/_cluster/stats/index1,index2,index3,index4,index5" - - // WHEN + THEN - assertFailsWith("The provided URL and URI fields form different URLs.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test ClusterMetricsInput creation when all inputs are empty`() { - // GIVEN - path = "" - pathParams = "" - url = "" - - // WHEN + THEN - assertFailsWith("The uri.api_type field, uri.path field, or uri.uri field must be defined.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test ClusterMetricsInput creation when all inputs but path params are empty`() { - // GIVEN - path = "" - pathParams = "index1,index2,index3,index4,index5" - url = "" - - // WHEN + THEN - assertFailsWith("The uri.api_type field, uri.path field, or uri.uri field must be 
defined.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test invalid scheme in url field`() { - // GIVEN - path = "" - url = "invalidScheme://localhost:9200/_cluster/health" - - // WHEN + THEN - assertFailsWith("Invalid URL.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test invalid host in url field`() { - // GIVEN - path = "" - url = "http://127.0.0.1:9200/_cluster/health" - - // WHEN + THEN - assertFailsWith("Only host '${ClusterMetricsInput.SUPPORTED_HOST}' is supported.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test invalid port in url field`() { - // GIVEN - path = "" - url = "http://localhost:${ClusterMetricsInput.SUPPORTED_PORT + 1}/_cluster/health" - - // WHEN + THEN - assertFailsWith("Only port '${ClusterMetricsInput.SUPPORTED_PORT}' is supported.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test parsePathParams with no path params`() { - // GIVEN - val testUrl = "http://localhost:9200/_cluster/health" - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // WHEN - val params = clusterMetricsInput.parsePathParams() - - // THEN - assertEquals(pathParams, params) - assertEquals(testUrl, clusterMetricsInput.constructedUri.toString()) - } - - @Test - fun `test parsePathParams with path params as URI field`() { - // GIVEN - path = "/_cluster/health/" - pathParams = "index1,index2,index3,index4,index5" - val testUrl = "http://localhost:9200/_cluster/health/index1,index2,index3,index4,index5" - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // WHEN - val params = clusterMetricsInput.parsePathParams() - - // THEN - assertEquals(pathParams, params) - assertEquals(testUrl, clusterMetricsInput.constructedUri.toString()) - } - - @Test - fun `test parsePathParams with path params in url`() { - // GIVEN - path = "" - val testParams = "index1,index2,index3,index4,index5" - url = "http://localhost:9200/_cluster/health/index1,index2,index3,index4,index5" - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // WHEN - val params = clusterMetricsInput.parsePathParams() - - // THEN - assertEquals(testParams, params) - assertEquals(url, clusterMetricsInput.constructedUri.toString()) - } - - @Test - fun `test parsePathParams with no path params for ApiType that requires path params`() { - // GIVEN - path = "/_cat/snapshots" - - // WHEN + THEN - assertFailsWith("The API requires path parameters.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test parsePathParams with path params for ApiType that doesn't support path params`() { - // GIVEN - path = "/_cluster/settings" - pathParams = "index1,index2,index3,index4,index5" - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // WHEN + THEN - assertFailsWith("The API does not use path parameters.") { - clusterMetricsInput.parsePathParams() - } - } - - @Test - fun `test parsePathParams with path params containing illegal characters`() { - var testCount = 0 // Start off with count of 1 to account for ApiType.BLANK - ILLEGAL_PATH_PARAMETER_CHARACTERS.forEach { character -> - // GIVEN - pathParams = "index1,index2,$character,index4,index5" - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // WHEN + THEN - assertFailsWith( - "The provided path parameters contain invalid characters or spaces. 
Please omit: " + - "${ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString(" ")}" - ) { - clusterMetricsInput.parsePathParams() - } - testCount++ - } - assertEquals(ILLEGAL_PATH_PARAMETER_CHARACTERS.size, testCount) - } - - @Test - fun `test ClusterMetricsInput correctly determines ApiType when path is provided as URI component`() { - var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK - ClusterMetricsInput.ClusterMetricType.values() - .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } - .forEach { testApiType -> - // GIVEN - path = testApiType.defaultPath - pathParams = if (testApiType.supportsPathParams) "index1,index2,index3,index4,index5" else "" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(testApiType, clusterMetricsInput.clusterMetricType) - testCount++ - } - assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) - } - - @Test - fun `test ClusterMetricsInput correctly determines ApiType when path and path params are provided as URI components`() { - var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK - ClusterMetricsInput.ClusterMetricType.values() - .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } - .forEach { testApiType -> - // GIVEN - path = testApiType.defaultPath - pathParams = "index1,index2,index3,index4,index5" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(testApiType, clusterMetricsInput.clusterMetricType) - testCount++ - } - assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) - } - - @Test - fun `test ClusterMetricsInput correctly determines ApiType when path is provided in URL field`() { - var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK - ClusterMetricsInput.ClusterMetricType.values() - .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } - .forEach { testApiType -> - // GIVEN - path = "" - pathParams = if (testApiType.supportsPathParams) "index1,index2,index3,index4,index5" else "" - url = "http://localhost:9200${testApiType.defaultPath}" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(testApiType, clusterMetricsInput.clusterMetricType) - testCount++ - } - assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) - } - - @Test - fun `test ClusterMetricsInput correctly determines ApiType when path and path params are provided in URL field`() { - var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK - ClusterMetricsInput.ClusterMetricType.values() - .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } - .forEach { testApiType -> - // GIVEN - path = "" - pathParams = if (testApiType.supportsPathParams) "/index1,index2,index3,index4,index5" else "" - url = "http://localhost:9200${testApiType.defaultPath}$pathParams" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(testApiType, clusterMetricsInput.clusterMetricType) - testCount++ - } - assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) - } - - @Test - fun `test ClusterMetricsInput cannot determine ApiType when invalid path is provided as URI component`() { - // GIVEN - path = "/_cat/paws" - - // WHEN + THEN - assertFailsWith("The API could not be determined from the provided URI.") { - ClusterMetricsInput(path, 
pathParams, url) - } - } - - @Test - fun `test ClusterMetricsInput cannot determine ApiType when invalid path and path params are provided as URI components`() { - // GIVEN - path = "/_cat/paws" - pathParams = "index1,index2,index3,index4,index5" - - // WHEN + THEN - assertFailsWith("The API could not be determined from the provided URI.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test ClusterMetricsInput cannot determine ApiType when invaid path is provided in URL`() { - // GIVEN - path = "" - url = "http://localhost:9200/_cat/paws" - - // WHEN + THEN - assertFailsWith("The API could not be determined from the provided URI.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test ClusterMetricsInput cannot determine ApiType when invaid path and path params are provided in URL`() { - // GIVEN - path = "" - url = "http://localhost:9200/_cat/paws/index1,index2,index3,index4,index5" - - // WHEN + THEN - assertFailsWith("The API could not be determined from the provided URI.") { - ClusterMetricsInput(path, pathParams, url) - } - } - - @Test - fun `test parseEmptyFields populates empty path and path_params when url is provided`() { - // GIVEN - path = "" - pathParams = "" - val testPath = "/_cluster/health" - val testPathParams = "index1,index2,index3,index4,index5" - url = "http://localhost:9200$testPath$testPathParams" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(testPath, clusterMetricsInput.path) - assertEquals(testPathParams, clusterMetricsInput.pathParams) - assertEquals(url, clusterMetricsInput.url) - } - - @Test - fun `test parseEmptyFields populates empty url field when path and path_params are provided`() { - // GIVEN - path = "/_cluster/health/" - pathParams = "index1,index2,index3,index4,index5" - val testUrl = "http://localhost:9200$path$pathParams" - - // WHEN - val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) - - // THEN - assertEquals(path, clusterMetricsInput.path) - assertEquals(pathParams, clusterMetricsInput.pathParams) - assertEquals(testUrl, clusterMetricsInput.url) - } -} diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt b/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt index 26a2c18d2..5d9b9237f 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt @@ -1,13 +1,10 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.alerting.core.model import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.commons.alerting.model.Schedule +import org.opensearch.commons.alerting.model.ScheduledJob import java.io.IOException import java.time.Instant diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/model/ScheduleTest.kt b/core/src/test/kotlin/org/opensearch/alerting/core/model/ScheduleTest.kt deleted file mode 100644 index 604178d31..000000000 --- a/core/src/test/kotlin/org/opensearch/alerting/core/model/ScheduleTest.kt +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.opensearch.alerting.opensearchapi.string -import 
org.opensearch.common.xcontent.ToXContent -import java.time.Clock -import java.time.Instant -import java.time.ZoneId -import java.time.ZonedDateTime -import java.time.temporal.ChronoUnit -import kotlin.test.Test -import kotlin.test.assertEquals -import kotlin.test.assertFailsWith -import kotlin.test.assertFalse -import kotlin.test.assertNotNull -import kotlin.test.assertTrue - -class ScheduleTest : XContentTestBase { - @Test - fun `test time zone conversion`() { - val cronExpression = "31 * * * *" // Run at minute 31. - // This is 2018-09-27 20:00:58 GMT which will in conversion lead to 30min 58 seconds IST - val testInstance = Instant.ofEpochSecond(1538164858L) - - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Kolkata"), testInstance) - val nextTimeToExecute = cronSchedule.nextTimeToExecute(Instant.now()) - assertNotNull(nextTimeToExecute, "There should be next execute time.") - assertEquals(2L, nextTimeToExecute.seconds, "Execute time should be 2 seconds") - } - - @Test - fun `test time zone`() { - val cronExpression = "0 11 * * 3" // Run at 11:00 on Wednesday. - // This is 2018-09-26 01:59:58 GMT which will in conversion lead to Wednesday 10:59:58 JST - val testInstance = Instant.ofEpochSecond(1537927198L) - - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Tokyo"), testInstance) - val nextTimeToExecute = cronSchedule.nextTimeToExecute(Instant.now()) - assertNotNull(nextTimeToExecute, "There should be next execute time.") - assertEquals(2L, nextTimeToExecute.seconds, "Execute time should be 2 seconds") - } - - @Test - fun `test cron calculates next time to execute after restart`() { - val cronExpression = "* * * * *" - // This is 2018-09-26 01:59:58 GMT - val testInstance = Instant.ofEpochSecond(1537927198L) - // This enabled time represents GMT: Wednesday, September 19, 2018 3:19:51 AM - val enabledTimeInstance = Instant.ofEpochSecond(1537327191) - - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("America/Los_Angeles"), testInstance) - // The nextTimeToExecute should be the minute after the test instance, not enabledTimeInstance, replicating a cluster restart - val nextTimeToExecute = cronSchedule.getExpectedNextExecutionTime(enabledTimeInstance, null) - assertNotNull(nextTimeToExecute, "There should be next execute time") - assertEquals( - testInstance.plusSeconds(2L), nextTimeToExecute, - "nextTimeToExecute should be 2 seconds after test instance" - ) - } - - @Test - fun `test cron calculates next time to execute using cached previous time`() { - val cronExpression = "* * * * *" - // This is 2018-09-26 01:59:58 GMT - val previousExecutionTimeInstance = Instant.ofEpochSecond(1537927198L) - // This enabled time represents GMT: Wednesday, September 19, 2018 3:19:51 AM - val enabledTimeInstance = Instant.ofEpochSecond(1537327191) - - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("America/Los_Angeles")) - // The nextTimeToExecute should be the minute after the previous execution time instance, not enabledTimeInstance - val nextTimeToExecute = cronSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) - assertNotNull(nextTimeToExecute, "There should be next execute time") - assertEquals( - previousExecutionTimeInstance.plusSeconds(2L), nextTimeToExecute, - "nextTimeToExecute should be 2 seconds after test instance" - ) - } - - @Test - fun `test interval calculates next time to execute using enabled time`() { - // This enabled time represents 2018-09-26 01:59:58 GMT - val enabledTimeInstance = 
Instant.ofEpochSecond(1537927138L) - // This is 2018-09-26 01:59:59 GMT, which is 61 seconds after enabledTime - val testInstance = Instant.ofEpochSecond(1537927199L) - - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) - - // The nextTimeToExecute should be 120 seconds after the enabled time - val nextTimeToExecute = intervalSchedule.getExpectedNextExecutionTime(enabledTimeInstance, null) - assertNotNull(nextTimeToExecute, "There should be next execute time") - assertEquals( - enabledTimeInstance.plusSeconds(120L), nextTimeToExecute, - "nextTimeToExecute should be 120 seconds seconds after enabled time" - ) - } - - @Test - fun `test interval calculates next time to execute using cached previous time`() { - // This is 2018-09-26 01:59:58 GMT - val previousExecutionTimeInstance = Instant.ofEpochSecond(1537927198L) - // This is 2018-09-26 02:00:00 GMT - val testInstance = Instant.ofEpochSecond(1537927200L) - // This enabled time represents 2018-09-26 01:58:58 GMT - val enabledTimeInstance = Instant.ofEpochSecond(1537927138L) - - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) - - // The nextTimeToExecute should be the minute after the previous execution time instance - val nextTimeToExecute = intervalSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) - assertNotNull(nextTimeToExecute, "There should be next execute time") - assertEquals( - previousExecutionTimeInstance.plusSeconds(60L), nextTimeToExecute, - "nextTimeToExecute should be 60 seconds after previous execution time" - ) - } - - @Test - fun `test cron schedule round trip`() { - val cronExpression = "0 * * * *" - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Tokyo")) - - val scheduleString = cronSchedule.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedSchedule = Schedule.parse(parser(scheduleString)) - - assertTrue(parsedSchedule is CronSchedule, "Parsed scheduled is not Cron Scheduled Type.") - assertEquals(cronSchedule, parsedSchedule, "Round tripping Cron Schedule doesn't work") - } - - @Test - fun `test interval schedule round trip`() { - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES) - - val scheduleString = intervalSchedule.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() - val parsedSchedule = Schedule.parse(parser(scheduleString)) - assertTrue(parsedSchedule is IntervalSchedule, "Parsed scheduled is not Interval Scheduled Type.") - assertEquals(intervalSchedule, parsedSchedule, "Round tripping Interval Schedule doesn't work") - } - - @Test - fun `test cron invalid missing timezone`() { - val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - Schedule.parse(parser(scheduleString)) - } - } - - @Test - fun `test cron invalid timezone rule`() { - val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"Going/Nowhere\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - Schedule.parse(parser(scheduleString)) - } - } - - @Test - fun `test cron invalid timezone offset`() { - val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"+++9\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - Schedule.parse(parser(scheduleString)) - } - } - - @Test - fun `test invalid type`() { - val scheduleString = "{\"foobarzzz\":{\"expression\":\"0 * * * 
*\",\"timezone\":\"+++9\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - Schedule.parse(parser(scheduleString)) - } - } - - @Test - fun `test two types`() { - val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"Asia/Tokyo\"}, " + - "\"period\":{\"interval\":\"1\",\"unit\":\"Minutes\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - Schedule.parse(parser(scheduleString)) - } - } - - @Test - fun `test invalid cron expression`() { - val scheduleString = "{\"cron\":{\"expression\":\"5 * 1 * * *\",\"timezone\":\"Asia/Tokyo\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - Schedule.parse(parser(scheduleString)) - } - } - - @Test - fun `test interval period starting at`() { - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES) - - val (periodStartTime, periodEndTime) = intervalSchedule.getPeriodStartingAt(null) - - assertEquals(periodStartTime, periodEndTime.minus(1, ChronoUnit.MINUTES), "Period didn't match interval") - - val startTime = Instant.now() - // Kotlin has destructuring declarations but no destructuring assignments? Gee, thanks... - val (periodStartTime2, _) = intervalSchedule.getPeriodStartingAt(startTime) - assertEquals(startTime, periodStartTime2, "Periods doesn't start at provided start time") - } - - @Test - fun `test interval period ending at`() { - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES) - - val (periodStartTime, periodEndTime) = intervalSchedule.getPeriodEndingAt(null) - - assertEquals(periodStartTime, periodEndTime.minus(1, ChronoUnit.MINUTES), "Period didn't match interval") - - val endTime = Instant.now() - // destructuring declarations but no destructuring assignments? Gee, thanks... 
https://youtrack.jetbrains.com/issue/KT-11362 - val (_, periodEndTime2) = intervalSchedule.getPeriodEndingAt(endTime) - assertEquals(endTime, periodEndTime2, "Periods doesn't end at provided end time") - } - - @Test - fun `test cron period starting at`() { - val cronSchedule = CronSchedule("0 * * * *", ZoneId.of("Asia/Tokyo")) - - val (startTime1, endTime) = cronSchedule.getPeriodStartingAt(null) - assertTrue(startTime1 <= Instant.now(), "startTime is in future; should be the last execution time") - assertTrue(cronSchedule.executionTime.isMatch(ZonedDateTime.ofInstant(endTime, ZoneId.of("Asia/Tokyo")))) - - val (startTime, _) = cronSchedule.getPeriodStartingAt(endTime) - assertEquals(startTime, endTime, "Subsequent period doesn't start at provided end time") - } - - @Test - fun `test cron period ending at`() { - val cronSchedule = CronSchedule("0 * * * *", ZoneId.of("Asia/Tokyo")) - - val (startTime, endTime1) = cronSchedule.getPeriodEndingAt(null) - assertTrue(endTime1 >= Instant.now(), "endTime is in past; should be the next execution time") - assertTrue(cronSchedule.executionTime.isMatch(ZonedDateTime.ofInstant(startTime, ZoneId.of("Asia/Tokyo")))) - - val (_, endTime2) = cronSchedule.getPeriodEndingAt(startTime) - assertEquals(endTime2, startTime, "Previous period doesn't end at provided start time") - } - - @Test - fun `cron job not running on time`() { - val cronSchedule = createTestCronSchedule() - - val lastExecutionTime = 1539715560L - assertFalse(cronSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) - } - - @Test - fun `cron job running on time`() { - val cronSchedule = createTestCronSchedule() - - val lastExecutionTime = 1539715620L - assertTrue(cronSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) - } - - @Test - fun `period job running exactly at interval`() { - val testInstance = Instant.ofEpochSecond(1539715678L) - val enabledTime = Instant.ofEpochSecond(1539615178L) - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) - - val nextTimeToExecute = intervalSchedule.nextTimeToExecute(enabledTime) - assertNotNull(nextTimeToExecute, "There should be next execute time.") - assertEquals(60L, nextTimeToExecute.seconds, "Excepted 60 seconds but was ${nextTimeToExecute.seconds}") - } - - @Test - fun `period job 3 minutes`() { - val testInstance = Instant.ofEpochSecond(1539615226L) - val enabledTime = Instant.ofEpochSecond(1539615144L) - val intervalSchedule = IntervalSchedule(3, ChronoUnit.MINUTES, testInstance) - - val nextTimeToExecute = intervalSchedule.nextTimeToExecute(enabledTime) - assertNotNull(nextTimeToExecute, "There should be next execute time.") - assertEquals(98L, nextTimeToExecute.seconds, "Excepted 98 seconds but was ${nextTimeToExecute.seconds}") - } - - @Test - fun `period job running on time`() { - val intervalSchedule = createTestIntervalSchedule() - - val lastExecutionTime = 1539715620L - assertTrue(intervalSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) - } - - @Test - fun `period job not running on time`() { - val intervalSchedule = createTestIntervalSchedule() - - val lastExecutionTime = 1539715560L - assertFalse(intervalSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) - } - - @Test - fun `period job test null last execution time`() { - val intervalSchedule = createTestIntervalSchedule() - - assertTrue(intervalSchedule.runningOnTime(null)) - } - - @Test - fun `execution time matches across different time zones`() { - val now = Instant.now() - val pdtClock = 
Clock.fixed(now, ZoneId.of("America/Los_Angeles")) - val utcClock = Clock.fixed(now, ZoneId.of("UTC")) - val pdtClockCronSchedule = CronSchedule("* * * * *", ZoneId.of("America/Los_Angeles")) - val utcClockCronSchedule = CronSchedule("* * * * *", ZoneId.of("UTC")) - val pdtNextExecution = pdtClockCronSchedule.getExpectedNextExecutionTime(pdtClock.instant(), null) - val utcNextExecution = utcClockCronSchedule.getExpectedNextExecutionTime(utcClock.instant(), null) - assertEquals(pdtNextExecution, utcNextExecution) - } - - private fun createTestIntervalSchedule(): IntervalSchedule { - val testInstance = Instant.ofEpochSecond(1539715678L) - val enabledTime = Instant.ofEpochSecond(1539615146L) - val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) - - val nextTimeToExecute = intervalSchedule.nextTimeToExecute(enabledTime) - assertNotNull(nextTimeToExecute, "There should be next execute time.") - assertEquals(28L, nextTimeToExecute.seconds, "Excepted 28 seconds but was ${nextTimeToExecute.seconds}") - - return intervalSchedule - } - - private fun createTestCronSchedule(): CronSchedule { - val cronExpression = "* * * * *" - val testInstance = Instant.ofEpochSecond(1539715678L) - - val cronSchedule = CronSchedule(cronExpression, ZoneId.of("UTC"), testInstance) - val nextTimeToExecute = cronSchedule.nextTimeToExecute(Instant.now()) - assertNotNull(nextTimeToExecute, "There should be next execute time.") - assertEquals(2L, nextTimeToExecute.seconds, "Execute time should be 2 seconds") - - return cronSchedule - } - - @Test - fun `test invalid interval units`() { - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - IntervalSchedule(1, ChronoUnit.SECONDS) - } - - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - IntervalSchedule(1, ChronoUnit.MONTHS) - } - - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { - IntervalSchedule(-1, ChronoUnit.MINUTES) - } - } -} diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/model/XContentTestBase.kt b/core/src/test/kotlin/org/opensearch/alerting/core/model/XContentTestBase.kt deleted file mode 100644 index 4a4140954..000000000 --- a/core/src/test/kotlin/org/opensearch/alerting/core/model/XContentTestBase.kt +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.core.model - -import org.opensearch.common.settings.Settings -import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentType -import org.opensearch.search.SearchModule - -interface XContentTestBase { - fun builder(): XContentBuilder { - return XContentBuilder.builder(XContentType.JSON.xContent()) - } - - fun parser(xc: String): XContentParser { - val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc) - parser.nextToken() - return parser - } - - fun xContentRegistry(): NamedXContentRegistry { - return NamedXContentRegistry( - listOf(SearchInput.XCONTENT_REGISTRY) + - SearchModule(Settings.EMPTY, emptyList()).namedXContents - ) - } -} diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt 
b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt index 4f673fbd4..a0453e935 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt @@ -6,10 +6,10 @@ package org.opensearch.alerting.core.schedule import org.junit.Before -import org.opensearch.alerting.core.model.CronSchedule -import org.opensearch.alerting.core.model.IntervalSchedule import org.opensearch.alerting.core.model.MockScheduledJob import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.model.CronSchedule +import org.opensearch.commons.alerting.model.IntervalSchedule import org.opensearch.threadpool.ThreadPool import java.time.Instant import java.time.ZoneId diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt index 6d7ff68da..15fe770b9 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt @@ -6,7 +6,7 @@ package org.opensearch.alerting.core.schedule import org.opensearch.alerting.core.JobRunner -import org.opensearch.alerting.core.model.ScheduledJob +import org.opensearch.commons.alerting.model.ScheduledJob import java.time.Instant class MockJobRunner : JobRunner { diff --git a/release-notes/opensearch-alerting.release-notes-2.3.0.0.md b/release-notes/opensearch-alerting.release-notes-2.3.0.0.md new file mode 100644 index 000000000..39478bb07 --- /dev/null +++ b/release-notes/opensearch-alerting.release-notes-2.3.0.0.md @@ -0,0 +1,12 @@ +## Version 2.3.0.0 2022-09-08 + +Compatible with OpenSearch 2.3.0 + +### Infrastructure +* Deprecate the Master nomenclature. ([#548](https://github.com/opensearch-project/alerting/pull/548)) + +### Maintenance +* Bumped version to 2.3.0. ([#547](https://github.com/opensearch-project/alerting/pull/547)) + +### Documentation +* Added 2.3 release notes. ([#551](https://github.com/opensearch-project/alerting/pull/551)) \ No newline at end of file
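
Reviewer note (illustration only, not part of this patch): ScheduleTest.kt and XContentTestBase.kt are deleted here because CronSchedule, IntervalSchedule, and the related model classes now live in common-utils under org.opensearch.commons.alerting.model, matching the import changes in MockScheduledJob.kt, JobSchedulerTest.kt, and MockJobRunner.kt. The sketch below shows how equivalent coverage could be exercised against the relocated classes; it assumes they keep the same constructors (including the optional test-instant parameter) and the nextTimeToExecute / getExpectedNextExecutionTime signatures that the deleted ScheduleTest.kt relied on.

```kotlin
// Illustrative sketch only; assumes the relocated commons-alerting schedule
// classes preserve the constructors and methods used by the deleted ScheduleTest.kt.
import org.opensearch.commons.alerting.model.CronSchedule
import org.opensearch.commons.alerting.model.IntervalSchedule
import java.time.Instant
import java.time.ZoneId
import java.time.temporal.ChronoUnit
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertNotNull

class RelocatedScheduleSmokeTest {

    @Test
    fun `interval schedule computes next execution from enabled time`() {
        // Enabled 61 seconds before the fixed test instant, as in the removed interval test.
        val enabledTime = Instant.ofEpochSecond(1537927138L)
        val testInstant = Instant.ofEpochSecond(1537927199L)
        val schedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstant)

        // With no previous execution, the next run lands two intervals after the enabled time.
        val next = assertNotNull(
            schedule.getExpectedNextExecutionTime(enabledTime, null),
            "There should be a next execution time"
        )
        assertEquals(enabledTime.plusSeconds(120L), next, "Expected execution 120 seconds after enabled time")
    }

    @Test
    fun `cron schedule computes time until next execution`() {
        // "* * * * *" with a fixed test instant two seconds before the minute boundary.
        val testInstant = Instant.ofEpochSecond(1539715678L)
        val schedule = CronSchedule("* * * * *", ZoneId.of("UTC"), testInstant)

        val next = assertNotNull(schedule.nextTimeToExecute(Instant.now()), "There should be a next execution time")
        assertEquals(2L, next.seconds, "Expected the next execution to be 2 seconds away")
    }
}
```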
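
Similarly, any round-trip parsing tests that remain in this repository would need a replacement for the deleted XContentTestBase.kt helper. The sketch below mirrors that helper; it assumes SearchInput is now provided by common-utils (org.opensearch.commons.alerting.model.SearchInput) and still exposes an XCONTENT_REGISTRY entry, which this patch does not show.

```kotlin
// Sketch of a stand-in for the deleted XContentTestBase helper. The commons
// SearchInput import and its XCONTENT_REGISTRY entry are assumptions here.
import org.opensearch.common.settings.Settings
import org.opensearch.common.xcontent.LoggingDeprecationHandler
import org.opensearch.common.xcontent.NamedXContentRegistry
import org.opensearch.common.xcontent.XContentBuilder
import org.opensearch.common.xcontent.XContentParser
import org.opensearch.common.xcontent.XContentType
import org.opensearch.commons.alerting.model.SearchInput
import org.opensearch.search.SearchModule

interface RelocatedXContentTestBase {
    // JSON builder for serializing a model via toXContent().
    fun builder(): XContentBuilder = XContentBuilder.builder(XContentType.JSON.xContent())

    // Parser positioned on the first token, ready to hand to a parse() method.
    fun parser(xc: String): XContentParser {
        val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc)
        parser.nextToken()
        return parser
    }

    // Registry covering the named XContent objects used by monitor inputs.
    fun xContentRegistry(): NamedXContentRegistry = NamedXContentRegistry(
        listOf(SearchInput.XCONTENT_REGISTRY) + SearchModule(Settings.EMPTY, emptyList()).namedXContents
    )
}
```

A round-trip test would then serialize a schedule with toXContent(builder(), ToXContent.EMPTY_PARAMS), feed the resulting JSON through parser(...), and hand it to Schedule.parse, mirroring the round-trip cases in the deleted ScheduleTest.kt.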