diff --git a/alerting/build.gradle b/alerting/build.gradle index d2bbb8bc1..5488525e2 100644 --- a/alerting/build.gradle +++ b/alerting/build.gradle @@ -94,6 +94,8 @@ dependencies { testImplementation "org.jetbrains.kotlin:kotlin-test:${kotlin_version}" testImplementation "org.mockito:mockito-core:4.7.0" testImplementation "org.opensearch.plugin:reindex-client:${opensearch_version}" + testImplementation "org.opensearch.plugin:lang-painless:${opensearch_version}" + testImplementation "org.opensearch.plugin:lang-mustache-client:${opensearch_version}" } javadoc.enabled = false // turn off javadoc as it barfs on Kotlin code @@ -259,7 +261,7 @@ String bwcRemoteFile = 'https://ci.opensearch.org/ci/dbc/bundle-build/1.1.0/2021 testClusters { "${baseName}$i" { testDistribution = "ARCHIVE" - versions = ["1.1.0", "2.4.0-SNAPSHOT"] + versions = ["1.1.0", "2.5.0-SNAPSHOT"] numberOfNodes = 3 plugin(provider(new Callable(){ @Override diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 66ebd5eb3..ff52d69e8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -8,6 +8,7 @@ package org.opensearch.alerting import org.opensearch.action.ActionRequest import org.opensearch.action.ActionResponse import org.opensearch.alerting.action.ExecuteMonitorAction +import org.opensearch.alerting.action.ExecuteWorkflowAction import org.opensearch.alerting.action.GetDestinationsAction import org.opensearch.alerting.action.GetEmailAccountAction import org.opensearch.alerting.action.GetEmailGroupAction @@ -27,6 +28,7 @@ import org.opensearch.alerting.core.settings.ScheduledJobSettings import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction import org.opensearch.alerting.resthandler.RestDeleteMonitorAction import org.opensearch.alerting.resthandler.RestExecuteMonitorAction +import org.opensearch.alerting.resthandler.RestExecuteWorkflowAction import org.opensearch.alerting.resthandler.RestGetAlertsAction import org.opensearch.alerting.resthandler.RestGetDestinationsAction import org.opensearch.alerting.resthandler.RestGetEmailAccountAction @@ -44,19 +46,24 @@ import org.opensearch.alerting.settings.LegacyOpenDistroAlertingSettings import org.opensearch.alerting.settings.LegacyOpenDistroDestinationSettings import org.opensearch.alerting.transport.TransportAcknowledgeAlertAction import org.opensearch.alerting.transport.TransportDeleteMonitorAction +import org.opensearch.alerting.transport.TransportDeleteWorkflowAction import org.opensearch.alerting.transport.TransportExecuteMonitorAction +import org.opensearch.alerting.transport.TransportExecuteWorkflowAction import org.opensearch.alerting.transport.TransportGetAlertsAction import org.opensearch.alerting.transport.TransportGetDestinationsAction import org.opensearch.alerting.transport.TransportGetEmailAccountAction import org.opensearch.alerting.transport.TransportGetEmailGroupAction import org.opensearch.alerting.transport.TransportGetFindingsSearchAction import org.opensearch.alerting.transport.TransportGetMonitorAction +import org.opensearch.alerting.transport.TransportGetWorkflowAction import org.opensearch.alerting.transport.TransportIndexMonitorAction +import org.opensearch.alerting.transport.TransportIndexWorkflowAction import org.opensearch.alerting.transport.TransportSearchEmailAccountAction import 
org.opensearch.alerting.transport.TransportSearchEmailGroupAction import org.opensearch.alerting.transport.TransportSearchMonitorAction import org.opensearch.alerting.util.DocLevelMonitorQueries import org.opensearch.alerting.util.destinationmigration.DestinationMigrationCoordinator +import org.opensearch.alerting.workflow.WorkflowRunnerService import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.node.DiscoveryNodes @@ -80,6 +87,7 @@ import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.QueryLevelTrigger import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Workflow import org.opensearch.env.Environment import org.opensearch.env.NodeEnvironment import org.opensearch.index.IndexModule @@ -117,6 +125,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R @JvmField val OPEN_SEARCH_DASHBOARDS_USER_AGENT = "OpenSearch-Dashboards" @JvmField val UI_METADATA_EXCLUDE = arrayOf("monitor.${Monitor.UI_METADATA_FIELD}") @JvmField val MONITOR_BASE_URI = "/_plugins/_alerting/monitors" + @JvmField val WORKFLOW_BASE_URI = "/_plugins/_alerting/workflows" @JvmField val DESTINATION_BASE_URI = "/_plugins/_alerting/destinations" @JvmField val LEGACY_OPENDISTRO_MONITOR_BASE_URI = "/_opendistro/_alerting/monitors" @JvmField val LEGACY_OPENDISTRO_DESTINATION_BASE_URI = "/_opendistro/_alerting/destinations" @@ -129,6 +138,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R } lateinit var runner: MonitorRunnerService + lateinit var workflowRunner: WorkflowRunnerService lateinit var scheduler: JobScheduler lateinit var sweeper: JobSweeper lateinit var scheduledJobIndices: ScheduledJobIndices @@ -153,6 +163,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R RestIndexMonitorAction(), RestSearchMonitorAction(settings, clusterService), RestExecuteMonitorAction(), + RestExecuteWorkflowAction(), RestAcknowledgeAlertAction(), RestScheduledJobStatsHandler("_alerting"), RestSearchEmailAccountAction(), @@ -180,8 +191,11 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(SearchEmailGroupAction.INSTANCE, TransportSearchEmailGroupAction::class.java), ActionPlugin.ActionHandler(GetDestinationsAction.INSTANCE, TransportGetDestinationsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_ALERTS_ACTION_TYPE, TransportGetAlertsAction::class.java), - ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java) - + ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexWorkflowAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.GET_WORKFLOW_ACTION_TYPE, TransportGetWorkflowAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, TransportDeleteWorkflowAction::class.java), + ActionPlugin.ActionHandler(ExecuteWorkflowAction.INSTANCE, TransportExecuteWorkflowAction::class.java) ) } @@ -193,7 +207,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R QueryLevelTrigger.XCONTENT_REGISTRY, BucketLevelTrigger.XCONTENT_REGISTRY, ClusterMetricsInput.XCONTENT_REGISTRY, 
- DocumentLevelTrigger.XCONTENT_REGISTRY + DocumentLevelTrigger.XCONTENT_REGISTRY, + Workflow.XCONTENT_REGISTRY ) } @@ -227,6 +242,21 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R .registerDocLevelMonitorQueries(DocLevelMonitorQueries(client, clusterService)) .registerConsumers() .registerDestinationSettings() + workflowRunner = WorkflowRunnerService + .registerClusterService(clusterService) + .registerClient(client) + .registerNamedXContentRegistry(xContentRegistry) + .registerScriptService(scriptService) + .registerSettings(settings) + .registerThreadPool(threadPool) + .registerAlertIndices(alertIndices) + .registerInputService(InputService(client, scriptService, namedWriteableRegistry, xContentRegistry)) + .registerTriggerService(TriggerService(scriptService)) + .registerAlertService(AlertService(client, xContentRegistry, alertIndices)) + .registerDocLevelMonitorQueries(DocLevelMonitorQueries(client, clusterService)) + .registerWorkflowService(WorkflowService(client, xContentRegistry)) + .registerConsumers() + .registerDestinationSettings() scheduledJobIndices = ScheduledJobIndices(client.admin(), clusterService) docLevelMonitorQueries = DocLevelMonitorQueries(client, clusterService) scheduler = JobScheduler(threadPool, runner) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 3c4fc6425..45eebc332 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -25,6 +25,7 @@ import org.opensearch.alerting.util.defaultToPerExecutionAction import org.opensearch.alerting.util.getActionExecutionPolicy import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.alerting.util.getCombinedTriggerRunResult +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder @@ -59,7 +60,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryrun: Boolean + dryrun: Boolean, + workflowRunContext: WorkflowRunContext? ): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -118,7 +120,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitor, periodStart, periodEnd, - monitorResult.inputResults + monitorResult.inputResults, + workflowRunContext ) if (firstIteration) { firstPageOfInputResults = inputResults @@ -154,7 +157,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx, periodStart, periodEnd, - !dryrun && monitor.id != Monitor.NO_ID + !dryrun && monitor.id != Monitor.NO_ID, + workflowRunContext ) } else { emptyList() @@ -335,7 +339,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - shouldCreateFinding: Boolean + shouldCreateFinding: Boolean, + workflowRunContext: WorkflowRunContext? 
= null ): List { monitor.inputs.forEach { input -> if (input is SearchInput) { @@ -346,14 +351,14 @@ object BucketLevelMonitorRunner : MonitorRunner() { for (aggFactory in (query.aggregations() as AggregatorFactories.Builder).aggregatorFactories) { when (aggFactory) { is CompositeAggregationBuilder -> { - var grouByFields = 0 // if number of fields used to group by > 1 we won't calculate findings + var groupByFields = 0 // if number of fields used to group by > 1 we won't calculate findings val sources = aggFactory.sources() for (source in sources) { - if (grouByFields > 0) { + if (groupByFields > 0) { logger.error("grouByFields > 0. not generating findings for bucket level monitor ${monitor.id}") return listOf() } - grouByFields++ + groupByFields++ fieldName = source.field() } } @@ -392,7 +397,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { sr.source().query(queryBuilder) } val searchResponse: SearchResponse = monitorCtx.client!!.suspendUntil { monitorCtx.client!!.search(sr, it) } - return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding) + return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding, workflowRunContext?.executionId) } else { logger.error("Couldn't resolve groupBy field. Not generating bucket level monitor findings for monitor %${monitor.id}") } @@ -405,7 +410,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { searchResponse: SearchResponse, monitor: Monitor, monitorCtx: MonitorRunnerExecutionContext, - shouldCreateFinding: Boolean + shouldCreateFinding: Boolean, + workflowExecutionId: String? = null ): List { val docIdsByIndexName: MutableMap> = mutableMapOf() for (hit in searchResponse.hits.hits) { @@ -424,7 +430,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorName = monitor.name, index = it.key, timestamp = Instant.now(), - docLevelQueries = listOf() + docLevelQueries = listOf(), + executionId = workflowExecutionId ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index c45548c6b..787a43ddb 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -25,6 +25,7 @@ import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.defaultToPerExecutionAction import org.opensearch.alerting.util.getActionExecutionPolicy import org.opensearch.alerting.util.updateMonitorMetadata +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.client.Client import org.opensearch.cluster.routing.ShardRouting import org.opensearch.cluster.service.ClusterService @@ -63,7 +64,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryrun: Boolean + dryrun: Boolean, + workflowRunContext: WorkflowRunContext? 
): MonitorRunResult { logger.debug("Document-level-monitor is running ...") var monitorResult = MonitorRunResult(monitor.name, periodStart, periodEnd) @@ -98,7 +100,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() { var monitorMetadata = getMonitorMetadata(monitorCtx.client!!, monitorCtx.xContentRegistry!!, "${monitor.id}-metadata") if (monitorMetadata == null) { - monitorMetadata = createMonitorMetadata(monitor.id) + monitorMetadata = createMonitorMetadata(monitor.id, workflowRunContext?.workflowId) } val isTempMonitor = dryrun || monitor.id == Monitor.NO_ID @@ -154,7 +156,16 @@ object DocumentLevelMonitorRunner : MonitorRunner() { // Prepare DocumentExecutionContext for each index val docExecutionContext = DocumentExecutionContext(queries, indexLastRunContext, indexUpdatedRunContext) - val matchingDocs = getMatchingDocs(monitor, monitorCtx, docExecutionContext, indexName) + // If monitor execution is triggered from a workflow + val indexToRelatedDocIdsMap = workflowRunContext?.matchingDocIdsPerIndex + + val matchingDocs = getMatchingDocs( + monitor, + monitorCtx, + docExecutionContext, + indexName, + indexToRelatedDocIdsMap?.get(index) + ) if (matchingDocs.isNotEmpty()) { val matchedQueriesForDocs = getMatchedQueries(monitorCtx, matchingDocs.map { it.second }, monitor, indexName) @@ -202,7 +213,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { idQueryMap, docsToQueries, queryToDocIds, - dryrun + dryrun, + workflowRunContext?.executionId ) } @@ -223,7 +235,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { idQueryMap: Map, docsToQueries: Map>, queryToDocIds: Map>, - dryrun: Boolean + dryrun: Boolean, + workflowExecutionId: String? = null ): DocumentLevelTriggerRunResult { val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger) val triggerResult = monitorCtx.triggerService!!.runDocLevelTrigger(monitor, trigger, queryToDocIds) @@ -234,7 +247,14 @@ object DocumentLevelMonitorRunner : MonitorRunner() { // TODO: Implement throttling for findings docsToQueries.forEach { val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! } - val findingId = createFindings(monitor, monitorCtx, triggeredQueries, it.key, !dryrun && monitor.id != Monitor.NO_ID) + val findingId = createFindings( + monitor, + monitorCtx, + triggeredQueries, + it.key, + !dryrun && monitor.id != Monitor.NO_ID, + workflowExecutionId + ) findings.add(findingId) if (triggerResult.triggeredDocs.contains(it.key)) { @@ -304,7 +324,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, docLevelQueries: List, matchingDocId: String, - shouldCreateFinding: Boolean + shouldCreateFinding: Boolean, + workflowExecutionId: String? = null, ): String { // Before the "|" is the doc id and after the "|" is the index val docIndex = matchingDocId.split("|") @@ -316,7 +337,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitorName = monitor.name, index = docIndex[1], docLevelQueries = docLevelQueries, - timestamp = Instant.now() + timestamp = Instant.now(), + executionId = workflowExecutionId ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() @@ -433,7 +455,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitor: Monitor, monitorCtx: MonitorRunnerExecutionContext, docExecutionCtx: DocumentExecutionContext, - index: String + index: String, + docIds: List? 
= null ): List> { val count: Int = docExecutionCtx.updatedLastRunContext["shards_count"] as Int val matchingDocs = mutableListOf>() @@ -449,7 +472,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { shard, prevSeqNo, maxSeqNo, - null + null, + docIds ) if (hits.hits.isNotEmpty()) { @@ -468,7 +492,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { shard: String, prevSeqNo: Long?, maxSeqNo: Long, - query: String? + query: String?, + docIds: List? = null ): SearchHits { if (prevSeqNo?.equals(maxSeqNo) == true && maxSeqNo != 0L) { return SearchHits.empty() @@ -480,6 +505,10 @@ object DocumentLevelMonitorRunner : MonitorRunner() { boolQueryBuilder.must(QueryBuilders.queryStringQuery(query)) } + if (!docIds.isNullOrEmpty()) { + boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIds)) + } + val request: SearchRequest = SearchRequest() .indices(index) .preference("_shards:$shard") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index b7e86ee90..6401ca6b8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -16,6 +16,7 @@ import org.opensearch.alerting.util.AggregationQueryRewriter import org.opensearch.alerting.util.addUserBackendRolesFilter import org.opensearch.alerting.util.executeTransportAction import org.opensearch.alerting.util.toMap +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.client.Client import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput @@ -26,6 +27,11 @@ import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.model.ClusterMetricsInput import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.index.query.BoolQueryBuilder +import org.opensearch.index.query.MatchQueryBuilder +import org.opensearch.index.query.QueryBuilder +import org.opensearch.index.query.QueryBuilders +import org.opensearch.index.query.TermsQueryBuilder import org.opensearch.script.Script import org.opensearch.script.ScriptService import org.opensearch.script.ScriptType @@ -47,12 +53,16 @@ class InputService( monitor: Monitor, periodStart: Instant, periodEnd: Instant, - prevResult: InputRunResults? = null + prevResult: InputRunResults? = null, + workflowRunContext: WorkflowRunContext? 
= null ): InputRunResults { return try { val results = mutableListOf>() val aggTriggerAfterKey: MutableMap = mutableMapOf() + // If monitor execution is triggered from a workflow + val matchingDocIdsPerIndex = workflowRunContext?.matchingDocIdsPerIndex + // TODO: If/when multiple input queries are supported for Bucket-Level Monitor execution, aggTriggerAfterKeys will // need to be updated to account for it monitor.inputs.forEach { input -> @@ -63,9 +73,17 @@ class InputService( "period_start" to periodStart.toEpochMilli(), "period_end" to periodEnd.toEpochMilli() ) + // Deep copying query before passing it to rewriteQuery since otherwise, the monitor.input is modified directly // which causes a strange bug where the rewritten query persists on the Monitor across executions val rewrittenQuery = AggregationQueryRewriter.rewriteQuery(deepCopyQuery(input.query), prevResult, monitor.triggers) + + // Rewrite query to consider the doc ids per given index + if (chainedFindingExist(matchingDocIdsPerIndex) && rewrittenQuery.query() != null) { + val updatedSourceQuery = updateInputQueryWithFindingDocIds(rewrittenQuery.query(), matchingDocIdsPerIndex!!) + rewrittenQuery.query(updatedSourceQuery) + } + val searchSource = scriptService.compile( Script( ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, @@ -105,6 +123,35 @@ class InputService( } } + /** + * Extends the given query builder with query that filters the given indices with the given doc ids per index + * Used whenever we want to select the documents that were found in chained delegate execution of the current workflow run + * + * @param query Original bucket monitor query + * @param indexToDocIds Map of finding doc ids grouped by index + */ + private fun updateInputQueryWithFindingDocIds( + query: QueryBuilder, + indexToDocIds: Map>, + ): QueryBuilder { + val queryBuilder = QueryBuilders.boolQuery().must(query) + val shouldQuery = QueryBuilders.boolQuery() + + indexToDocIds.forEach { entry -> + shouldQuery + .should() + .add( + BoolQueryBuilder() + .must(MatchQueryBuilder("_index", entry.key)) + .must(TermsQueryBuilder("_id", entry.value)) + ) + } + return queryBuilder.must(shouldQuery) + } + + private fun chainedFindingExist(indexToDocIds: Map>?) = + !indexToDocIds.isNullOrEmpty() + private fun deepCopyQuery(query: SearchSourceBuilder): SearchSourceBuilder { val out = BytesStreamOutput() query.writeTo(out) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt index c7887e466..432b34a40 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt @@ -26,6 +26,7 @@ import org.opensearch.alerting.util.destinationmigration.publishLegacyNotificati import org.opensearch.alerting.util.destinationmigration.sendNotification import org.opensearch.alerting.util.isAllowed import org.opensearch.alerting.util.isTestAction +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.client.node.NodeClient import org.opensearch.common.Strings import org.opensearch.commons.alerting.model.Monitor @@ -41,7 +42,8 @@ abstract class MonitorRunner { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryRun: Boolean + dryRun: Boolean, + workflowRunContext: WorkflowRunContext? 
= null ): MonitorRunResult<*> suspend fun runAction( @@ -181,7 +183,11 @@ abstract class MonitorRunner { return NotificationActionConfigs(destination, channel) } - protected fun createMonitorMetadata(monitorId: String): MonitorMetadata { - return MonitorMetadata("$monitorId-metadata", monitorId, emptyList(), emptyMap()) + protected fun createMonitorMetadata(monitorId: String, workflowId: String? = null): MonitorMetadata { + return if (workflowId.isNullOrEmpty()) { + MonitorMetadata("$monitorId-metadata", monitorId, emptyList(), emptyMap()) + } else { + MonitorMetadata("$monitorId-$workflowId-metadata", monitorId, emptyList(), emptyMap()) + } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt index 55624d66e..e68512351 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt @@ -33,6 +33,7 @@ data class MonitorRunnerExecutionContext( var triggerService: TriggerService? = null, var alertService: AlertService? = null, var docLevelMonitorQueries: DocLevelMonitorQueries? = null, + var workflowService: WorkflowService? = null, @Volatile var retryPolicy: BackoffPolicy? = null, @Volatile var moveAlertsRetryPolicy: BackoffPolicy? = null, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt index 9864d4a9d..faa2ffde6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt @@ -12,6 +12,7 @@ import org.opensearch.alerting.opensearchapi.InjectorContextElement import org.opensearch.alerting.opensearchapi.withClosableContext import org.opensearch.alerting.script.QueryLevelTriggerExecutionContext import org.opensearch.alerting.util.isADMonitor +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.QueryLevelTrigger @@ -25,7 +26,8 @@ object QueryLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryrun: Boolean + dryrun: Boolean, + workflowRunContext: WorkflowRunContext? 
): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -48,7 +50,7 @@ object QueryLevelMonitorRunner : MonitorRunner() { if (!isADMonitor(monitor)) { withClosableContext(InjectorContextElement(monitor.id, monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, roles)) { monitorResult = monitorResult.copy( - inputResults = monitorCtx.inputService!!.collectInputResults(monitor, periodStart, periodEnd) + inputResults = monitorCtx.inputService!!.collectInputResults(monitor, periodStart, periodEnd, null, workflowRunContext) ) } } else { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt new file mode 100644 index 000000000..ae018c843 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt @@ -0,0 +1,130 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting + +import org.apache.logging.log4j.LogManager +import org.opensearch.OpenSearchException +import org.opensearch.action.search.SearchRequest +import org.opensearch.action.search.SearchResponse +import org.opensearch.alerting.opensearchapi.suspendUntil +import org.opensearch.alerting.util.AlertingException +import org.opensearch.client.Client +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.index.query.QueryBuilders +import org.opensearch.search.builder.SearchSourceBuilder + +private val log = LogManager.getLogger(WorkflowService::class.java) + +/** + * Contains util methods used in workflow execution + */ +class WorkflowService( + val client: Client, + val xContentRegistry: NamedXContentRegistry, +) { + /** + * Returns finding doc ids per index for the given workflow execution + * Used for pre-filtering the dataset in the case of creating a workflow with chained findings + * + * @param chainedMonitor Monitor that is previously executed + * @param workflowExecutionId Execution id of the current workflow + */ + suspend fun getFindingDocIdsByExecutionId(chainedMonitor: Monitor, workflowExecutionId: String): Map> { + try { + // Search findings index per monitor and workflow execution id + val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, chainedMonitor.id)) + .filter(QueryBuilders.termQuery(Finding.EXECUTION_ID_FIELD, workflowExecutionId)) + val searchRequest = SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) + ) + .indices(chainedMonitor.dataSources.findingsIndex) + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + + // Get the findings docs + val findings = mutableListOf() + for (hit in searchResponse.hits) { + val xcp = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + 
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp)
+                val finding = Finding.parse(xcp)
+                findings.add(finding)
+            }
+            // Based on the findings get the document ids
+            val indexToRelatedDocIdsMap = mutableMapOf<String, MutableList<String>>()
+            for (finding in findings) {
+                indexToRelatedDocIdsMap.getOrPut(finding.index) { mutableListOf() }.addAll(finding.relatedDocIds)
+            }
+            return indexToRelatedDocIdsMap
+        } catch (t: Exception) {
+            log.error("Error getting finding doc ids: ${t.message}")
+            throw AlertingException.wrap(t)
+        }
+    }
+
+    /**
+     * Returns the list of monitors for the given ids
+     * Used in workflow execution in order to figure out the monitor type
+     *
+     * @param monitors List of monitor ids
+     * @param size Expected number of monitors
+     */
+    suspend fun getMonitorsById(monitors: List<String>, size: Int): List<Monitor> {
+        try {
+            val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitors))
+
+            val searchRequest = SearchRequest()
+                .source(
+                    SearchSourceBuilder()
+                        .query(bqb)
+                        .version(true)
+                        .seqNoAndPrimaryTerm(true)
+                        .size(size)
+                )
+                .indices(ScheduledJob.SCHEDULED_JOBS_INDEX)
+
+            val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) }
+            return parseMonitors(searchResponse)
+        } catch (e: Exception) {
+            log.error("Error getting monitors: ${e.message}")
+            throw AlertingException.wrap(e)
+        }
+    }
+
+    private fun parseMonitors(response: SearchResponse): List<Monitor> {
+        if (response.isTimedOut) {
+            log.error("Request for getting monitors timed out")
+            throw OpenSearchException("Cannot determine that the ${ScheduledJob.SCHEDULED_JOBS_INDEX} index is healthy")
+        }
+        val monitors = mutableListOf<Monitor>()
+        try {
+            for (hit in response.hits) {
+                XContentType.JSON.xContent().createParser(
+                    xContentRegistry,
+                    LoggingDeprecationHandler.INSTANCE, hit.sourceAsString
+                ).use { hitsParser ->
+                    val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) as Monitor
+                    monitors.add(monitor)
+                }
+            }
+        } catch (e: Exception) {
+            log.error("Error parsing monitors: ${e.message}")
+            throw AlertingException.wrap(e)
+        }
+        return monitors
+    }
+}
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt
new file mode 100644
index 000000000..a6e986ea3
--- /dev/null
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt
@@ -0,0 +1,15 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.alerting.action
+
+import org.opensearch.action.ActionType
+
+class ExecuteWorkflowAction private constructor() : ActionType<ExecuteWorkflowResponse>(NAME, ::ExecuteWorkflowResponse) {
+    companion object {
+        val INSTANCE = ExecuteWorkflowAction()
+        const val NAME = "cluster:admin/opendistro/alerting/workflow/execute"
+    }
+}
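The two `WorkflowService` helpers above are the glue for chained delegates: `getMonitorsById` resolves delegate ids to typed monitors, and `getFindingDocIdsByExecutionId` turns the previous delegate's findings into a per-index doc-id map. A minimal sketch of how a workflow runner might combine them; the function name, `delegateOrder`, and `executionId` are illustrative, not part of this diff:

```kotlin
// Hypothetical helper, assumed to live next to WorkflowService in the same package.
suspend fun resolveChainedInput(
    workflowService: WorkflowService,
    delegateOrder: List<String>, // delegate monitor ids in execution order
    executionId: String          // current workflow execution id
): Map<String, List<String>>? {
    // Resolve ids to monitors so the runner can pick the right MonitorRunner per type
    val monitors = workflowService.getMonitorsById(delegateOrder, delegateOrder.size)
    val previous = monitors.firstOrNull() ?: return null
    // Doc ids emitted as findings by the previous delegate, grouped by concrete index
    return workflowService.getFindingDocIdsByExecutionId(previous, executionId)
}
```

The returned map is what `WorkflowRunContext.matchingDocIdsPerIndex` carries into `InputService` and `DocumentLevelMonitorRunner` above.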
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt
new file mode 100644
index 000000000..2d97bbdcc
--- /dev/null
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt
@@ -0,0 +1,70 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.alerting.action
+
+import org.opensearch.action.ActionRequest
+import org.opensearch.action.ActionRequestValidationException
+import org.opensearch.action.ValidateActions
+import org.opensearch.common.io.stream.StreamInput
+import org.opensearch.common.io.stream.StreamOutput
+import org.opensearch.common.unit.TimeValue
+import org.opensearch.commons.alerting.model.Workflow
+import java.io.IOException
+
+/**
+ * A request to execute a workflow, either by referencing a stored workflow id or by
+ * passing an inline workflow definition.
+ */
+class ExecuteWorkflowRequest : ActionRequest {
+    val dryrun: Boolean
+    val requestEnd: TimeValue
+    val workflowId: String?
+    val workflow: Workflow?
+
+    constructor(
+        dryrun: Boolean,
+        requestEnd: TimeValue,
+        workflowId: String?,
+        workflow: Workflow?,
+    ) : super() {
+        this.dryrun = dryrun
+        this.requestEnd = requestEnd
+        this.workflowId = workflowId
+        this.workflow = workflow
+    }
+
+    @Throws(IOException::class)
+    constructor(sin: StreamInput) : this(
+        sin.readBoolean(),
+        sin.readTimeValue(),
+        sin.readOptionalString(),
+        if (sin.readBoolean()) {
+            Workflow.readFrom(sin)
+        } else null
+    )
+
+    override fun validate(): ActionRequestValidationException? {
+        var validationException: ActionRequestValidationException? = null
+        if (workflowId == null && workflow == null) {
+            validationException = ValidateActions.addValidationError(
+                "Both workflow and workflow id are missing", validationException
+            )
+        }
+        return validationException
+    }
+
+    @Throws(IOException::class)
+    override fun writeTo(out: StreamOutput) {
+        out.writeBoolean(dryrun)
+        out.writeTimeValue(requestEnd)
+        out.writeOptionalString(workflowId)
+        if (workflow != null) {
+            out.writeBoolean(true)
+            workflow.writeTo(out)
+        } else {
+            out.writeBoolean(false)
+        }
+    }
+}
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt
new file mode 100644
index 000000000..a58eff9ae
--- /dev/null
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt
@@ -0,0 +1,39 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.alerting.action
+
+import org.opensearch.action.ActionResponse
+import org.opensearch.alerting.model.WorkflowRunResult
+import org.opensearch.common.io.stream.StreamInput
+import org.opensearch.common.io.stream.StreamOutput
+import org.opensearch.common.xcontent.ToXContent
+import org.opensearch.common.xcontent.ToXContentObject
+import org.opensearch.common.xcontent.XContentBuilder
+import java.io.IOException
+
+class ExecuteWorkflowResponse : ActionResponse, ToXContentObject {
+    val workflowRunResult: WorkflowRunResult
+    constructor(
+        workflowRunResult: WorkflowRunResult
+    ) : super() {
+        this.workflowRunResult = workflowRunResult
+    }
+
+    @Throws(IOException::class)
+    constructor(sin: StreamInput) : this(
+        WorkflowRunResult(sin)
+    )
+
+    @Throws(IOException::class)
+    override fun writeTo(out: StreamOutput) {
+        workflowRunResult.writeTo(out)
+    }
+
+    @Throws(IOException::class)
+    override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
+        return workflowRunResult.toXContent(builder, params)
+    }
+}
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt
index 7a96d2a44..654ee7d00 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt
@@ -51,6 +51,28 @@ class AlertingConfigAccessor {
         }
     }
 
+    suspend fun getWorkflowMetadata(client: Client, xContentRegistry:
NamedXContentRegistry, metadataId: String): WorkflowMetadata? { + return try { + val jobSource = getAlertingConfigDocumentSource(client, "Workflow Metadata", metadataId) + withContext(Dispatchers.IO) { + val xcp = XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + jobSource, XContentType.JSON + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + WorkflowMetadata.parse(xcp) + } + } catch (e: IllegalStateException) { + if (e.message?.equals("Workflow Metadata document with id $metadataId not found or source is empty") == true) { + return null + } else throw e + } catch (e: IndexNotFoundException) { + if (e.message?.equals("no such index [.opendistro-alerting-config]") == true) { + return null + } else throw e + } + } + suspend fun getEmailAccountInfo(client: Client, xContentRegistry: NamedXContentRegistry, emailAccountId: String): EmailAccount { val source = getAlertingConfigDocumentSource(client, "Email account", emailAccountId) return withContext(Dispatchers.IO) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt new file mode 100644 index 000000000..c07bcfdb4 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt @@ -0,0 +1,103 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.model + +import org.opensearch.common.io.stream.StreamInput +import org.opensearch.common.io.stream.StreamOutput +import org.opensearch.common.io.stream.Writeable +import org.opensearch.common.xcontent.ToXContent +import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import java.io.IOException +import java.time.Instant + +data class WorkflowMetadata( + val id: String, + val workflowId: String, + val monitorIds: List, + val latestRunTime: Instant, + val latestExecutionId: String +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + workflowId = sin.readString(), + monitorIds = sin.readStringList(), + latestRunTime = sin.readInstant(), + latestExecutionId = sin.readString() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(workflowId) + out.writeStringCollection(monitorIds) + out.writeInstant(latestRunTime) + out.writeString(latestExecutionId) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) + builder.field(WORKFLOW_ID_FIELD, workflowId) + .field(MONITOR_IDS_FIELD, monitorIds) + .optionalTimeField(LATEST_RUN_TIME, latestRunTime) + .field(LATEST_EXECUTION_ID, latestExecutionId) + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + companion object { + const val METADATA = "workflow_metadata" + const val WORKFLOW_ID_FIELD = "workflow_id" + const val MONITOR_IDS_FIELD = "monitor_ids" + const val LATEST_RUN_TIME = "latest_run_time" + const val LATEST_EXECUTION_ID = "latest_execution_id" + + @JvmStatic @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: 
XContentParser): WorkflowMetadata {
+            lateinit var workflowId: String
+            val monitorIds = mutableListOf<String>()
+            lateinit var latestRunTime: Instant
+            lateinit var latestExecutionId: String
+
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp)
+            while (xcp.nextToken() != XContentParser.Token.END_OBJECT) {
+                val fieldName = xcp.currentName()
+                xcp.nextToken()
+
+                when (fieldName) {
+                    WORKFLOW_ID_FIELD -> workflowId = xcp.text()
+                    MONITOR_IDS_FIELD -> {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp)
+                        while (xcp.nextToken() != XContentParser.Token.END_ARRAY) {
+                            monitorIds.add(xcp.text())
+                        }
+                    }
+                    LATEST_RUN_TIME -> latestRunTime = xcp.instant()!!
+                    LATEST_EXECUTION_ID -> latestExecutionId = xcp.text()
+                }
+            }
+            return WorkflowMetadata(
+                "$workflowId-metadata",
+                workflowId = workflowId,
+                monitorIds = monitorIds,
+                latestRunTime = latestRunTime,
+                latestExecutionId = latestExecutionId
+            )
+        }
+
+        @JvmStatic
+        @Throws(IOException::class)
+        fun readFrom(sin: StreamInput): WorkflowMetadata {
+            return WorkflowMetadata(sin)
+        }
+    }
+}
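Taken together with `createMonitorMetadata` in `MonitorRunner.kt` above, the parse method fixes the document-id conventions for workflow state. A short illustration of the id scheme; the two id values themselves are made up:

```kotlin
// Id conventions implied by this diff; "wf-123" and "mon-456" are illustrative only.
val workflowId = "wf-123"
val monitorId = "mon-456"
val workflowMetadataId = "$workflowId-metadata"           // WorkflowMetadata.parse above
val monitorMetadataId = "$monitorId-metadata"             // createMonitorMetadata(monitorId)
val workflowScopedId = "$monitorId-$workflowId-metadata"  // createMonitorMetadata(monitorId, workflowId)
```

Keeping a separate `-$workflowId-` variant means a delegate monitor tracks its run context (for example last-run sequence numbers) independently per workflow, rather than sharing state with its standalone executions.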
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt
new file mode 100644
index 000000000..762b097f1
--- /dev/null
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt
@@ -0,0 +1,53 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.alerting.model
+
+import org.opensearch.common.io.stream.StreamInput
+import org.opensearch.common.io.stream.StreamOutput
+import org.opensearch.common.io.stream.Writeable
+import org.opensearch.common.xcontent.ToXContent
+import org.opensearch.common.xcontent.XContentBuilder
+import java.io.IOException
+import java.lang.Exception
+import java.time.Instant
+
+data class WorkflowRunResult(
+    val workflowRunResult: List<MonitorRunResult<*>> = mutableListOf(),
+    val executionStartTime: Instant,
+    val executionEndTime: Instant? = null,
+    val executionId: String,
+    val error: Exception? = null
+) : Writeable, ToXContent {
+
+    @Throws(IOException::class)
+    @Suppress("UNCHECKED_CAST")
+    constructor(sin: StreamInput) : this(
+        sin.readList<MonitorRunResult<*>> { s: StreamInput -> MonitorRunResult.readFrom(s) },
+        sin.readInstant(),
+        sin.readOptionalInstant(), // executionEndTime is nullable, so read/write it as optional
+        sin.readString(),
+        sin.readException()
+    )
+
+    override fun writeTo(out: StreamOutput) {
+        out.writeList(workflowRunResult)
+        out.writeInstant(executionStartTime)
+        out.writeOptionalInstant(executionEndTime)
+        out.writeString(executionId)
+        out.writeException(error)
+    }
+
+    override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
+        builder.startObject().startArray("workflow_run_result")
+        for (monitorResult in workflowRunResult) {
+            monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS)
+        }
+        builder.endArray().field("execution_start_time", executionStartTime)
+            .field("execution_end_time", executionEndTime)
+            .field("error", error?.message).endObject()
+        return builder
+    }
+}
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt
deleted file mode 100644
index dc643e716..000000000
--- a/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt
+++ /dev/null
@@ -1,4 +0,0 @@
-package org.opensearch.alerting.model.workflow
-
-data class WorkflowRunResult {
-}
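A quick serialization round trip makes the wire contract above concrete. This sketch assumes the `readOptionalInstant`/`writeOptionalInstant` pairing shown above for the nullable `executionEndTime`, plus `BytesStreamOutput` from `org.opensearch.common.io.stream`:

```kotlin
import org.opensearch.common.io.stream.BytesStreamOutput
import java.time.Instant

// Round-trip sketch: serialize an in-progress run result and read it back.
val result = WorkflowRunResult(
    workflowRunResult = emptyList(),
    executionStartTime = Instant.now(),
    executionEndTime = null,     // run still in progress, hence optional on the wire
    executionId = "wf-exec-1",   // hypothetical execution id
    error = null
)
val out = BytesStreamOutput()
result.writeTo(out)
val copy = WorkflowRunResult(out.bytes().streamInput())
check(copy.executionId == result.executionId && copy.executionEndTime == null)
```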
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt
new file mode 100644
index 000000000..bf93bc590
--- /dev/null
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt
@@ -0,0 +1,57 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.alerting.resthandler
+
+import org.apache.logging.log4j.LogManager
+import org.opensearch.alerting.AlertingPlugin
+import org.opensearch.alerting.action.ExecuteWorkflowAction
+import org.opensearch.alerting.action.ExecuteWorkflowRequest
+import org.opensearch.client.node.NodeClient
+import org.opensearch.common.unit.TimeValue
+import org.opensearch.common.xcontent.XContentParser
+import org.opensearch.common.xcontent.XContentParserUtils
+import org.opensearch.commons.alerting.model.Workflow
+import org.opensearch.rest.BaseRestHandler
+import org.opensearch.rest.RestHandler
+import org.opensearch.rest.RestRequest
+import org.opensearch.rest.action.RestToXContentListener
+import java.time.Instant
+
+private val log = LogManager.getLogger(RestExecuteWorkflowAction::class.java)
+
+class RestExecuteWorkflowAction : BaseRestHandler() {
+
+    override fun getName(): String = "execute_workflow_action"
+
+    override fun routes(): List<RestHandler.Route> {
+        return listOf()
+    }
+
+    override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer {
+        log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/_execute")
+
+        return RestChannelConsumer { channel ->
+            val dryrun = request.paramAsBoolean("dryrun", false)
+            val requestEnd = request.paramAsTime("period_end", TimeValue(Instant.now().toEpochMilli()))
+
+            if (request.hasParam("workflowID")) {
+                val workflowId = request.param("workflowID")
+                val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, workflowId, null)
+                client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel))
+            } else {
+                val xcp = request.contentParser()
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp)
+                val workflow = Workflow.parse(xcp, Workflow.NO_ID, Workflow.NO_VERSION)
+                val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, null, workflow)
+                client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel))
+            }
+        }
+    }
+
+    override fun responseParams(): Set<String> {
+        return setOf("dryrun", "period_end", "workflowID")
+    }
+}
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt
index ab57a0d45..89c0133c7 100644
--- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt
+++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt
@@ -10,6 +10,7 @@ import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.GlobalScope
 import kotlinx.coroutines.launch
 import org.apache.logging.log4j.LogManager
+import org.apache.lucene.search.join.ScoreMode
 import org.opensearch.OpenSearchStatusException
 import org.opensearch.action.ActionListener
 import org.opensearch.action.ActionRequest
@@ -17,6 +18,8 @@ import org.opensearch.action.delete.DeleteRequest
 import org.opensearch.action.delete.DeleteResponse
 import org.opensearch.action.get.GetRequest
 import org.opensearch.action.get.GetResponse
+import org.opensearch.action.search.SearchRequest
+import org.opensearch.action.search.SearchResponse
 import org.opensearch.action.support.ActionFilters
 import org.opensearch.action.support.HandledTransportAction
 import org.opensearch.alerting.opensearchapi.suspendUntil
@@ -35,6 +38,7 @@ import org.opensearch.commons.alerting.action.DeleteMonitorRequest
 import org.opensearch.commons.alerting.action.DeleteMonitorResponse
 import org.opensearch.commons.alerting.model.Monitor
 import org.opensearch.commons.alerting.model.ScheduledJob
+import org.opensearch.commons.alerting.model.Workflow
 import org.opensearch.commons.authuser.User
 import org.opensearch.commons.utils.recreateObject
 import org.opensearch.index.query.QueryBuilders
@@ -42,6 +46,7 @@ import org.opensearch.index.reindex.BulkByScrollResponse
 import org.opensearch.index.reindex.DeleteByQueryAction
 import org.opensearch.index.reindex.DeleteByQueryRequestBuilder
 import org.opensearch.rest.RestStatus
+import org.opensearch.search.builder.SearchSourceBuilder
 import org.opensearch.tasks.Task
 import org.opensearch.transport.TransportService
 import kotlin.coroutines.resume
@@ -95,9 +100,10 @@
         try {
             val monitor = getMonitor()
 
-            val canDelete = user == null ||
-                !doFilterForUser(user) ||
-                checkUserPermissionsWithResource(user, monitor.user, actionListener, "monitor", monitorId)
+            val canDelete = monitorIsNotInWorkflows(monitor.id) && (
+                user == null || !doFilterForUser(user) ||
+                    checkUserPermissionsWithResource(user, monitor.user, actionListener, "monitor", monitorId)
+                )
 
             if (canDelete) {
                 val deleteResponse = deleteMonitor(monitor)
@@ -114,6 +120,34 @@
         }
     }
 
+    /**
+     * Checks that the monitor is not a delegate of any workflow, i.e. that it is safe to delete
+     *
+     * @param monitorId - id of monitor that is checked if it is a workflow delegate
+     */
+    private suspend fun monitorIsNotInWorkflows(monitorId: String): Boolean {
+        val queryBuilder = QueryBuilders.nestedQuery(
+            Workflow.WORKFLOW_DELEGATE_PATH,
QueryBuilders.boolQuery().must( + QueryBuilders.matchQuery( + Workflow.WORKFLOW_MONITOR_PATH, + monitorId + ) + ), + ScoreMode.None + ) + + val searchRequest = SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .source(SearchSourceBuilder().query(queryBuilder).fetchSource(true)) + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + if (searchResponse.hits.totalHits?.value == 0L) { + return true + } + return false + } + private suspend fun getMonitor(): Monitor { val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt new file mode 100644 index 000000000..5d6086a1d --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt @@ -0,0 +1,248 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.transport + +import kotlinx.coroutines.CoroutineName +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.GlobalScope +import kotlinx.coroutines.launch +import org.apache.logging.log4j.LogManager +import org.apache.lucene.search.join.ScoreMode +import org.opensearch.OpenSearchStatusException +import org.opensearch.action.ActionListener +import org.opensearch.action.ActionRequest +import org.opensearch.action.delete.DeleteRequest +import org.opensearch.action.delete.DeleteResponse +import org.opensearch.action.get.GetRequest +import org.opensearch.action.get.GetResponse +import org.opensearch.action.search.SearchRequest +import org.opensearch.action.search.SearchResponse +import org.opensearch.action.support.ActionFilters +import org.opensearch.action.support.HandledTransportAction +import org.opensearch.action.support.WriteRequest.RefreshPolicy +import org.opensearch.alerting.opensearchapi.suspendUntil +import org.opensearch.alerting.settings.AlertingSettings +import org.opensearch.alerting.util.AlertingException +import org.opensearch.client.Client +import org.opensearch.client.node.NodeClient +import org.opensearch.cluster.service.ClusterService +import org.opensearch.common.inject.Inject +import org.opensearch.common.settings.Settings +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.AlertingPluginInterface +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteMonitorRequest +import org.opensearch.commons.alerting.action.DeleteMonitorResponse +import org.opensearch.commons.alerting.action.DeleteWorkflowRequest +import org.opensearch.commons.alerting.action.DeleteWorkflowResponse +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.authuser.User +import org.opensearch.commons.utils.recreateObject +import org.opensearch.index.IndexNotFoundException +import org.opensearch.index.query.QueryBuilders +import org.opensearch.rest.RestStatus +import org.opensearch.search.builder.SearchSourceBuilder +import 
org.opensearch.tasks.Task +import org.opensearch.transport.TransportService + +private val log = LogManager.getLogger(TransportIndexMonitorAction::class.java) + +/** + * Transport class that deletes the workflow. + * If the deleteDelegateMonitor flag is set to true, deletes the workflow delegates that are not part of another workflow + */ +class TransportDeleteWorkflowAction @Inject constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: NamedXContentRegistry +) : HandledTransportAction( + AlertingActions.DELETE_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::DeleteWorkflowRequest +), + SecureTransportAction { + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) + } + + override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { + val transformedRequest = request as? DeleteWorkflowRequest + ?: recreateObject(request) { DeleteWorkflowRequest(it) } + + val user = readUserFromThreadContext(client) + val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.workflowId) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + + if (!validateUserBackendRoles(user, actionListener)) { + return + } + + GlobalScope.launch(Dispatchers.IO + CoroutineName("DeleteWorkflowAction")) { + DeleteWorkflowHandler( + client, + actionListener, + deleteRequest, + transformedRequest.deleteDelegateMonitors, + user, + transformedRequest.workflowId + ).resolveUserAndStart() + } + } + + inner class DeleteWorkflowHandler( + private val client: Client, + private val actionListener: ActionListener, + private val deleteRequest: DeleteRequest, + private val deleteDelegateMonitors: Boolean?, + private val user: User?, + private val workflowId: String + ) { + suspend fun resolveUserAndStart() { + try { + val workflow = getWorkflow() + + val canDelete = user == null || + !doFilterForUser(user) || + checkUserPermissionsWithResource( + user, + workflow.user, + actionListener, + "workflow", + workflowId + ) + + if (canDelete) { + val deleteResponse = deleteWorkflow(workflow) + deleteMetadata(workflow) + if (deleteDelegateMonitors == true) { + val delegateMonitorIds = (workflow.inputs[0] as CompositeInput).getMonitorIds() + val monitorIdsToBeDeleted = getDeletableDelegates(workflowId, delegateMonitorIds) + + // Delete the monitor ids + if (!monitorIdsToBeDeleted.isNullOrEmpty()) { + deleteMonitors(monitorIdsToBeDeleted, RefreshPolicy.IMMEDIATE) + } + } + actionListener.onResponse(DeleteWorkflowResponse(deleteResponse.id, deleteResponse.version)) + } else { + actionListener.onFailure( + AlertingException( + "Not allowed to delete this workflow!", + RestStatus.FORBIDDEN, + IllegalStateException() + ) + ) + } + } catch (t: Exception) { + if (t is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Workflow not found.", + RestStatus.NOT_FOUND + ) + ) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } + } + } + + private suspend fun deleteMonitors(monitorIds: List, refreshPolicy: RefreshPolicy) { + if (monitorIds.isNullOrEmpty()) + return + + for (monitorId in monitorIds) { + val deleteRequest = DeleteMonitorRequest(monitorId, refreshPolicy) + val searchResponse: DeleteMonitorResponse = client.suspendUntil { + AlertingPluginInterface.deleteMonitor(this as NodeClient, deleteRequest, it) + } 
+            }
+        }
+
+        /**
+         * Returns the list of monitor ids belonging only to the given workflow
+         * @param workflowIdToBeDeleted Id of the workflow that should be deleted
+         * @param monitorIds List of delegate monitor ids (underlying monitor ids)
+         */
+        private suspend fun getDeletableDelegates(workflowIdToBeDeleted: String, monitorIds: List<String>): List<String> {
+            // Retrieve monitors belonging to other workflows
+            val queryBuilder = QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_id", workflowIdToBeDeleted)).filter(
+                QueryBuilders.nestedQuery(
+                    Workflow.WORKFLOW_DELEGATE_PATH,
+                    QueryBuilders.boolQuery().must(
+                        QueryBuilders.termsQuery(
+                            Workflow.WORKFLOW_MONITOR_PATH,
+                            monitorIds
+                        )
+                    ),
+                    ScoreMode.None
+                )
+            )
+
+            val searchRequest = SearchRequest()
+                .indices(ScheduledJob.SCHEDULED_JOBS_INDEX)
+                .source(SearchSourceBuilder().query(queryBuilder).fetchSource(true))
+
+            val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) }
+
+            val workflows = searchResponse.hits.hits.map { hit ->
+                val xcp = XContentHelper.createParser(
+                    xContentRegistry, LoggingDeprecationHandler.INSTANCE,
+                    hit.sourceRef, XContentType.JSON
+                ).also { it.nextToken() }
+                lateinit var workflow: Workflow
+                while (xcp.nextToken() != XContentParser.Token.END_OBJECT) {
+                    xcp.nextToken()
+                    when (xcp.currentName()) {
+                        "workflow" -> workflow = Workflow.parse(xcp)
+                    }
+                }
+                workflow.copy(id = hit.id, version = hit.version)
+            }
+            val workflowMonitors = workflows.filter { it.id != workflowIdToBeDeleted }.flatMap { (it.inputs[0] as CompositeInput).getMonitorIds() }.distinct()
+            // Monitors that can be deleted -> all monitors - monitors belonging to other workflows
+            return monitorIds.minus(workflowMonitors.toSet())
+        }
+
+        private suspend fun getWorkflow(): Workflow {
+            val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId)
+
+            val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) }
+            if (!getResponse.isExists) {
+                // Throw instead of only notifying the listener; otherwise execution would
+                // fall through and try to parse an empty source below
+                throw AlertingException.wrap(
+                    OpenSearchStatusException("Workflow not found.", RestStatus.NOT_FOUND)
+                )
+            }
+            val xcp = XContentHelper.createParser(
+                xContentRegistry, LoggingDeprecationHandler.INSTANCE,
+                getResponse.sourceAsBytesRef, XContentType.JSON
+            )
+            return ScheduledJob.parse(xcp, getResponse.id, getResponse.version) as Workflow
+        }
+
+        private suspend fun deleteWorkflow(workflow: Workflow): DeleteResponse {
+            log.debug("Deleting the workflow with id ${deleteRequest.id()}")
+            return client.suspendUntil { delete(deleteRequest, it) }
+        }
+
+        private suspend fun deleteMetadata(workflow: Workflow) {
+            val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, "${workflow.id}-metadata")
+            val deleteResponse: DeleteResponse = client.suspendUntil { delete(deleteRequest, it) }
+        }
+    }
+}
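The set difference at the end of `getDeletableDelegates` is easy to mis-read, so here is a worked example with made-up ids: the workflow being deleted delegates to m1, m2, and m3, while m2 is also a delegate of another workflow.

```kotlin
// Worked example of the set difference in getDeletableDelegates above.
val delegateMonitorIds = listOf("m1", "m2", "m3") // delegates of the workflow being deleted
val workflowMonitors = listOf("m2")               // delegates found in other workflow documents
val deletable = delegateMonitorIds.minus(workflowMonitors.toSet())
check(deletable == listOf("m1", "m3"))            // m2 survives because another workflow still uses it
```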
org.opensearch.action.ActionListener +import org.opensearch.action.get.GetRequest +import org.opensearch.action.get.GetResponse +import org.opensearch.action.support.ActionFilters +import org.opensearch.action.support.HandledTransportAction +import org.opensearch.alerting.MonitorRunnerService +import org.opensearch.alerting.action.ExecuteWorkflowAction +import org.opensearch.alerting.action.ExecuteWorkflowRequest +import org.opensearch.alerting.action.ExecuteWorkflowResponse +import org.opensearch.alerting.util.AlertingException +import org.opensearch.alerting.workflow.WorkflowRunnerService +import org.opensearch.client.Client +import org.opensearch.common.inject.Inject +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.ConfigConstants +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.authuser.User +import org.opensearch.rest.RestStatus +import org.opensearch.tasks.Task +import org.opensearch.transport.TransportService +import java.time.Instant + +private val log = LogManager.getLogger(TransportExecuteWorkflowAction::class.java) + +class TransportExecuteWorkflowAction @Inject constructor( + transportService: TransportService, + private val client: Client, + private val runner: MonitorRunnerService, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, +) : HandledTransportAction<ExecuteWorkflowRequest, ExecuteWorkflowResponse>( + ExecuteWorkflowAction.NAME, transportService, actionFilters, ::ExecuteWorkflowRequest +) { + override fun doExecute(task: Task, execWorkflowRequest: ExecuteWorkflowRequest, actionListener: ActionListener<ExecuteWorkflowResponse>) { + val userStr = client.threadPool().threadContext.getTransient<String>(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) + log.debug("User and roles string from thread context: $userStr") + val user: User? = User.parse(userStr) + + client.threadPool().threadContext.stashContext().use { + val executeWorkflow = fun(workflow: Workflow) { + runner.launch { + val (periodStart, periodEnd) = + workflow.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis)) + try { + val workflowRunResult = + WorkflowRunnerService.runJob(workflow, periodStart, periodEnd, execWorkflowRequest.dryrun) + withContext(Dispatchers.IO) { + actionListener.onResponse( + ExecuteWorkflowResponse( + workflowRunResult + ) + ) + } + } catch (e: Exception) { + log.error("Unexpected error running workflow", e) + withContext(Dispatchers.IO) { + actionListener.onFailure(AlertingException.wrap(e)) + } + } + } + } + + if (execWorkflowRequest.workflowId != null) { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execWorkflowRequest.workflowId) + client.get( + getRequest, + object : ActionListener<GetResponse> { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Can't find workflow with id: ${response.id}", + RestStatus.NOT_FOUND + ) + ) + ) + return + } + if (!response.isSourceEmpty) { + XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, XContentType.JSON + ).use { xcp -> + val workflow = ScheduledJob.parse(xcp, response.id, response.version) as Workflow + executeWorkflow(workflow) + } + } + } + + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + } + ) + } else { + val workflow = when (user?.name.isNullOrEmpty()) { + true -> execWorkflowRequest.workflow as Workflow + false -> (execWorkflowRequest.workflow as Workflow).copy(user = user) + } + executeWorkflow(workflow) + } + } + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt index 80c61ad85..a816e2396 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt @@ -1,3 +1,8 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + package org.opensearch.alerting.transport import org.opensearch.OpenSearchStatusException @@ -6,9 +11,6 @@ import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction -import org.opensearch.alerting.action.GetMonitorAction -import org.opensearch.alerting.action.GetMonitorRequest -import org.opensearch.alerting.action.GetMonitorResponse import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException import org.opensearch.client.Client @@ -19,8 +21,12 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType -import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetWorkflowRequest +import org.opensearch.commons.alerting.action.GetWorkflowResponse import org.opensearch.commons.alerting.model.ScheduledJob +import
org.opensearch.commons.alerting.model.Workflow +import org.opensearch.index.IndexNotFoundException import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @@ -32,8 +38,8 @@ class TransportGetWorkflowAction @Inject constructor( val xContentRegistry: NamedXContentRegistry, val clusterService: ClusterService, settings: Settings -) : HandledTransportAction<GetMonitorRequest, GetMonitorResponse>( - GetMonitorAction.NAME, transportService, actionFilters, ::GetMonitorRequest +) : HandledTransportAction<GetWorkflowRequest, GetWorkflowResponse>( + AlertingActions.GET_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::GetWorkflowRequest ), SecureTransportAction { @@ -43,23 +49,17 @@ class TransportGetWorkflowAction @Inject constructor( listenFilterBySettingChange(clusterService) } - override fun doExecute(task: Task, getMonitorRequest: GetMonitorRequest, actionListener: ActionListener<GetMonitorResponse>) { + override fun doExecute(task: Task, getWorkflowRequest: GetWorkflowRequest, actionListener: ActionListener<GetWorkflowResponse>) { val user = readUserFromThreadContext(client) - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getMonitorRequest.monitorId) - .version(getMonitorRequest.version) - .fetchSourceContext(getMonitorRequest.srcContext) + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getWorkflowRequest.workflowId) + .version(getWorkflowRequest.version) + .fetchSourceContext(getWorkflowRequest.srcContext) if (!validateUserBackendRoles(user, actionListener)) { return } - /* - * Remove security context before you call elasticsearch api's. By this time, permissions required - * to call this api are validated. - * Once system-indices [https://github.com/opendistro-for-elasticsearch/security/issues/666] is done, we - * might further improve this logic. Also change try to kotlin-use for auto-closable. - */ client.threadPool().threadContext.stashContext().use { client.get( getRequest, @@ -69,7 +69,7 @@ class TransportGetWorkflowAction @Inject constructor( actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( - "Monitor not found.", + "Workflow not found.", RestStatus.NOT_FOUND ) ) @@ -77,21 +77,21 @@ - var monitor: Monitor? = null + var workflow: Workflow?
= null if (!response.isSourceEmpty) { XContentHelper.createParser( xContentRegistry, LoggingDeprecationHandler.INSTANCE, response.sourceAsBytesRef, XContentType.JSON ).use { xcp -> - monitor = ScheduledJob.parse(xcp, response.id, response.version) as Monitor + workflow = ScheduledJob.parse(xcp, response.id, response.version) as Workflow // security is enabled and filterby is enabled if (!checkUserPermissionsWithResource( user, - monitor?.user, + workflow?.user, actionListener, - "monitor", - getMonitorRequest.monitorId + "workflow", + getWorkflowRequest.workflowId ) ) { return @@ -100,19 +100,28 @@ class TransportGetWorkflowAction @Inject constructor( } actionListener.onResponse( - GetMonitorResponse( + GetWorkflowResponse( response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, - monitor + workflow ) ) } override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + if (t is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Workflow not found", + RestStatus.NOT_FOUND + ) + ) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } } ) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt similarity index 86% rename from alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt rename to alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt index a6273a91c..a4fa9ce76 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt @@ -33,6 +33,7 @@ import org.opensearch.alerting.settings.DestinationSettings.Companion.ALLOW_LIST import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.DocLevelMonitorQueries import org.opensearch.alerting.util.IndexUtils +import org.opensearch.alerting.util.isQueryLevelMonitor import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject @@ -55,6 +56,7 @@ import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JO import org.opensearch.commons.alerting.model.Workflow import org.opensearch.commons.authuser.User import org.opensearch.commons.utils.recreateObject +import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestRequest import org.opensearch.rest.RestStatus @@ -63,10 +65,10 @@ import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.util.stream.Collectors -private val log = LogManager.getLogger(TransportIndexCompositeWorkflowAction::class.java) +private val log = LogManager.getLogger(TransportIndexWorkflowAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportIndexCompositeWorkflowAction @Inject constructor( +class TransportIndexWorkflowAction @Inject constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, @@ -166,7 +168,22 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( ) { fun resolveUserAndStart() { scope.launch { - validateRequest(request, actionListener) + try { + validateRequest(request) + } catch (e: Exception) { + if (e is 
IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Monitors not found", + RestStatus.NOT_FOUND + ) + ) + } else { + actionListener.onFailure(e) + } + return@launch + } + if (user == null) { // Security is disabled, add empty user to Monitor. user is null for older versions. request.workflow = request.workflow @@ -456,27 +473,41 @@ } } - suspend fun validateRequest(request: IndexWorkflowRequest, listener: ActionListener<IndexWorkflowResponse>) { - val compositeInput = request.workflow.inputs.get(0) as CompositeInput + suspend fun validateRequest(request: IndexWorkflowRequest) { + if (request.workflow.inputs.isEmpty()) + throw AlertingException.wrap(IllegalArgumentException("Input list cannot be empty.")) + + if (request.workflow.inputs[0] !is CompositeInput) + throw AlertingException.wrap(IllegalArgumentException("When creating a workflow, the input must be a CompositeInput")) + + val compositeInput = request.workflow.inputs[0] as CompositeInput val monitorIds = compositeInput.sequence.delegates.stream().map { it.monitorId }.collect(Collectors.toList()) - validateDuplicateDelegateMonitorReferenceExists(monitorIds, listener) - validateSequenceOrdering(compositeInput.sequence.delegates, listener) - validateChainedFindings(compositeInput.sequence.delegates, listener) - val delegateMonitors = getDelegateMonitors(monitorIds, listener) - validateDelegateMonitorsExist(monitorIds, delegateMonitors, listener) - // todo: validate that user has roles to reference delegate monitors + + if (monitorIds.isEmpty()) + throw AlertingException.wrap(IllegalArgumentException("Delegates list cannot be empty.")) + + validateDuplicateDelegateMonitorReferenceExists(monitorIds) + validateSequenceOrdering(compositeInput.sequence.delegates) + validateChainedFindings(compositeInput.sequence.delegates) + val delegateMonitors = getDelegateMonitors(monitorIds) + validateDelegateMonitorsExist(monitorIds, delegateMonitors) + validateChainedFindingsMonitors(compositeInput.sequence.delegates, delegateMonitors) } - private fun validateChainedFindings(delegates: List<Delegate>, listener: ActionListener<IndexWorkflowResponse>) { + private fun validateChainedFindings(delegates: List<Delegate>) { val monitorIdOrderMap: Map<String, Int> = delegates.associate { it.monitorId to it.order } delegates.forEach { if (it.chainedFindings != null) { if (monitorIdOrderMap.containsKey(it.chainedFindings!!.monitorId) == false) { - listener.onFailure(Exception("Chained Findings Monitor ${it.chainedFindings!!.monitorId} doesn't exist in sequence")) + throw AlertingException.wrap( + IllegalArgumentException( + "Chained Findings Monitor ${it.chainedFindings!!.monitorId} doesn't exist in sequence" + ) + ) } - if (it.order <= monitorIdOrderMap.get(it.chainedFindings!!.monitorId)!!) { - listener.onFailure( - Exception( + if (it.order <= monitorIdOrderMap[it.chainedFindings!!.monitorId]!!)
{ + throw AlertingException.wrap( + IllegalArgumentException( "Chained Findings Monitor ${it.chainedFindings!!.monitorId} should be executed before monitor ${it.monitorId}" ) ) @@ -485,39 +516,51 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( } } - private fun validateSequenceOrdering(delegates: List<Delegate>, listener: ActionListener<IndexWorkflowResponse>) { + private fun validateChainedFindingsMonitors(delegates: List<Delegate>, monitorDelegates: List<Monitor>) { + val monitorsById = monitorDelegates.associateBy { it.id } + delegates.forEach { + if (it.chainedFindings != null) { + val chainedFindingMonitor = monitorsById[it.chainedFindings!!.monitorId] ?: throw AlertingException.wrap( + IllegalArgumentException("Chained finding monitor doesn't exist") + ) + + if (chainedFindingMonitor.isQueryLevelMonitor()) { + throw AlertingException.wrap(IllegalArgumentException("Query level monitor can't be part of chained findings")) + } + } + } + } + + private fun validateSequenceOrdering(delegates: List<Delegate>) { val orderSet = delegates.stream().filter { it.order > 0 }.map { it.order }.collect(Collectors.toSet()) if (orderSet.size != delegates.size) { - listener.onFailure(Exception("Sequence ordering of delegate monitor shouldn't contain duplicate order values")) + throw AlertingException.wrap(IllegalArgumentException("Sequence ordering of delegate monitor shouldn't contain duplicate order values")) } } private fun validateDuplicateDelegateMonitorReferenceExists( - monitorIds: MutableList<String>, - listener: ActionListener<IndexWorkflowResponse> + monitorIds: MutableList<String> ) { if (monitorIds.toSet().size != monitorIds.size) { - listener.onFailure(Exception("duplicate is not allowed")) + throw AlertingException.wrap(IllegalArgumentException("Duplicate delegates not allowed")) } } private fun validateDelegateMonitorsExist( monitorIds: List<String>, - delegateMonitors: List<Monitor>, - actionListener: ActionListener<IndexWorkflowResponse> + delegateMonitors: List<Monitor> ) { val reqMonitorIds: MutableList<String> = monitorIds as MutableList<String> delegateMonitors.forEach { reqMonitorIds.remove(it.id) } if (reqMonitorIds.isNotEmpty()) { - actionListener.onFailure(Exception("${reqMonitorIds.joinToString { "," }} are not valid monitor ids")) + throw AlertingException.wrap(IllegalArgumentException("${reqMonitorIds.joinToString()} are not valid monitor ids")) } } private suspend fun getDelegateMonitors( - monitorIds: MutableList<String>, - actionListener: ActionListener<IndexWorkflowResponse> + monitorIds: MutableList<String> ): List<Monitor> { val query = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitorIds)) val searchSource = SearchSourceBuilder().query(query) @@ -527,20 +570,15 @@ if (response.isTimedOut) { return monitors } - try { - for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) - monitors.add(monitor as Monitor) - } + for (hit in response.hits) { + XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.sourceAsString + ).use { hitsParser -> + val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) + monitors.add(monitor as Monitor) } - return monitors - } catch (e: Exception) { - actionListener.onFailure(e) - return listOf() } + return monitors } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt index 086c1302c..e3c3299ca
100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt @@ -11,6 +11,7 @@ import org.opensearch.action.index.IndexResponse import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.model.BucketLevelTriggerRunResult import org.opensearch.alerting.model.MonitorMetadata +import org.opensearch.alerting.model.WorkflowMetadata import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings @@ -54,6 +55,8 @@ fun Destination.isTestAction(): Boolean = this.type == DestinationType.TEST_ACTI fun Monitor.isDocLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR +fun Monitor.isQueryLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.QUERY_LEVEL_MONITOR + /** * Since buckets can have multi-value keys, this converts the bucket key values to a string that can be used * as the key for a HashMap to easily retrieve [AggregationResultBucket] based on the bucket key values. @@ -132,3 +135,13 @@ suspend fun updateMonitorMetadata(client: Client, settings: Settings, monitorMet return client.suspendUntil { client.index(indexRequest, it) } } + +suspend fun updateWorkflowMetadata(client: Client, settings: Settings, workflowMetadata: WorkflowMetadata): IndexResponse { + val indexRequest = IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(workflowMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + .id(workflowMetadata.id) + .timeout(AlertingSettings.INDEX_TIMEOUT.get(settings)) + + return client.suspendUntil { client.index(indexRequest, it) } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 2ad4cd23b..c183d3125 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -1,30 +1,165 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + package org.opensearch.alerting.workflow +import org.apache.logging.log4j.LogManager +import org.opensearch.alerting.BucketLevelMonitorRunner +import org.opensearch.alerting.DocumentLevelMonitorRunner import org.opensearch.alerting.MonitorRunnerExecutionContext +import org.opensearch.alerting.QueryLevelMonitorRunner +import org.opensearch.alerting.model.AlertingConfigAccessor import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowMetadata +import org.opensearch.alerting.model.WorkflowRunResult +import org.opensearch.alerting.util.AlertingException +import org.opensearch.alerting.util.isDocLevelMonitor +import org.opensearch.alerting.util.isQueryLevelMonitor +import org.opensearch.alerting.util.updateWorkflowMetadata +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.Delegate +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.util.isBucketLevelMonitor import java.time.Instant +import java.time.LocalDateTime +import java.util.UUID + +object CompositeWorkflowRunner : WorkflowRunner() { + 
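+ // Execution outline: resolve the delegate monitors by id and validate the list is complete; load the workflow metadata document (creating it on first run); run each delegate in sequence order, handing a delegate with chainedFindings the doc ids its chained monitor matched in this execution via WorkflowRunContext; finally, unless this is a dry run, persist the metadata with the latest run time and execution id.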
private val logger = LogManager.getLogger(javaClass) -class CompositeWorkflowRunner : WorkflowRunner() { override suspend fun runWorkflow( workflow: Workflow, monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, dryRun: Boolean - ): MonitorRunResult<*> { - TODO("Not yet implemented") + ): WorkflowRunResult { + val workflowExecutionStartTime = Instant.now() + + val executionId = workflow.id.plus(LocalDateTime.now()).plus(UUID.randomUUID().toString()) + var workflowResult = WorkflowRunResult(mutableListOf(), workflowExecutionStartTime, null, executionId) + val isTempMonitor = dryRun || workflow.id == Workflow.NO_ID + + logger.debug("Workflow ${workflow.id} in $executionId execution is running") + val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } + var monitors: List<Monitor> + + try { + monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + } catch (e: Exception) { + logger.error("Failed to execute workflow. Error: ${e.message}") + return workflowResult.copy(error = AlertingException.wrap(e)) + } + // Validate the monitors size + validateMonitorSize(delegates, monitors, workflow) + + var workflowMetadata = AlertingConfigAccessor.getWorkflowMetadata( + monitorCtx.client!!, + monitorCtx.xContentRegistry!!, + "${workflow.id}-metadata" + ) + if (workflowMetadata == null) { + workflowMetadata = createWorkflowMetadata(workflow.id, delegates.map { it.monitorId }, executionId) + } + + val monitorsById = monitors.associateBy { it.id } + val resultList = mutableListOf<MonitorRunResult<*>>() + var lastErrorDelegateRun: Exception? = null + + for (delegate in delegates) { + var indexToDocIds = mapOf<String, List<String>>() + val delegateMonitor = monitorsById[delegate.monitorId] + ?: throw AlertingException.wrap( + IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow ${workflow.id}") + ) + if (delegate.chainedFindings != null) { + val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] + ?: throw AlertingException.wrap( + IllegalStateException("Chained finding monitor not found ${delegate.chainedFindings!!.monitorId} for the workflow ${workflow.id}") + ) + + try { + indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, executionId) + } catch (e: Exception) { + logger.error("Failed to execute workflow. Error: ${e.message}") + return workflowResult.copy(error = AlertingException.wrap(e)) + } + } + + val workflowRunContext = WorkflowRunContext(workflow.id, delegate.chainedFindings?.monitorId, executionId, indexToDocIds) + + var delegateRunResult: MonitorRunResult<*>? + try { + delegateRunResult = if (delegateMonitor.isBucketLevelMonitor()) { + BucketLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else if (delegateMonitor.isDocLevelMonitor()) { + DocumentLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else if (delegateMonitor.isQueryLevelMonitor()) { + QueryLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else { + throw AlertingException.wrap( + IllegalStateException("Unsupported monitor type") + ) + } + } catch (ex: Exception) { + logger.error("Error executing workflow delegate. Error: ${ex.message}") + lastErrorDelegateRun = AlertingException.wrap(ex) + continue + } + if (delegateRunResult != null) resultList.add(delegateRunResult) + } + logger.debug("Workflow ${workflow.id} in $executionId finished") + // Update metadata only if the workflow is not temp + if (!isTempMonitor) { + updateWorkflowMetadata( + monitorCtx.client!!, + monitorCtx.settings!!, + workflowMetadata.copy(latestRunTime = workflowExecutionStartTime, latestExecutionId = executionId) + ) + } + + return workflowResult.copy(workflowRunResult = resultList, executionEndTime = Instant.now(), error = lastErrorDelegateRun) } - companion object { - fun runWorkflow( - workflow: Workflow, - monitorCtx: MonitorRunnerExecutionContext, - periodStart: Instant, - periodEnd: Instant, - dryrun: Boolean - ): MonitorRunResult<*> { - TODO("Not yet implemented") + private fun validateMonitorSize( + delegates: List<Delegate>, + monitors: List<Monitor>, + workflow: Workflow, + ) { + if (delegates.size != monitors.size) { + val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { it.id }.toSet()).joinToString() + throw AlertingException.wrap( + IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow ${workflow.id}") + ) } } + + private fun createWorkflowMetadata(workflowId: String, monitors: List<String>, executionId: String): WorkflowMetadata { + return WorkflowMetadata("$workflowId-metadata", workflowId, monitors, Instant.now(), executionId) + } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt new file mode 100644 index 000000000..27cbb6e11 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt @@ -0,0 +1,13 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.workflow + +data class WorkflowRunContext( + val workflowId: String, + val chainedMonitorId: String?, + val executionId: String, + val matchingDocIdsPerIndex: Map<String, List<String>> +) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt index a09d1cc99..a7272a3dc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt @@ -1,8 +1,12 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + package org.opensearch.alerting.workflow import org.opensearch.alerting.MonitorRunnerExecutionContext -import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.commons.alerting.model.Workflow import java.time.Instant @@ -13,5 +17,5 @@ abstract class WorkflowRunner { periodStart: Instant, periodEnd: Instant, dryRun: Boolean - ): MonitorRunResult<*> + ): WorkflowRunResult } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt index f99d9a802..bd9e4a0c2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt @@ -16,9 +16,10 @@ import org.opensearch.alerting.AlertService import
org.opensearch.alerting.InputService import org.opensearch.alerting.MonitorRunnerExecutionContext import org.opensearch.alerting.TriggerService +import org.opensearch.alerting.WorkflowService import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.core.JobRunner -import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.model.destination.DestinationContextFactory import org.opensearch.alerting.script.TriggerExecutionContext import org.opensearch.alerting.settings.AlertingSettings.Companion.ALERT_BACKOFF_COUNT @@ -97,6 +98,11 @@ object WorkflowRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompo return this } + fun registerWorkflowService(workflowService: WorkflowService): WorkflowRunnerService { + monitorCtx.workflowService = workflowService + return this + } + fun registerTriggerService(triggerService: TriggerService): WorkflowRunnerService { monitorCtx.triggerService = triggerService return this @@ -180,7 +186,6 @@ } override fun postDelete(jobId: String) { - } override fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant) { @@ -192,10 +197,9 @@ } } - suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): MonitorRunResult<*> { + suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): WorkflowRunResult { val workflow = job as Workflow return CompositeWorkflowRunner.runWorkflow(workflow, monitorCtx, periodStart, periodEnd, dryrun) - } // TODO: See if we can move below methods (or few of these) to a common utils diff --git a/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json b/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json index c9386b2ef..bd916199b 100644 --- a/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json +++ b/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json @@ -51,6 +51,9 @@ }, "timestamp": { "type": "long" + }, + "execution_id": { + "type": "keyword" + } } } \ No newline at end of file diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt index 5afc1b7a5..019d99407 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt @@ -203,7 +203,6 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val finding = Finding.parse(xcp) findings1.add(finding) } - logger.error("sashank: response: {}", finalQueryResponse) val indexToRelatedDocIdsMap = mutableMapOf<String, MutableList<String>>() for (finding in findings1) { val ids = indexToRelatedDocIdsMap.getOrDefault(index, mutableListOf()) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt index 4ce7dcd23..289b3b1e6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt @@ -1329,7 +1329,10 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" + "test_value_1", // adding
duplicate to verify aggregation + "test_value_2", + "test_value_2", + "test_value_3" ) ) @@ -1340,7 +1343,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val termAgg = TermsAggregationBuilder("test_field").field("test_field") val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(termAgg)) val triggerScript = """ - params.docCount > 0 + params.docCount > 1 """.trimIndent() // For the Actions ensure that there is at least one and any PER_ALERT actions contain ACTIVE, DEDUPED and COMPLETED in its policy diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index a4e3eb347..0723fd554 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -39,8 +39,11 @@ import org.opensearch.commons.alerting.model.ActionExecutionResult import org.opensearch.commons.alerting.model.AggregationResultBucket import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.ChainedFindings import org.opensearch.commons.alerting.model.ClusterMetricsInput +import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.Delegate import org.opensearch.commons.alerting.model.DocLevelMonitorInput import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.commons.alerting.model.DocumentLevelTrigger @@ -51,7 +54,10 @@ import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.QueryLevelTrigger import org.opensearch.commons.alerting.model.Schedule import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Sequence import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.model.Workflow.WorkflowType import org.opensearch.commons.alerting.model.action.Action import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy import org.opensearch.commons.alerting.model.action.ActionExecutionScope @@ -84,7 +90,7 @@ fun randomQueryLevelMonitor( triggers: List<Trigger> = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -102,7 +108,7 @@ fun randomQueryLevelMonitorWithoutUser( triggers: List<Trigger> = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -126,7 +132,7 @@ fun randomBucketLevelMonitor( triggers: List<Trigger> = (1..randomInt(10)).map { randomBucketLevelTrigger() }, enabledTime: Instant?
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -151,7 +157,7 @@ fun randomBucketLevelMonitor( enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), withMetadata: Boolean = false, - dataSources: DataSources + dataSources: DataSources, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -170,7 +176,7 @@ fun randomClusterMetricsMonitor( triggers: List<Trigger> = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR, enabled = enabled, inputs = inputs, @@ -188,7 +194,7 @@ fun randomDocumentLevelMonitor( triggers: List<Trigger> = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -208,7 +214,7 @@ fun randomDocumentLevelMonitor( lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), withMetadata: Boolean = false, dataSources: DataSources, - owner: String? = null + owner: String? = null, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -217,13 +223,68 @@ ) } +fun randomWorkflow( + id: String = Workflow.NO_ID, + monitorIds: List<String>, + name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), + user: User? = randomUser(), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = randomBoolean(), + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS) +): Workflow { + val delegates = mutableListOf<Delegate>() + if (monitorIds.isNotEmpty()) { + delegates.add(Delegate(1, monitorIds[0])) + for (i in 1 until monitorIds.size) { + // Monitors keep the order in which they are passed in; each delegate after the first uses the previous monitor as its chained findings source (e.g. for [m1, m2], m2 chains to m1) + delegates.add(Delegate(i + 1, monitorIds[i], ChainedFindings(monitorIds[i - 1]))) + } + } + + return Workflow( + id = id, + name = name, + enabled = enabled, + schedule = schedule, + lastUpdateTime = lastUpdateTime, + enabledTime = enabledTime, + workflowType = WorkflowType.COMPOSITE, + user = user, + inputs = listOf(CompositeInput(Sequence(delegates))) + ) +} + +fun randomWorkflowWithDelegates( + id: String = Workflow.NO_ID, + delegates: List<Delegate>, + name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), + user: User?
= randomUser(), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = randomBoolean(), + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), +): Workflow { + return Workflow( + id = id, + name = name, + enabled = enabled, + schedule = schedule, + lastUpdateTime = lastUpdateTime, + enabledTime = enabledTime, + workflowType = WorkflowType.COMPOSITE, + user = user, + inputs = listOf(CompositeInput(Sequence(delegates))) + ) +} + fun randomQueryLevelTrigger( id: String = UUIDs.base64UUID(), name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), severity: String = "1", condition: Script = randomScript(), actions: List<Action> = mutableListOf(), - destinationId: String = "" + destinationId: String = "", ): QueryLevelTrigger { return QueryLevelTrigger( id = id, @@ -240,7 +301,7 @@ fun randomBucketLevelTrigger( severity: String = "1", bucketSelector: BucketSelectorExtAggregationBuilder = randomBucketSelectorExtAggregationBuilder(name = id), actions: List<Action> = mutableListOf(), - destinationId: String = "" + destinationId: String = "", ): BucketLevelTrigger { return BucketLevelTrigger( id = id, @@ -260,7 +321,7 @@ fun randomDocumentLevelTrigger( severity: String = "1", condition: Script = randomScript(), actions: List<Action> = mutableListOf(), - destinationId: String = "" + destinationId: String = "", ): DocumentLevelTrigger { return DocumentLevelTrigger( id = id, @@ -278,14 +339,14 @@ fun randomBucketSelectorExtAggregationBuilder( bucketsPathsMap: MutableMap<String, String> = mutableMapOf("avg" to "10"), script: Script = randomBucketSelectorScript(params = bucketsPathsMap), parentBucketPath: String = "testPath", - filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")) + filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")), ): BucketSelectorExtAggregationBuilder { return BucketSelectorExtAggregationBuilder(name, bucketsPathsMap, script, parentBucketPath, filter) } fun randomBucketSelectorScript( idOrCode: String = "params.avg >= 0", - params: Map<String, String> = mutableMapOf("avg" to "10") + params: Map<String, String> = mutableMapOf("avg" to "10"), ): Script { return Script(Script.DEFAULT_SCRIPT_TYPE, Script.DEFAULT_SCRIPT_LANG, idOrCode, emptyMap(), params) } @@ -298,7 +359,7 @@ fun randomEmailAccount( port: Int = randomIntBetween(1, 100), method: EmailAccount.MethodType = randomEmailAccountMethod(), username: SecureString? = null, - password: SecureString? = null + password: SecureString?
= null, ): EmailAccount { return EmailAccount( name = name, @@ -316,7 +377,7 @@ fun randomEmailGroup( name: String = salt + OpenSearchRestTestCase.randomAlphaOfLength(10), emails: List<EmailEntry> = (1..randomInt(10)).map { EmailEntry(email = salt + OpenSearchRestTestCase.randomAlphaOfLength(5) + "@email.com") - } + }, ): EmailGroup { return EmailGroup(name = name, emails = emails) } @@ -342,7 +403,7 @@ val TERM_DLS_QUERY = """{\"term\": { \"accessible\": true}}""" fun randomTemplateScript( source: String, - params: Map<String, Any> = emptyMap() + params: Map<String, Any> = emptyMap(), ): Script = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, source, params) fun randomAction( @@ -350,7 +411,7 @@ template: Script = randomTemplateScript("Hello World"), destinationId: String = "", throttleEnabled: Boolean = false, - throttle: Throttle = randomThrottle() + throttle: Throttle = randomThrottle(), ) = Action(name, destinationId, template, template, throttleEnabled, throttle, actionExecutionPolicy = null) fun randomActionWithPolicy( @@ -359,7 +420,7 @@ destinationId: String = "", throttleEnabled: Boolean = false, throttle: Throttle = randomThrottle(), - actionExecutionPolicy: ActionExecutionPolicy? = randomActionExecutionPolicy() + actionExecutionPolicy: ActionExecutionPolicy? = randomActionExecutionPolicy(), ): Action { return if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { // Return null for throttle when using PerExecutionActionScope since throttling is currently not supported for it @@ -371,11 +432,11 @@ fun randomThrottle( value: Int = randomIntBetween(60, 120), - unit: ChronoUnit = ChronoUnit.MINUTES + unit: ChronoUnit = ChronoUnit.MINUTES, ) = Throttle(value, unit) fun randomActionExecutionPolicy( - actionExecutionScope: ActionExecutionScope = randomActionExecutionScope() + actionExecutionScope: ActionExecutionScope = randomActionExecutionScope(), ) = ActionExecutionPolicy(actionExecutionScope) fun randomActionExecutionScope(): ActionExecutionScope { @@ -400,7 +461,7 @@ fun randomDocLevelQuery( id: String = OpenSearchRestTestCase.randomAlphaOfLength(10), query: String = OpenSearchRestTestCase.randomAlphaOfLength(10), name: String = "${randomInt(5)}", - tags: List<String> = mutableListOf(0..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) } + tags: List<String> = mutableListOf(0..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) }, ): DocLevelQuery { return DocLevelQuery(id = id, query = query, name = name, tags = tags) } @@ -408,7 +469,7 @@ fun randomDocLevelMonitorInput( description: String = OpenSearchRestTestCase.randomAlphaOfLength(randomInt(10)), indices: List<String> = listOf(1..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) }, - queries: List<DocLevelQuery> = listOf(1..randomInt(10)).map { randomDocLevelQuery() } + queries: List<DocLevelQuery> = listOf(1..randomInt(10)).map { randomDocLevelQuery() }, ): DocLevelMonitorInput { return DocLevelMonitorInput(description = description, indices = indices, queries = queries) } @@ -420,7 +481,7 @@ fun randomFinding( monitorName: String = OpenSearchRestTestCase.randomAlphaOfLength(10), index: String = OpenSearchRestTestCase.randomAlphaOfLength(10), docLevelQueries: List<DocLevelQuery> = listOf(randomDocLevelQuery()), - timestamp: Instant = Instant.now() + timestamp: Instant = Instant.now(), ): Finding { return Finding( id = id, @@ -456,7 +517,7 @@ fun randomEmailAccountMethod(): EmailAccount.MethodType { fun randomActionExecutionResult( actionId: String =
UUIDs.base64UUID(), lastExecutionTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - throttledCount: Int = randomInt() + throttledCount: Int = randomInt(), ) = ActionExecutionResult(actionId, lastExecutionTime, throttledCount) fun randomQueryLevelMonitorRunResult(): MonitorRunResult<QueryLevelTriggerRunResult> { @@ -518,7 +579,7 @@ fun randomQueryLevelTriggerRunResult(): QueryLevelTriggerRunResult { fun randomClusterMetricsInput( path: String = ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH.defaultPath, pathParams: String = "", - url: String = "" + url: String = "", ): ClusterMetricsInput { return ClusterMetricsInput(path, pathParams, url) } @@ -617,7 +678,7 @@ fun RestClient.makeRequest( endpoint: String, params: Map<String, String> = emptyMap(), entity: HttpEntity? = null, - vararg headers: Header + vararg headers: Header, ): Response { val request = Request(method, endpoint) // TODO: remove PERMISSIVE option after moving system index access to REST API call @@ -642,7 +703,7 @@ fun RestClient.makeRequest( method: String, endpoint: String, entity: HttpEntity? = null, - vararg headers: Header + vararg headers: Header, ): Response { val request = Request(method, endpoint) val options = RequestOptions.DEFAULT.toBuilder() @@ -686,3 +747,7 @@ fun assertUserNull(map: Map<String, Any>) { fun assertUserNull(monitor: Monitor) { assertNull("User is not null", monitor.user) } + +fun assertUserNull(workflow: Workflow) { + assertNull("User is not null", workflow.user) +} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt new file mode 100644 index 000000000..1a5d94e22 --- /dev/null +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -0,0 +1,1125 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting + +import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase +import org.opensearch.commons.alerting.model.ChainedFindings +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.Delegate +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.rest.RestRequest +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import java.time.temporal.ChronoUnit +import java.util.Collections + +class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { + + fun `test create workflow success`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, +
findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + val workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", workflow.inputs, workflowById.inputs) + + // Delegate verification + @Suppress("UNCHECKED_CAST") + val delegates = (workflowById.inputs as List<CompositeInput>)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 2, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId) + + val delegate2 = delegates[1] + assertNotNull(delegate2) + assertEquals("Delegate2 order not correct", 2, delegate2.order) + assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) + assertEquals( + "Delegate2 Chained finding not correct", monitorResponse1.id, delegate2.chainedFindings!!.monitorId + ) + } + + fun `test update workflow add monitor success`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + var workflowById = searchWorkflow(workflowResponse.id)!!
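+ // randomWorkflow chains every delegate to the previous monitor, so after adding a third monitor below, delegate3 should chain to monitor2.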
+ assertNotNull(workflowById) + + val monitor3 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + val monitorResponse3 = createMonitor(monitor3)!! + + val updatedWorkflowResponse = upsertWorkflow( + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id, monitorResponse3.id) + ), + workflowResponse.id, + RestRequest.Method.PUT + )!! + + assertNotNull("Workflow creation failed", updatedWorkflowResponse) + assertNotNull(updatedWorkflowResponse.workflow) + assertEquals("Workflow id changed", workflowResponse.id, updatedWorkflowResponse.id) + assertTrue("incorrect version", updatedWorkflowResponse.version > 0) + + workflowById = searchWorkflow(updatedWorkflowResponse.id)!! + + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", updatedWorkflowResponse.workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", updatedWorkflowResponse.workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) + + // Delegate verification + @Suppress("UNCHECKED_CAST") + val delegates = (workflowById.inputs as List<CompositeInput>)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 3, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId) + + val delegate2 = delegates[1] + assertNotNull(delegate2) + assertEquals("Delegate2 order not correct", 2, delegate2.order) + assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) + assertEquals( + "Delegate2 Chained finding not correct", monitorResponse1.id, delegate2.chainedFindings!!.monitorId + ) + + val delegate3 = delegates[2] + assertNotNull(delegate3) + assertEquals("Delegate3 order not correct", 3, delegate3.order) + assertEquals("Delegate3 id not correct", monitorResponse3.id, delegate3.monitorId) + assertEquals( + "Delegate3 Chained finding not correct", monitorResponse2.id, delegate3.chainedFindings!!.monitorId + ) + } + + fun `test update workflow change order of delegate monitors`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!!
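+ // The update later in this test reverses the delegate order, which should also flip the chained-findings relationship.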
+ val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + var workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + val updatedWorkflowResponse = upsertWorkflow( + randomWorkflow( + monitorIds = listOf(monitorResponse2.id, monitorResponse1.id) + ), + workflowResponse.id, + RestRequest.Method.PUT + )!! + + assertNotNull("Workflow creation failed", updatedWorkflowResponse) + assertNotNull(updatedWorkflowResponse.workflow) + assertEquals("Workflow id changed", workflowResponse.id, updatedWorkflowResponse.id) + assertTrue("incorrect version", updatedWorkflowResponse.version > 0) + + workflowById = searchWorkflow(updatedWorkflowResponse.id)!! + + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", updatedWorkflowResponse.workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", updatedWorkflowResponse.workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) + + // Delegate verification + @Suppress("UNCHECKED_CAST") + val delegates = (workflowById.inputs as List<CompositeInput>)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 2, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse2.id, delegate1.monitorId) + + val delegate2 = delegates[1] + assertNotNull(delegate2) + assertEquals("Delegate2 order not correct", 2, delegate2.order) + assertEquals("Delegate2 id not correct", monitorResponse1.id, delegate2.monitorId) + assertEquals( + "Delegate2 Chained finding not correct", monitorResponse2.id, delegate2.chainedFindings!!.monitorId + ) + } + + fun `test update workflow remove monitor success`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!!
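+ // Start from a two-delegate workflow; the update below shrinks it to a single delegate.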
+
+        val workflow = randomWorkflow(
+            monitorIds = listOf(monitorResponse1.id, monitorResponse2.id)
+        )
+
+        val workflowResponse = upsertWorkflow(workflow)!!
+        assertNotNull("Workflow creation failed", workflowResponse)
+        assertNotNull(workflowResponse.workflow)
+        assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id)
+        assertTrue("incorrect version", workflowResponse.version > 0)
+
+        var workflowById = searchWorkflow(workflowResponse.id)!!
+        assertNotNull(workflowById)
+
+        val updatedWorkflowResponse = upsertWorkflow(
+            randomWorkflow(
+                monitorIds = listOf(monitorResponse1.id)
+            ),
+            workflowResponse.id,
+            RestRequest.Method.PUT
+        )!!
+
+        assertNotNull("Workflow creation failed", updatedWorkflowResponse)
+        assertNotNull(updatedWorkflowResponse.workflow)
+        assertEquals("Workflow id changed", workflowResponse.id, updatedWorkflowResponse.id)
+        assertTrue("incorrect version", updatedWorkflowResponse.version > 0)
+
+        workflowById = searchWorkflow(updatedWorkflowResponse.id)!!
+
+        // Verify workflow
+        assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id)
+        assertTrue("incorrect version", workflowById.version > 0)
+        assertEquals("Workflow name not correct", updatedWorkflowResponse.workflow.name, workflowById.name)
+        assertEquals("Workflow owner not correct", updatedWorkflowResponse.workflow.owner, workflowById.owner)
+        assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs)
+
+        // Delegate verification
+        @Suppress("UNCHECKED_CAST")
+        val delegates = (workflowById.inputs as List<CompositeInput>)[0].sequence.delegates.sortedBy { it.order }
+        assertEquals("Delegates size not correct", 1, delegates.size)
+
+        val delegate1 = delegates[0]
+        assertNotNull(delegate1)
+        assertEquals("Delegate1 order not correct", 1, delegate1.order)
+        assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId)
+    }
+
+    fun `test update workflow doesn't exist failure`() {
+        val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")
+        val docLevelInput = DocLevelMonitorInput(
+            "description", listOf(index), listOf(docQuery1)
+        )
+        val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN)
+        val customFindingsIndex = "custom_findings_index"
+        val customFindingsIndexPattern = "custom_findings_index-1"
+        val customQueryIndex = "custom_alerts_index"
+        val monitor1 = randomDocumentLevelMonitor(
+            inputs = listOf(docLevelInput),
+            triggers = listOf(trigger),
+            dataSources = DataSources(
+                queryIndex = customQueryIndex,
+                findingsIndex = customFindingsIndex,
+                findingsIndexPattern = customFindingsIndexPattern
+            )
+        )
+
+        val monitorResponse1 = createMonitor(monitor1)!!
+
+        val workflow = randomWorkflow(
+            monitorIds = listOf(monitorResponse1.id)
+        )
+        val workflowResponse = upsertWorkflow(workflow)!!
+        assertNotNull("Workflow creation failed", workflowResponse)
+
+        try {
+            upsertWorkflow(workflow, "testId", RestRequest.Method.PUT)
+        } catch (e: Exception) {
+            e.message?.let {
+                assertTrue(
+                    "Exception not returning GetWorkflow Action error ",
+                    it.contains("Workflow with testId is not found")
+                )
+            }
+        }
+    }
+
+    fun `test get workflow`() {
+        val docLevelInput = DocLevelMonitorInput(
+            "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3"))
+        )
+        val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN)
+        val monitor = randomDocumentLevelMonitor(
+            inputs = listOf(docLevelInput),
+            triggers = listOf(trigger),
+        )
+
+        val monitorResponse = createMonitor(monitor)!!
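+        // Index a single-delegate workflow, then read it back through the GET workflow API below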
+
+        val workflowRequest = randomWorkflow(
+            monitorIds = listOf(monitorResponse.id)
+        )
+
+        val workflowResponse = upsertWorkflow(workflowRequest)!!
+        assertNotNull("Workflow creation failed", workflowResponse)
+        assertNotNull(workflowResponse.workflow)
+        assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id)
+        assertTrue("incorrect version", workflowResponse.version > 0)
+
+        val getWorkflowResponse = getWorkflowById(id = workflowResponse.id)
+        assertNotNull(getWorkflowResponse)
+
+        val workflowById = getWorkflowResponse.workflow!!
+        // Verify workflow
+        assertNotEquals("response is missing Id", Monitor.NO_ID, getWorkflowResponse.id)
+        assertTrue("incorrect version", getWorkflowResponse.version > 0)
+        assertEquals("Workflow name not correct", workflowRequest.name, workflowById.name)
+        assertEquals("Workflow owner not correct", workflowRequest.owner, workflowById.owner)
+        assertEquals("Workflow input not correct", workflowRequest.inputs, workflowById.inputs)
+
+        // Delegate verification
+        @Suppress("UNCHECKED_CAST")
+        val delegates = (workflowById.inputs as List<CompositeInput>)[0].sequence.delegates.sortedBy { it.order }
+        assertEquals("Delegates size not correct", 1, delegates.size)
+
+        val delegate = delegates[0]
+        assertNotNull(delegate)
+        assertEquals("Delegate order not correct", 1, delegate.order)
+        assertEquals("Delegate id not correct", monitorResponse.id, delegate.monitorId)
+    }
+
+    fun `test get workflow for invalid id monitor index doesn't exist`() {
+        // Get workflow for non existing workflow id
+        try {
+            getWorkflowById(id = "-1")
+        } catch (e: Exception) {
+            e.message?.let {
+                assertTrue(
+                    "Exception not returning GetWorkflow Action error ",
+                    it.contains("Workflow not found")
+                )
+            }
+        }
+    }
+
+    fun `test get workflow for invalid id monitor index exists`() {
+        val docLevelInput = DocLevelMonitorInput(
+            "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3"))
+        )
+        val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN)
+        val monitor = randomDocumentLevelMonitor(
+            inputs = listOf(docLevelInput),
+            triggers = listOf(trigger),
+        )
+        createMonitor(monitor)
+        // Get workflow for non existing workflow id
+        try {
+            getWorkflowById(id = "-1")
+        } catch (e: Exception) {
+            e.message?.let {
+                assertTrue(
+                    "Exception not returning GetWorkflow Action error ",
+                    it.contains("Workflow not found")
+                )
+            }
+        }
+    }
+
+    fun `test delete workflow delegate monitor deleted`() {
+        val docLevelInput = DocLevelMonitorInput(
+            "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3"))
+        )
+        val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN)
+
+        val monitor = randomDocumentLevelMonitor(
+            inputs = listOf(docLevelInput),
+            triggers = listOf(trigger)
+        )
+
+        val monitorResponse = createMonitor(monitor)!!
+
+        val workflowRequest = randomWorkflow(
+            monitorIds = listOf(monitorResponse.id)
+        )
+        val workflowResponse = upsertWorkflow(workflowRequest)!!
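+        // Deleting with deleteDelegateMonitors = true below should remove both the workflow and its delegate monitor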
+ val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + deleteWorkflow(workflowId, true) + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + // Verify that the monitor is deleted + try { + getMonitorResponse(monitorResponse.id) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetMonitor Action error ", + it.contains("Monitor not found") + ) + } + } + } + + fun `test delete executed workflow with metadata deleted`() { + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1) + ) + val monitorResponse = createMonitor(monitor1)!! + + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 + val testDoc1 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16644, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-west-2" + }""" + indexDoc(index, "1", testDoc1) + + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! + val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult + assertEquals(2, monitorsRunResults.size) + + deleteWorkflow(workflowId, true) + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + // Verify that the workflow metadata is deleted + try { + searchWorkflowMetadata(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetMonitor Action error ", + it.contains("List is empty") + ) + } + } + } + + fun `test delete workflow delegate monitor not deleted`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + var monitorResponse = createMonitor(monitor)!! 
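+        // This monitor becomes a delegate of two workflows, so deleting one workflow (even with deleteDelegateMonitors = true) must leave it intact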
+ + val workflowRequest = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflowRequest)!! + val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + val workflowRequest2 = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse2 = upsertWorkflow(workflowRequest2)!! + val workflowId2 = workflowResponse2.id + val getWorkflowResponse2 = getWorkflowById(id = workflowResponse2.id) + + assertNotNull(getWorkflowResponse2) + assertEquals(workflowId2, getWorkflowResponse2.id) + + deleteWorkflow(workflowId, true) + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + val existingMonitor = getMonitorResponse(monitorResponse.id) + assertNotNull(existingMonitor) + } + + fun `test trying to delete monitor that is part of workflow sequence`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + + val workflowResponse = upsertWorkflow(workflowRequest)!! + val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + // Verify that the monitor can't be deleted because it's included in the workflow + try { + deleteMonitor(monitorResponse.id) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning DeleteMonitor Action error ", + it.contains("Not allowed to delete this monitor!") + ) + } + } + } + + fun `test delete workflow for invalid id monitor index doesn't exists`() { + // Try deleting non-existing workflow + try { + deleteWorkflow("-1") + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning DeleteWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + } + + fun `test delete workflow for invalid id monitor index exists`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) + createMonitor(monitor) + // Try deleting non-existing workflow + try { + deleteWorkflow("-1") + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning DeleteWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + } + + fun `test create workflow without delegate failure`() { + val workflow = randomWorkflow( + monitorIds = Collections.emptyList() + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Delegates list can not be empty.") + ) + } + } + } + + fun 
`test update workflow without delegate failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + + workflow = randomWorkflow( + id = workflowResponse.id, + monitorIds = Collections.emptyList() + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Delegates list can not be empty.") + ) + } + } + } + + fun `test create workflow duplicate delegate failure`() { + val workflow = randomWorkflow( + monitorIds = listOf("1", "1", "2") + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Duplicate delegates not allowed") + ) + } + } + } + + fun `test update workflow duplicate delegate failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + + workflow = randomWorkflow( + id = workflowResponse.id, + monitorIds = listOf("1", "1", "2") + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Duplicate delegates not allowed") + ) + } + } + } + + fun `test create workflow delegate monitor doesn't exist failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! 
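+        // "-1" below is not an id of any indexed monitor, so indexing the workflow is expected to fail validation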
+ + val workflow = randomWorkflow( + monitorIds = listOf("-1", monitorResponse.id) + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("are not valid monitor ids") + ) + } + } + } + + fun `test update workflow delegate monitor doesn't exist failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + + workflow = randomWorkflow( + id = workflowResponse.id, + monitorIds = listOf("-1", monitorResponse.id) + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("are not valid monitor ids") + ) + } + } + } + + fun `test create workflow sequence order not correct failure`() { + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3") + ) + val workflow = randomWorkflowWithDelegates( + delegates = delegates + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + ) + } + } + } + + fun `test update workflow sequence order not correct failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
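+        // A valid single-delegate workflow exists at this point; the update below reuses order value 1 twice and must be rejected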
+ assertNotNull("Workflow creation failed", workflowResponse) + + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3") + ) + workflow = randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + ) + } + } + } + + fun `test create workflow chained findings monitor not in sequence failure`() { + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedFindings("monitor-x")) + ) + val workflow = randomWorkflowWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + ) + } + } + } + + fun `test create workflow query monitor chained findings monitor failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val docMonitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val docMonitorResponse = createMonitor(docMonitor)!! + + val queryMonitor = randomQueryLevelMonitor() + val queryMonitorResponse = createMonitor(queryMonitor)!! + + var workflow = randomWorkflow( + monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id) + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Query level monitor can't be part of chained findings") + ) + } + } + } + + fun `test create workflow when monitor index not initialized failure`() { + val delegates = listOf( + Delegate(1, "monitor-1") + ) + val workflow = randomWorkflowWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Monitors not found") + ) + } + } + } + + fun `test update workflow chained findings monitor not in sequence failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
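+        // The update below chains "monitor-x", which is absent from the delegate sequence, so indexing must fail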
+ assertNotNull("Workflow creation failed", workflowResponse) + + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedFindings("monitor-x")) + ) + workflow = randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + ) + } + } + } + + fun `test create workflow chained findings order not correct failure`() { + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedFindings("monitor-2")) + ) + val workflow = randomWorkflowWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + ) + } + } + } + + fun `test update workflow chained findings order not correct failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedFindings("monitor-2")) + ) + workflow = randomWorkflowWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + ) + } + } + } +} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt new file mode 100644 index 000000000..466e62d35 --- /dev/null +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -0,0 +1,638 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting + +import org.junit.Assert +import org.opensearch.action.support.WriteRequest +import org.opensearch.alerting.model.DocumentLevelTriggerRunResult +import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase +import org.opensearch.alerting.util.AlertingException +import org.opensearch.commons.alerting.action.AcknowledgeAlertRequest +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import 
org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Table +import org.opensearch.index.query.QueryBuilders +import org.opensearch.rest.RestStatus +import org.opensearch.script.Script +import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder +import org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder +import org.opensearch.search.builder.SearchSourceBuilder +import java.lang.Exception +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import java.time.temporal.ChronoUnit +import java.util.NoSuchElementException +import java.util.concurrent.ExecutionException + +class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { + + fun `test execute workflow with custom alerts and finding index with doc level delegates`() { + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customAlertsIndex1 = "custom_alerts_index" + val customFindingsIndex1 = "custom_findings_index" + val customFindingsIndexPattern1 = "custom_findings_index-1" + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + dataSources = DataSources( + alertsIndex = customAlertsIndex1, + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1 + ) + ) + val monitorResponse = createMonitor(monitor1)!! + + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customAlertsIndex2 = "custom_alerts_index_2" + val customFindingsIndex2 = "custom_findings_index_2" + val customFindingsIndexPattern2 = "custom_findings_index-2" + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + dataSources = DataSources( + alertsIndex = customAlertsIndex2, + findingsIndex = customFindingsIndex2, + findingsIndexPattern = customFindingsIndexPattern2 + ) + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
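+        // monitor2 is chained to monitor1's findings, so it should only match docs that monitor1 already matched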
+ val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 + val testDoc1 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16644, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-west-2" + }""" + indexDoc(index, "1", testDoc1) + + testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 and monitor2 + val testDoc2 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16645, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-west-2" + }""" + indexDoc(index, "2", testDoc2) + + testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Doesn't match + val testDoc3 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16645, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-east-1" + }""" + indexDoc(index, "3", testDoc3) + + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! + val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult + assertEquals(2, monitorsRunResults.size) + + assertEquals(monitor1.name, monitorsRunResults[0].monitorName) + assertEquals(1, monitorsRunResults[0].triggerResults.size) + + Assert.assertEquals(monitor2.name, monitorsRunResults[1].monitorName) + Assert.assertEquals(1, monitorsRunResults[1].triggerResults.size) + + assertAlerts(monitorResponse, customAlertsIndex1, 2) + assertFindings(monitorResponse.id, customFindingsIndex1, 2, 2, listOf("1", "2")) + + assertAlerts(monitorResponse2, customAlertsIndex2, 1) + assertFindings(monitorResponse2.id, customFindingsIndex2, 1, 1, listOf("2")) + } + + fun `test execute workflow verify workflow metadata`() { + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1) + ) + val monitorResponse = createMonitor(monitor1)!! + + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
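+        // Every non-dryrun execution below should refresh the workflow metadata with the latest execution id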
+        val workflowById = searchWorkflow(workflowResponse.id)
+        assertNotNull(workflowById)
+
+        var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS))
+        // Matches monitor1
+        val testDoc1 = """{
+            "message" : "This is an error from IAD region",
+            "source.ip.v6.v2" : 16644,
+            "test_strict_date_time" : "$testTime",
+            "test_field_1" : "us-west-2"
+        }"""
+        indexDoc(index, "1", testDoc1)
+        // First execution
+        val workflowId = workflowResponse.id
+        val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!!
+        val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult
+        assertEquals(2, monitorsRunResults.size)
+
+        val workflowMetadata = searchWorkflowMetadata(id = workflowId)
+        assertNotNull("Workflow metadata not initialized", workflowMetadata)
+        assertEquals(
+            "Workflow metadata execution id not correct",
+            executeWorkflowResponse.workflowRunResult.executionId,
+            workflowMetadata!!.latestExecutionId
+        )
+        // Second execution
+        val executeWorkflowResponse1 = executeWorkflow(workflowById, workflowId, false)!!
+        val monitorsRunResults1 = executeWorkflowResponse1.workflowRunResult.workflowRunResult
+        assertEquals(2, monitorsRunResults1.size)
+
+        val workflowMetadata1 = searchWorkflowMetadata(id = workflowId)
+        assertNotNull("Workflow metadata not initialized", workflowMetadata1)
+        assertEquals(
+            "Workflow metadata execution id not correct",
+            executeWorkflowResponse1.workflowRunResult.executionId,
+            workflowMetadata1!!.latestExecutionId
+        )
+    }
+
+    fun `test execute workflow dryrun verify workflow metadata not created`() {
+        val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3")
+        val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1))
+        val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN)
+        var monitor1 = randomDocumentLevelMonitor(
+            inputs = listOf(docLevelInput1),
+            triggers = listOf(trigger1)
+        )
+        val monitorResponse = createMonitor(monitor1)!!
+
+        val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4")
+        val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))
+        val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN)
+        var monitor2 = randomDocumentLevelMonitor(
+            inputs = listOf(docLevelInput2),
+            triggers = listOf(trigger2),
+        )
+
+        val monitorResponse2 = createMonitor(monitor2)!!
+
+        var workflow = randomWorkflow(
+            monitorIds = listOf(monitorResponse.id, monitorResponse2.id)
+        )
+        val workflowResponse = upsertWorkflow(workflow)!!
+        val workflowById = searchWorkflow(workflowResponse.id)
+        assertNotNull(workflowById)
+
+        var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS))
+        // Matches monitor1
+        val testDoc1 = """{
+            "message" : "This is an error from IAD region",
+            "source.ip.v6.v2" : 16644,
+            "test_strict_date_time" : "$testTime",
+            "test_field_1" : "us-west-2"
+        }"""
+        indexDoc(index, "1", testDoc1)
+        // First execution
+        val workflowId = workflowResponse.id
+        val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, true)
+        assertNotNull("Workflow run result is null", executeWorkflowResponse)
+        val monitorsRunResults = executeWorkflowResponse!!.workflowRunResult.workflowRunResult
+        assertEquals(2, monitorsRunResults.size)
+
+        var exception: Exception?
= null + try { + searchWorkflowMetadata(id = workflowId) + } catch (ex: Exception) { + exception = ex + } + assertTrue(exception is NoSuchElementException) + } + + fun `test execute workflow with custom alerts and finding index with bucket level doc level delegates when bucket level delegate is used in chained finding`() { + val query = QueryBuilders.rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1") + ) + val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) + val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) + // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping + val triggerScript = """ + params.docCount > 1 + """.trimIndent() + + var trigger = randomBucketLevelTrigger() + trigger = trigger.copy( + bucketSelector = BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ) + ) + val bucketCustomAlertsIndex = "custom_alerts_index" + val bucketCustomFindingsIndex = "custom_findings_index" + val bucketCustomFindingsIndexPattern = "custom_findings_index-1" + + val bucketLevelMonitorResponse = createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources( + findingsEnabled = true, + alertsIndex = bucketCustomAlertsIndex, + findingsIndex = bucketCustomFindingsIndex, + findingsIndexPattern = bucketCustomFindingsIndexPattern + ) + ) + )!! + + val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1") + val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_1\"", name = "2") + val docQuery3 = DocLevelQuery(query = "test_field_1:\"test_value_3\"", name = "3") + val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2, docQuery3)) + val docTrigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val docCustomAlertsIndex = "custom_alerts_index" + val docCustomFindingsIndex = "custom_findings_index" + val docCustomFindingsIndexPattern = "custom_findings_index-1" + var docLevelMonitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(docTrigger), + dataSources = DataSources( + alertsIndex = docCustomAlertsIndex, + findingsIndex = docCustomFindingsIndex, + findingsIndexPattern = docCustomFindingsIndexPattern + ) + ) + + val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! + // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) + var workflow = randomWorkflow( + monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + // Creates 5 documents + insertSampleTimeSerializedData( + index, + listOf( + "test_value_1", + "test_value_1", // adding duplicate to verify aggregation + "test_value_2", + "test_value_2", + "test_value_3" + ) + ) + + val workflowId = workflowResponse.id + // 1. bucket level monitor should reduce the doc findings to 4 (1, 2, 3, 4) + // 2. 
Doc level monitor will match those 4 documents although it contains rules for matching all 5 documents (docQuery3 matches the fifth)
+        val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!!
+        assertNotNull(executeWorkflowResponse)
+
+        for (monitorRunResults in executeWorkflowResponse.workflowRunResult.workflowRunResult) {
+            if (bucketLevelMonitorResponse.monitor.name == monitorRunResults.monitorName) {
+                val searchResult = monitorRunResults.inputResults.results.first()
+                @Suppress("UNCHECKED_CAST")
+                val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List<Map<String, Any>>
+                assertEquals("Incorrect search result", 3, buckets.size)
+
+                assertAlerts(bucketLevelMonitorResponse, bucketCustomAlertsIndex, 2)
+                assertFindings(bucketLevelMonitorResponse.id, bucketCustomFindingsIndex, 1, 4, listOf("1", "2", "3", "4"))
+            } else {
+                assertEquals(1, monitorRunResults.inputResults.results.size)
+                val values = monitorRunResults.triggerResults.values
+                assertEquals(1, values.size)
+                @Suppress("UNCHECKED_CAST")
+                val docLevelTrigger = values.iterator().next() as DocumentLevelTriggerRunResult
+                val triggeredDocIds = docLevelTrigger.triggeredDocs.map { it.split("|")[0] }
+                val expectedTriggeredDocIds = listOf("1", "2", "3", "4")
+                assertEquals(expectedTriggeredDocIds, triggeredDocIds.sorted())
+
+                assertAlerts(docLevelMonitorResponse, docCustomAlertsIndex, 4)
+                assertFindings(docLevelMonitorResponse.id, docCustomFindingsIndex, 4, 4, listOf("1", "2", "3", "4"))
+            }
+        }
+    }
+
+    fun `test execute workflow with custom alerts and finding index with bucket level and doc level delegates when doc level delegate is used in chained finding`() {
+        val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1")
+        val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_3\"", name = "2")
+
+        var docLevelMonitor = randomDocumentLevelMonitor(
+            inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))),
+            triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)),
+            dataSources = DataSources(
+                alertsIndex = "custom_alerts_index",
+                findingsIndex = "custom_findings_index",
+                findingsIndexPattern = "custom_findings_index-1"
+            )
+        )
+
+        val docLevelMonitorResponse = createMonitor(docLevelMonitor)!!
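+        // Next, a bucket level monitor over the same index is created; randomWorkflow will chain it to this doc level monitor's findings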
+ + val query = QueryBuilders.rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1") + ) + val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) + val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) + // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping + val triggerScript = """ + params.docCount > 1 + """.trimIndent() + + var trigger = randomBucketLevelTrigger() + trigger = trigger.copy( + bucketSelector = BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ) + ) + + val bucketLevelMonitorResponse = createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1" + ) + ) + )!! + + var docLevelMonitor1 = randomDocumentLevelMonitor( + // Match the documents with test_field_1: test_value_3 + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index_1", + findingsIndex = "custom_findings_index_1", + findingsIndexPattern = "custom_findings_index_1-1" + ) + ) + + val docLevelMonitorResponse1 = createMonitor(docLevelMonitor1)!! + + val queryMonitorInput = SearchInput( + indices = listOf(index), + query = SearchSourceBuilder().query( + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + ) + ) + val queryTriggerScript = """ + return ctx.results[0].hits.hits.size() > 0 + """.trimIndent() + + val queryLevelTrigger = randomQueryLevelTrigger(condition = Script(queryTriggerScript)) + val queryMonitorResponse = createMonitor(randomQueryLevelMonitor(inputs = listOf(queryMonitorInput), triggers = listOf(queryLevelTrigger)))!! + + // 1. docMonitor (chainedFinding = null) 2. bucketMonitor (chainedFinding = docMonitor) 3. docMonitor (chainedFinding = bucketMonitor) 4. queryMonitor (chainedFinding = docMonitor 3) + var workflow = randomWorkflow( + monitorIds = listOf(docLevelMonitorResponse.id, bucketLevelMonitorResponse.id, docLevelMonitorResponse1.id, queryMonitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + // Creates 5 documents + insertSampleTimeSerializedData( + index, + listOf( + "test_value_1", + "test_value_1", // adding duplicate to verify aggregation + "test_value_2", + "test_value_2", + "test_value_3", + "test_value_3" + ) + ) + + val workflowId = workflowResponse.id + // 1. Doc level monitor should reduce the doc findings to 4 (3 - test_value_2, 4 - test_value_2, 5 - test_value_3, 6 - test_value_3) + // 2. 
Bucket level monitor will fetch the docs from the current execution's findings, although it contains rules for matching documents which have test_value_2 and test_value_3
+        val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!!
+        assertNotNull(executeWorkflowResponse)
+
+        for (monitorRunResults in executeWorkflowResponse.workflowRunResult.workflowRunResult) {
+            when (monitorRunResults.monitorName) {
+                // Verify first doc level monitor execution, alerts and findings
+                docLevelMonitorResponse.monitor.name -> {
+                    assertEquals(1, monitorRunResults.inputResults.results.size)
+                    val values = monitorRunResults.triggerResults.values
+                    assertEquals(1, values.size)
+                    @Suppress("UNCHECKED_CAST")
+                    val docLevelTrigger = values.iterator().next() as DocumentLevelTriggerRunResult
+                    val triggeredDocIds = docLevelTrigger.triggeredDocs.map { it.split("|")[0] }
+                    val expectedTriggeredDocIds = listOf("3", "4", "5", "6")
+                    assertEquals(expectedTriggeredDocIds, triggeredDocIds.sorted())
+
+                    assertAlerts(docLevelMonitorResponse, docLevelMonitorResponse.monitor.dataSources.alertsIndex, 4)
+                    assertFindings(docLevelMonitorResponse.id, docLevelMonitorResponse.monitor.dataSources.findingsIndex, 4, 4, listOf("3", "4", "5", "6"))
+                }
+                // Verify second bucket level monitor execution, alerts and findings
+                bucketLevelMonitorResponse.monitor.name -> {
+                    val searchResult = monitorRunResults.inputResults.results.first()
+                    @Suppress("UNCHECKED_CAST")
+                    val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List<Map<String, Any>>
+                    assertEquals("Incorrect search result", 2, buckets.size)
+
+                    assertAlerts(bucketLevelMonitorResponse, bucketLevelMonitorResponse.monitor.dataSources.alertsIndex, 2)
+                    assertFindings(bucketLevelMonitorResponse.id, bucketLevelMonitorResponse.monitor.dataSources.findingsIndex, 1, 4, listOf("3", "4", "5", "6"))
+                }
+                // Verify third doc level monitor execution, alerts and findings
+                docLevelMonitorResponse1.monitor.name -> {
+                    assertEquals(1, monitorRunResults.inputResults.results.size)
+                    val values = monitorRunResults.triggerResults.values
+                    assertEquals(1, values.size)
+                    @Suppress("UNCHECKED_CAST")
+                    val docLevelTrigger = values.iterator().next() as DocumentLevelTriggerRunResult
+                    val triggeredDocIds = docLevelTrigger.triggeredDocs.map { it.split("|")[0] }
+                    val expectedTriggeredDocIds = listOf("5", "6")
+                    assertEquals(expectedTriggeredDocIds, triggeredDocIds.sorted())
+
+                    assertAlerts(docLevelMonitorResponse1, docLevelMonitorResponse1.monitor.dataSources.alertsIndex, 2)
+                    assertFindings(docLevelMonitorResponse1.id, docLevelMonitorResponse1.monitor.dataSources.findingsIndex, 2, 2, listOf("5", "6"))
+                }
+                // Verify fourth query level monitor execution
+                queryMonitorResponse.monitor.name -> {
+                    assertEquals(1, monitorRunResults.inputResults.results.size)
+                    val values = monitorRunResults.triggerResults.values
+                    assertEquals(1, values.size)
+                    @Suppress("UNCHECKED_CAST")
+                    val totalHits = ((monitorRunResults.inputResults.results[0]["hits"] as Map<String, Any>)["total"] as Map<String, Any>)["value"]
+                    assertEquals(2, totalHits)
+                    @Suppress("UNCHECKED_CAST")
+                    val docIds = ((monitorRunResults.inputResults.results[0]["hits"] as Map<String, Any>)["hits"] as List<Map<String, Any>>).map { it["_id"]!!
} + assertEquals(listOf("5", "6"), docIds.sorted()) + } + } + } + } + + fun `test execute workflow input error`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + var workflow = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + deleteIndex(index) + + val response = executeWorkflow(workflowById, workflowById!!.id, false)!! + + assertNotNull(response.workflowRunResult.error) + assertTrue(response.workflowRunResult.error is AlertingException) + assertEquals(RestStatus.NOT_FOUND, (response.workflowRunResult.error as AlertingException).status) + assertEquals("Configured indices are not found: [$index]", (response.workflowRunResult.error as AlertingException).message) + } + + fun `test execute workflow wrong workflow id`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflow( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflowRequest)!! + val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + var exception: Exception? 
= null
+        val badWorkflowId = getWorkflowResponse.id + "bad"
+        try {
+            executeWorkflow(id = badWorkflowId)
+        } catch (ex: Exception) {
+            exception = ex
+        }
+        assertTrue(exception is ExecutionException)
+        assertTrue(exception!!.cause is AlertingException)
+        assertEquals(RestStatus.NOT_FOUND, (exception.cause as AlertingException).status)
+        assertEquals("Can't find workflow with id: $badWorkflowId", exception.cause!!.message)
+    }
+
+    private fun assertFindings(
+        monitorId: String,
+        customFindingsIndex: String,
+        findingSize: Int,
+        matchedQueryNumber: Int,
+        relatedDocIds: List<String>
+    ) {
+        val findings = searchFindings(monitorId, customFindingsIndex)
+        assertEquals("Findings saved for test monitor", findingSize, findings.size)
+
+        val findingDocIds = findings.flatMap { it.relatedDocIds }
+
+        assertEquals("Didn't match $matchedQueryNumber query", matchedQueryNumber, findingDocIds.size)
+        assertTrue("Findings saved for test monitor", relatedDocIds.containsAll(findingDocIds))
+    }
+
+    private fun assertAlerts(
+        monitorResponse: IndexMonitorResponse,
+        customAlertsIndex: String,
+        alertSize: Int
+    ) {
+        val monitorId = monitorResponse.id
+        val alerts = searchAlerts(monitorId, customAlertsIndex)
+        assertEquals("Alert saved for test monitor", alertSize, alerts.size)
+        val table = Table("asc", "id", null, alertSize, 0, "")
+        var getAlertsResponse = client()
+            .execute(
+                AlertingActions.GET_ALERTS_ACTION_TYPE,
+                GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)
+            )
+            .get()
+        assertTrue(getAlertsResponse != null)
+        assertTrue(getAlertsResponse.alerts.size == alertSize)
+        getAlertsResponse = client()
+            .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, null))
+            .get()
+        assertTrue(getAlertsResponse != null)
+        assertTrue(getAlertsResponse.alerts.size == alertSize)
+
+        val alertIds = getAlertsResponse.alerts.map { it.id }
+        val acknowledgeAlertResponse = client().execute(
+            AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE,
+            AcknowledgeAlertRequest(monitorId, alertIds, WriteRequest.RefreshPolicy.IMMEDIATE)
+        ).get()
+
+        assertEquals(alertSize, acknowledgeAlertResponse.acknowledged.size)
+    }
+}
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt
index 61e788a32..2b763a464 100644
--- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt
+++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt
@@ -6,6 +6,7 @@
 package org.opensearch.alerting.transport
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope
+import org.opensearch.action.admin.indices.delete.DeleteIndexRequest
 import org.opensearch.action.admin.indices.get.GetIndexRequestBuilder
 import org.opensearch.action.admin.indices.get.GetIndexResponse
 import org.opensearch.action.admin.indices.refresh.RefreshAction
@@ -20,9 +21,13 @@ import org.opensearch.alerting.action.GetMonitorRequest
 import org.opensearch.alerting.alerts.AlertIndices
 import org.opensearch.common.settings.Settings
 import org.opensearch.common.unit.TimeValue
+import org.opensearch.common.xcontent.XContentBuilder
+import org.opensearch.common.xcontent.XContentFactory
 import org.opensearch.common.xcontent.XContentType
 import org.opensearch.common.xcontent.json.JsonXContent
 import org.opensearch.commons.alerting.action.AlertingActions
+import org.opensearch.commons.alerting.action.DeleteMonitorRequest
+import org.opensearch.commons.alerting.action.DeleteMonitorResponse
 import org.opensearch.commons.alerting.action.GetFindingsRequest
 import org.opensearch.commons.alerting.action.GetFindingsResponse
 import org.opensearch.commons.alerting.action.IndexMonitorRequest
@@ -31,15 +36,21 @@ import org.opensearch.commons.alerting.model.Alert
 import org.opensearch.commons.alerting.model.Finding
 import org.opensearch.commons.alerting.model.Monitor
 import org.opensearch.commons.alerting.model.Table
+import org.opensearch.index.IndexService
 import org.opensearch.index.query.TermQueryBuilder
 import org.opensearch.index.reindex.ReindexPlugin
 import org.opensearch.index.seqno.SequenceNumbers
+import org.opensearch.painless.PainlessPlugin
 import org.opensearch.plugins.Plugin
 import org.opensearch.rest.RestRequest
+import org.opensearch.script.mustache.MustachePlugin
 import org.opensearch.search.builder.SearchSourceBuilder
 import org.opensearch.search.fetch.subphase.FetchSourceContext
 import org.opensearch.test.OpenSearchSingleNodeTestCase
 import java.time.Instant
+import java.time.ZonedDateTime
+import java.time.format.DateTimeFormatter
+import java.time.temporal.ChronoUnit
 import java.util.*
 
 /**
@@ -70,19 +81,60 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() {
         return client().execute(ExecuteMonitorAction.INSTANCE, request).get()
     }
 
+    protected fun insertSampleTimeSerializedData(index: String, data: List<String>) {
+        data.forEachIndexed { i, value ->
+            val twoMinsAgo = ZonedDateTime.now().minus(2, ChronoUnit.MINUTES).truncatedTo(ChronoUnit.MILLIS)
+            val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(twoMinsAgo)
+            val testDoc = """
+                {
+                    "test_strict_date_time": "$testTime",
+                    "test_field_1": "$value",
+                    "number": "$i"
+                }
+            """.trimIndent()
+            // Indexing documents with deterministic doc id to allow for easy selected deletion during testing
+            indexDoc(index, (i + 1).toString(), testDoc)
+        }
+    }
+
+    @Suppress("UNCHECKED_CAST")
+    fun Map<String, Any>.stringMap(key: String): Map<String, Any>? {
+        val map = this as Map<String, Map<String, Any>>
+        return map[key]
+    }
+
     /** A test index that can be used across tests. Feel free to add new fields but don't remove any. */
     protected fun createTestIndex() {
+        val mapping = XContentFactory.jsonBuilder()
+        mapping.startObject()
+            .startObject("properties")
+            .startObject("test_strict_date_time")
+            .field("type", "date")
+            .field("format", "strict_date_time")
+            .endObject()
+            .startObject("test_field_1")
+            .field("type", "keyword")
+            .endObject()
+            .endObject()
+            .endObject()
+
         createIndex(
-            index, Settings.EMPTY,
-            """
-                "properties" : {
-                    "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" },
-                    "test_field" : { "type" : "keyword" }
-                }
-            """.trimIndent()
+            index, Settings.EMPTY, mapping
        )
    }
 
+    private fun createIndex(
+        index: String?,
+        settings: Settings?,
+        mappings: XContentBuilder?,
+    ): IndexService? {
+        val createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings)
+        if (mappings != null) {
+            createIndexRequestBuilder.setMapping(mappings)
+        }
+        return this.createIndex(index, createIndexRequestBuilder)
+    }
+
     protected fun indexDoc(index: String, id: String, doc: String) {
         client().prepareIndex(index).setId(id)
             .setSource(doc, XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get()
@@ -137,7 +189,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() {
     protected fun searchFindings(
         id: String,
         indices: String = AlertIndices.ALL_FINDING_INDEX_PATTERN,
-        refresh: Boolean = true
+        refresh: Boolean = true,
     ): List<Finding> {
         if (refresh) refreshIndex(indices)
 
@@ -172,14 +224,23 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() {
     protected fun getMonitorResponse(
         monitorId: String,
         version: Long = 1L,
-        fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE
+        fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE,
     ) = client().execute(
         GetMonitorAction.INSTANCE,
         GetMonitorRequest(monitorId, version, RestRequest.Method.GET, fetchSourceContext)
     ).get()
 
+    protected fun deleteMonitor(monitorId: String): DeleteMonitorResponse = client().execute(
+        AlertingActions.DELETE_MONITOR_ACTION_TYPE, DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE)
+    ).get()
+
+    protected fun deleteIndex(index: String) {
+        val response = client().admin().indices().delete(DeleteIndexRequest(index)).get()
+        assertTrue("Unable to delete index", response.isAcknowledged())
+    }
+
     override fun getPlugins(): List<Class<out Plugin>> {
-        return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java)
+        return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java, MustachePlugin::class.java, PainlessPlugin::class.java)
     }
 
     override fun resetNodeAfterTest(): Boolean {
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt
new file mode 100644
index 000000000..21af929cb
--- /dev/null
+++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt
@@ -0,0 +1,137 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.alerting.transport
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope
+import org.opensearch.action.support.WriteRequest
+import org.opensearch.alerting.action.ExecuteWorkflowAction
+import org.opensearch.alerting.action.ExecuteWorkflowRequest
+import org.opensearch.alerting.action.ExecuteWorkflowResponse
+import org.opensearch.alerting.model.WorkflowMetadata
+import org.opensearch.common.unit.TimeValue
+import org.opensearch.common.xcontent.XContentParser
+import org.opensearch.common.xcontent.json.JsonXContent
+import org.opensearch.commons.alerting.action.AlertingActions
+import org.opensearch.commons.alerting.action.DeleteWorkflowRequest
+import org.opensearch.commons.alerting.action.GetWorkflowRequest
+import org.opensearch.commons.alerting.action.GetWorkflowResponse
+import org.opensearch.commons.alerting.action.IndexWorkflowRequest
+import org.opensearch.commons.alerting.action.IndexWorkflowResponse
+import org.opensearch.commons.alerting.model.ScheduledJob
+import org.opensearch.commons.alerting.model.Workflow
+import org.opensearch.index.query.TermQueryBuilder
+import org.opensearch.index.seqno.SequenceNumbers
diff --git a/build.gradle b/build.gradle
index 6cb2b83e6..fe7ed38bc 100644
--- a/build.gradle
+++ b/build.gradle
@@ -7,7 +7,7 @@ buildscript {
     apply from: 'build-tools/repositories.gradle'

     ext {
-        opensearch_version = System.getProperty("opensearch.version", "2.4.0-SNAPSHOT")
+        opensearch_version = System.getProperty("opensearch.version", "2.5.0-SNAPSHOT")
         buildVersionQualifier = System.getProperty("build.version_qualifier", "")
         isSnapshot = "true" == System.getProperty("build.snapshot", "true")
         // 2.4.0-SNAPSHOT -> 2.4.0.0-SNAPSHOT
diff --git a/core/build.gradle b/core/build.gradle
index ce258112b..f4432bb06 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -15,7 +15,7 @@ dependencies {
     implementation "com.cronutils:cron-utils:9.1.6"
     api "org.opensearch.client:opensearch-rest-client:${opensearch_version}"
     implementation 'com.google.googlejavaformat:google-java-format:1.10.0'
-    api files("/Users/snistala/Documents/opensearch/common-utils/build/libs/common-utils-3.0.0.0-SNAPSHOT.jar")
+    api files("/home/stevan/git/opensearch/repo/common-utils/build/libs/common-utils-2.5.0.0-SNAPSHOT.jar")
     implementation 'commons-validator:commons-validator:1.7'
     testImplementation "org.opensearch.test:framework:${opensearch_version}"
     testImplementation "org.jetbrains.kotlin:kotlin-test:${kotlin_version}"
diff --git a/core/src/main/resources/mappings/scheduled-jobs.json b/core/src/main/resources/mappings/scheduled-jobs.json
index 768f73a9a..630f99cfd 100644
--- a/core/src/main/resources/mappings/scheduled-jobs.json
+++ b/core/src/main/resources/mappings/scheduled-jobs.json
@@ -404,20 +404,30 @@
         "inputs": {
           "type": "nested",
           "properties": {
-            "search": {
+            "composite_input": {
+              "type": "nested",
               "properties": {
-                "indices": {
-                  "type": "text",
-                  "fields": {
-                    "keyword": {
-                      "type": "keyword",
-                      "ignore_above": 256
+                "sequence": {
+                  "properties": {
+                    "delegates": {
+                      "type": "nested",
+                      "properties": {
+                        "order": {
+                          "type": "integer"
+                        },
+                        "monitor_id": {
+                          "type": "keyword"
+                        },
+                        "chained_findings": {
+                          "properties": {
+                            "monitor_id": {
+                              "type": "keyword"
+                            }
+                          }
+                        }
+                      }
                     }
                   }
-                },
-                "query": {
-                  "type": "object",
-                  "enabled": false
                 }
               }
             }
@@ -810,6 +820,29 @@
           "enabled": false
         }
       }
+    },
+    "workflow_metadata" : {
+      "properties": {
+        "workflow_id": {
+          "type": "keyword"
+        },
+        "monitor_ids": {
+          "type": "text",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "latest_run_time": {
+          "type": "date",
+          "format": "strict_date_time||epoch_millis"
+        },
+        "latest_execution_id": {
+          "type": "keyword"
+        }
+      }
+    }
   }
 }
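For orientation, the two mapping changes above correspond to documents shaped roughly like the sketch below (ids, timestamp, and the latest_execution_id format are invented for illustration; the composite input lists delegate monitors in execution order, optionally chained to an upstream monitor's findings):

    // Illustrative fragments matching the new mappings; not taken from the change.
    val sampleCompositeInput = """
        {
          "sequence": {
            "delegates": [
              { "order": 1, "monitor_id": "monitor-1" },
              { "order": 2, "monitor_id": "monitor-2",
                "chained_findings": { "monitor_id": "monitor-1" } }
            ]
          }
        }
    """.trimIndent()

    val sampleWorkflowMetadata = """
        {
          "workflow_metadata": {
            "workflow_id": "wf-1",
            "monitor_ids": ["monitor-1", "monitor-2"],
            "latest_run_time": "2022-12-01T10:15:30.000Z",
            "latest_execution_id": "wf-1_2022-12-01T10:15:30.000Z"
          }
        }
    """.trimIndent()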