From 670b18879c26e401fee3d57782afc8a06003bb7b Mon Sep 17 00:00:00 2001 From: Petar Dzepina Date: Tue, 9 May 2023 21:20:47 +0200 Subject: [PATCH] DocLevelMonitor Error Alert revisit (#905) (#909) * DocLevelMonitor Error Alert revisit (#905) * refactored to process multiple alerts when clearing/moving * limited upserting error alerts only in alertsIndex Signed-off-by: Petar Dzepina --- .../org/opensearch/alerting/AlertService.kt | 166 +++++++++++++++++- .../alerting/DocumentLevelMonitorRunner.kt | 13 ++ .../alerting/alerts/AlertIndices.kt | 4 +- .../opensearch/alerting/util/AlertingUtils.kt | 2 + .../alerting/MonitorDataSourcesIT.kt | 156 +++++++++++++++- 5 files changed, 331 insertions(+), 10 deletions(-) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt index 14e867245..494708882 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt @@ -7,6 +7,7 @@ package org.opensearch.alerting import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper +import org.opensearch.action.ActionListener import org.opensearch.action.DocWriteRequest import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.action.bulk.BulkRequest @@ -26,9 +27,11 @@ import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.script.DocumentLevelTriggerExecutionContext import org.opensearch.alerting.script.QueryLevelTriggerExecutionContext import org.opensearch.alerting.util.IndexUtils +import org.opensearch.alerting.util.MAX_SEARCH_SIZE import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.client.Client import org.opensearch.common.bytes.BytesReference +import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper @@ -46,12 +49,20 @@ import org.opensearch.commons.alerting.model.Trigger import org.opensearch.commons.alerting.model.action.AlertCategory import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser +import org.opensearch.index.VersionType import org.opensearch.index.query.QueryBuilders +import org.opensearch.index.reindex.BulkByScrollResponse +import org.opensearch.index.reindex.DeleteByQueryAction +import org.opensearch.index.reindex.DeleteByQueryRequestBuilder import org.opensearch.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.search.sort.SortOrder import java.time.Instant import java.util.UUID +import java.util.concurrent.TimeUnit +import kotlin.coroutines.resume +import kotlin.coroutines.resumeWithException +import kotlin.coroutines.suspendCoroutine /** Service that handles CRUD operations for alerts */ class AlertService( @@ -62,6 +73,9 @@ class AlertService( companion object { const val MAX_BUCKET_LEVEL_MONITOR_ALERT_SEARCH_COUNT = 500 + const val ERROR_ALERT_ID_PREFIX = "error-alert" + + val ALERTS_SEARCH_TIMEOUT = TimeValue(5, TimeUnit.MINUTES) } private val logger = LogManager.getLogger(AlertService::class.java) @@ -306,18 +320,16 @@ class AlertService( } suspend fun upsertMonitorErrorAlert(monitor: Monitor, errorMessage: String) { - val errorAlertIdPrefix = "error-alert" - val newErrorAlertId = "$errorAlertIdPrefix-${monitor.id}-${UUID.randomUUID()}" + val newErrorAlertId = 
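// --- Illustrative sketch (not part of the patch) ----------------------------------------------
// How the refactored error-alert lookup in upsertMonitorErrorAlert behaves after this change:
// the previous query_string match on the "error-alert" id prefix against "<alertsIndex>*" is
// replaced by exact term queries on monitor_id and state, run against the concrete alerts index.
// Names below mirror AlertService (client, suspendUntil, contentParser); the helper itself is
// hypothetical and only illustrates the lookup.
private suspend fun findLatestErrorAlert(monitor: Monitor): Alert? {
    val searchRequest = SearchRequest(monitor.dataSources.alertsIndex)
        .source(
            SearchSourceBuilder()
                .size(1)
                .sort(Alert.START_TIME_FIELD, SortOrder.DESC)
                .query(
                    QueryBuilders.boolQuery()
                        .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id))
                        // The state field stores the enum name, hence State.ERROR.name rather than the enum itself
                        .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name))
                )
        )
    val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) }
    val hit = searchResponse.hits.hits.firstOrNull() ?: return null
    return Alert.parse(contentParser(hit.sourceRef), hit.id, hit.version)
}
// -----------------------------------------------------------------------------------------------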
"$ERROR_ALERT_ID_PREFIX-${monitor.id}-${UUID.randomUUID()}" - val searchRequest = SearchRequest("${monitor.dataSources.alertsIndex}*") + val searchRequest = SearchRequest(monitor.dataSources.alertsIndex) .source( SearchSourceBuilder() .sort(Alert.START_TIME_FIELD, SortOrder.DESC) .query( QueryBuilders.boolQuery() - .must(QueryBuilders.queryStringQuery("${Alert.ALERT_ID_FIELD}:$errorAlertIdPrefix*")) .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id)) - .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR)) + .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) ) ) val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } @@ -345,7 +357,7 @@ class AlertService( lastNotificationTime = currentTime ) } else { - existingErrorAlert.copy(startTime = Instant.now(), lastNotificationTime = currentTime) + existingErrorAlert.copy(lastNotificationTime = currentTime) } } @@ -353,12 +365,154 @@ class AlertService( .routing(alert.monitorId) .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) .opType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(alert.id) val indexResponse: IndexResponse = client.suspendUntil { index(alertIndexRequest, it) } logger.debug("Monitor error Alert successfully upserted. Op result: ${indexResponse.result}") } + suspend fun clearMonitorErrorAlert(monitor: Monitor) { + val currentTime = Instant.now() + try { + val searchRequest = SearchRequest("${monitor.dataSources.alertsIndex}") + .source( + SearchSourceBuilder() + .size(MAX_SEARCH_SIZE) + .sort(Alert.START_TIME_FIELD, SortOrder.DESC) + .query( + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id)) + .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) + ) + + ) + searchRequest.cancelAfterTimeInterval = ALERTS_SEARCH_TIMEOUT + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + // If there's no error alert present, there's nothing to clear. We can stop here. + if (searchResponse.hits.totalHits.value == 0L) { + return + } + + val indexRequests = mutableListOf() + searchResponse.hits.hits.forEach { hit -> + if (searchResponse.hits.totalHits.value > 1L) { + logger.warn("Found [${searchResponse.hits.totalHits.value}] error alerts for monitor [${monitor.id}] while clearing") + } + // Deserialize first/latest Alert + val xcp = contentParser(hit.sourceRef) + val existingErrorAlert = Alert.parse(xcp, hit.id, hit.version) + + val updatedAlert = existingErrorAlert.copy( + endTime = currentTime + ) + + indexRequests += IndexRequest(monitor.dataSources.alertsIndex) + .routing(monitor.id) + .id(updatedAlert.id) + .source(updatedAlert.toXContentWithUser(XContentFactory.jsonBuilder())) + .opType(DocWriteRequest.OpType.INDEX) + } + + val bulkResponse: BulkResponse = client.suspendUntil { + bulk(BulkRequest().add(indexRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } + if (bulkResponse.hasFailures()) { + bulkResponse.items.forEach { item -> + if (item.isFailed) { + logger.debug("Failed clearing error alert ${item.id} of monitor [${monitor.id}]") + } + } + } else { + logger.debug("[${bulkResponse.items.size}] Error Alerts successfully cleared. 
End time set to: $currentTime") + } + } catch (e: Exception) { + logger.error("Error clearing monitor error alerts for monitor [${monitor.id}]: ${ExceptionsHelper.detailedMessage(e)}") + } + } + + /** + * Moves already cleared "error alerts" to history index. + * Error Alert is cleared when endTime timestamp is set, on first successful run after failed run + * */ + suspend fun moveClearedErrorAlertsToHistory(monitorId: String, alertIndex: String, alertHistoryIndex: String) { + try { + val searchRequest = SearchRequest(alertIndex) + .source( + SearchSourceBuilder() + .size(MAX_SEARCH_SIZE) + .query( + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) + .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) + .must(QueryBuilders.existsQuery(Alert.END_TIME_FIELD)) + ) + .version(true) // Do we need this? + ) + searchRequest.cancelAfterTimeInterval = ALERTS_SEARCH_TIMEOUT + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + + if (searchResponse.hits.totalHits.value == 0L) { + return + } + + // Copy to history index + + val copyRequests = mutableListOf() + + searchResponse.hits.hits.forEach { hit -> + + val xcp = contentParser(hit.sourceRef) + val alert = Alert.parse(xcp, hit.id, hit.version) + + copyRequests.add( + IndexRequest(alertHistoryIndex) + .routing(alert.monitorId) + .source(hit.sourceRef, XContentType.JSON) + .version(hit.version) + .versionType(VersionType.EXTERNAL_GTE) + .id(hit.id) + .timeout(MonitorRunnerService.monitorCtx.indexTimeout) + ) + } + + val bulkResponse: BulkResponse = client.suspendUntil { + bulk(BulkRequest().add(copyRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } + if (bulkResponse.hasFailures()) { + bulkResponse.items.forEach { item -> + if (item.isFailed) { + logger.error("Failed copying error alert [${item.id}] to history index [$alertHistoryIndex]") + } + } + return + } + + // Delete from alertIndex + + val alertIds = searchResponse.hits.hits.map { it.id } + + val deleteResponse: BulkByScrollResponse = suspendCoroutine { cont -> + DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(alertIndex) + .filter(QueryBuilders.termsQuery("_id", alertIds)) + .refresh(true) + .timeout(ALERTS_SEARCH_TIMEOUT) + .execute( + object : ActionListener { + override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) + override fun onFailure(t: Exception) = cont.resumeWithException(t) + } + ) + } + deleteResponse.bulkFailures.forEach { + logger.error("Failed deleting alert while moving cleared alerts: [${it.id}] cause: [${it.cause}] ") + } + } catch (e: Exception) { + logger.error("Failed moving cleared error alerts to history index: ${ExceptionsHelper.detailedMessage(e)}") + } + } + suspend fun saveAlerts( dataSources: DataSources, alerts: List, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index 0700965e9..b4c43924a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -236,6 +236,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { val errorMessage = constructErrorMessageFromTriggerResults(triggerResults = triggerResults) if (errorMessage.isNotEmpty()) { monitorCtx.alertService!!.upsertMonitorErrorAlert(monitor = monitor, errorMessage = errorMessage) + } 
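// --- Illustrative sketch (not part of the patch) ----------------------------------------------
// Both the clear and the move paths above run asynchronous transport calls from a coroutine: the
// searches and bulk writes go through client.suspendUntil, while DeleteByQueryRequestBuilder only
// exposes execute(ActionListener), so the move step bridges it with suspendCoroutine by hand.
// A minimal generic form of that bridge (assumption: simplified, no cancellation handling):
suspend fun <T> awaitAction(block: (ActionListener<T>) -> Unit): T =
    suspendCoroutine { cont ->
        block(object : ActionListener<T> {
            override fun onResponse(response: T) = cont.resume(response)
            override fun onFailure(e: Exception) = cont.resumeWithException(e)
        })
    }

// Hypothetical usage, mirroring the delete-by-query in moveClearedErrorAlertsToHistory:
// val deleteResponse: BulkByScrollResponse = awaitAction { listener ->
//     DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE)
//         .source(alertIndex)
//         .filter(QueryBuilders.termsQuery("_id", alertIds))
//         .refresh(true)
//         .execute(listener)
// }
// -----------------------------------------------------------------------------------------------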
else { + onSuccessfulMonitorRun(monitorCtx, monitor) } MonitorMetadataService.upsertMetadata( @@ -259,6 +261,17 @@ object DocumentLevelMonitorRunner : MonitorRunner() { } } + private suspend fun onSuccessfulMonitorRun(monitorCtx: MonitorRunnerExecutionContext, monitor: Monitor) { + monitorCtx.alertService!!.clearMonitorErrorAlert(monitor) + if (monitor.dataSources.alertsHistoryIndex != null) { + monitorCtx.alertService!!.moveClearedErrorAlertsToHistory( + monitor.id, + monitor.dataSources.alertsIndex, + monitor.dataSources.alertsHistoryIndex!! + ) + } + } + private fun constructErrorMessageFromTriggerResults( triggerResults: MutableMap? = null ): String { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt index 42e706e74..1492c7681 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt @@ -475,7 +475,7 @@ class AlertIndices( clusterStateRequest, object : ActionListener { override fun onResponse(clusterStateResponse: ClusterStateResponse) { - if (clusterStateResponse.state.metadata.indices.isNotEmpty()) { + if (!clusterStateResponse.state.metadata.indices.isEmpty) { val indicesToDelete = getIndicesToDelete(clusterStateResponse) logger.info("Deleting old $tag indices viz $indicesToDelete") deleteAllOldHistoryIndices(indicesToDelete) @@ -510,7 +510,7 @@ class AlertIndices( ): String? { val creationTime = indexMetadata.creationDate if ((Instant.now().toEpochMilli() - creationTime) > retentionPeriodMillis) { - val alias = indexMetadata.aliases.entries.firstOrNull { writeIndex == it.value.alias } + val alias = indexMetadata.aliases.firstOrNull { writeIndex == it.value.alias } if (alias != null) { if (historyEnabled) { // If the index has the write alias and history is enabled, don't delete the index diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt index a7c8fe81c..1b8a153b3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt @@ -18,6 +18,8 @@ import org.opensearch.commons.alerting.util.isBucketLevelMonitor private val logger = LogManager.getLogger("AlertingUtils") +val MAX_SEARCH_SIZE = 10000 + /** * RFC 5322 compliant pattern matching: https://www.ietf.org/rfc/rfc5322.txt * Regex was based off of this post: https://stackoverflow.com/a/201378 diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt index 376b4262d..967bf413c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt @@ -17,6 +17,7 @@ import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest import org.opensearch.action.admin.indices.open.OpenIndexRequest import org.opensearch.action.admin.indices.refresh.RefreshRequest import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest +import org.opensearch.action.index.IndexRequest import org.opensearch.action.search.SearchRequest import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.action.SearchMonitorAction @@ -47,6 +48,7 @@ import org.opensearch.index.query.QueryBuilders import 
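// --- Illustrative sketch (not part of the patch) ----------------------------------------------
// The success/failure branch added to DocumentLevelMonitorRunner, condensed for context: a failed
// run creates or refreshes the single error alert, while the first successful run afterwards
// closes it (sets endTime) and, when a history index is configured, moves it out of the live
// alerts index. handleRunResult is a hypothetical name; the calls mirror the patch.
suspend fun handleRunResult(monitorCtx: MonitorRunnerExecutionContext, monitor: Monitor, errorMessage: String) {
    if (errorMessage.isNotEmpty()) {
        monitorCtx.alertService!!.upsertMonitorErrorAlert(monitor = monitor, errorMessage = errorMessage)
    } else {
        monitorCtx.alertService!!.clearMonitorErrorAlert(monitor)
        monitor.dataSources.alertsHistoryIndex?.let { historyIndex ->
            monitorCtx.alertService!!.moveClearedErrorAlertsToHistory(
                monitor.id,
                monitor.dataSources.alertsIndex,
                historyIndex
            )
        }
    }
}
// -----------------------------------------------------------------------------------------------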
org.opensearch.script.Script import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase +import java.time.Instant import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit.MILLIS @@ -605,7 +607,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertTrue(getAlertsResponse.alerts.size == 1) Assert.assertTrue(getAlertsResponse.alerts[0].errorMessage!!.contains("Trigger errors")) - val oldAlertStartTime = getAlertsResponse.alerts[0].startTime + val oldLastNotificationTime = getAlertsResponse.alerts[0].lastNotificationTime executeMonitorResponse = executeMonitor(monitor, id, false) Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) @@ -619,7 +621,157 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertTrue(getAlertsResponse.alerts.size == 1) Assert.assertEquals(0, getAlertsResponse.alerts[0].errorHistory.size) Assert.assertTrue(getAlertsResponse.alerts[0].errorMessage!!.contains("Trigger errors")) - Assert.assertTrue(getAlertsResponse.alerts[0].startTime.isAfter(oldAlertStartTime)) + Assert.assertTrue(getAlertsResponse.alerts[0].lastNotificationTime!!.isAfter(oldLastNotificationTime)) + } + + fun `test monitor error alert cleared after successful monitor run`() { + val customAlertIndex = "custom-alert-index" + val customAlertHistoryIndex = "custom-alert-history-index" + val customAlertHistoryIndexPattern = "" + val docQuery = DocLevelQuery(query = "source:12345", name = "1") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + alertsIndex = customAlertIndex, + alertsHistoryIndex = customAlertHistoryIndex, + alertsHistoryIndexPattern = customAlertHistoryIndexPattern + ) + ) + + val monitorResponse = createMonitor(monitor) + assertFalse(monitorResponse?.id.isNullOrEmpty()) + + monitor = monitorResponse!!.monitor + val id = monitorResponse.id + + // Close index to force error alert + client().admin().indices().close(CloseIndexRequest(index)).get() + + var executeMonitorResponse = executeMonitor(monitor, id, false) + Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) + Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) + searchAlerts(id) + var table = Table("asc", "id", null, 1, 0, "") + var getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertIndex)) + .get() + Assert.assertTrue(getAlertsResponse != null) + Assert.assertEquals(1, getAlertsResponse.alerts.size) + Assert.assertTrue(getAlertsResponse.alerts[0].errorMessage == "IndexClosedException[closed]") + Assert.assertNull(getAlertsResponse.alerts[0].endTime) + + // Open index to have monitor run successfully + client().admin().indices().open(OpenIndexRequest(index)).get() + // Execute monitor again and expect successful run + executeMonitorResponse = executeMonitor(monitor, id, false) + Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) + Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) + // Verify that alert is moved to history index + table = Table("asc", "id", null, 10, 0, "") + getAlertsResponse = 
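// --- Test-side sketch (not part of the patch) --------------------------------------------------
// The assertions in these tests repeat the same "fetch alerts for this monitor from a specific
// index" transport call. A hypothetical helper that captures that pattern, using the same
// GetAlertsRequest arguments as the assertions above and below:
private fun getAlertsFor(monitorId: String, alertIndex: String, size: Int = 10): List<Alert> {
    val table = Table("asc", "id", null, size, 0, "")
    val getAlertsResponse = client()
        .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, alertIndex))
        .get()
    return getAlertsResponse.alerts
}
// -----------------------------------------------------------------------------------------------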
client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertIndex)) + .get() + Assert.assertTrue(getAlertsResponse != null) + Assert.assertEquals(0, getAlertsResponse.alerts.size) + + table = Table("asc", "id", null, 10, 0, "") + getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertHistoryIndex)) + .get() + Assert.assertTrue(getAlertsResponse != null) + Assert.assertEquals(1, getAlertsResponse.alerts.size) + Assert.assertTrue(getAlertsResponse.alerts[0].errorMessage == "IndexClosedException[closed]") + Assert.assertNotNull(getAlertsResponse.alerts[0].endTime) + } + + fun `test multiple monitor error alerts cleared after successful monitor run`() { + val customAlertIndex = "custom-alert-index" + val customAlertHistoryIndex = "custom-alert-history-index" + val customAlertHistoryIndexPattern = "" + val docQuery = DocLevelQuery(query = "source:12345", name = "1") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + alertsIndex = customAlertIndex, + alertsHistoryIndex = customAlertHistoryIndex, + alertsHistoryIndexPattern = customAlertHistoryIndexPattern + ) + ) + + val monitorResponse = createMonitor(monitor) + assertFalse(monitorResponse?.id.isNullOrEmpty()) + + monitor = monitorResponse!!.monitor + val monitorId = monitorResponse.id + + // Close index to force error alert + client().admin().indices().close(CloseIndexRequest(index)).get() + + var executeMonitorResponse = executeMonitor(monitor, monitorId, false) + Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) + Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) + // Create 10 old alerts to simulate having "old error alerts"(2.6) + for (i in 1..10) { + val startTimestamp = Instant.now().minusSeconds(3600 * 24 * i.toLong()).toEpochMilli() + val oldErrorAlertAsString = """ + {"id":"$i","version":-1,"monitor_id":"$monitorId", + "schema_version":4,"monitor_version":1,"monitor_name":"geCNcHKTlp","monitor_user":{"name":"","backend_roles":[], + "roles":[],"custom_attribute_names":[],"user_requested_tenant":null},"trigger_id":"_nnk_YcB5pHgSZwYwO2r", + "trigger_name":"NoOp trigger","finding_ids":[],"related_doc_ids":[],"state":"ERROR","error_message":"some monitor error", + "alert_history":[],"severity":"","action_execution_results":[], + "start_time":$startTimestamp,"last_notification_time":$startTimestamp,"end_time":null,"acknowledged_time":null} + """.trimIndent() + + client().index( + IndexRequest(customAlertIndex) + .id("$i") + .routing(monitorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(oldErrorAlertAsString, XContentType.JSON) + ).get() + } + var table = Table("asc", "id", null, 1000, 0, "") + var getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertIndex)) + .get() + + Assert.assertTrue(getAlertsResponse != null) + Assert.assertEquals(1 + 10, getAlertsResponse.alerts.size) + val newErrorAlert = getAlertsResponse.alerts.firstOrNull { it.errorMessage == "IndexClosedException[closed]" } + Assert.assertNotNull(newErrorAlert) + Assert.assertNull(newErrorAlert!!.endTime) + + // Open index 
to have monitor run successfully + client().admin().indices().open(OpenIndexRequest(index)).get() + // Execute monitor again and expect successful run + executeMonitorResponse = executeMonitor(monitor, monitorId, false) + Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) + Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) + // Verify that alert is moved to history index + table = Table("asc", "id", null, 1000, 0, "") + getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertIndex)) + .get() + Assert.assertTrue(getAlertsResponse != null) + Assert.assertEquals(0, getAlertsResponse.alerts.size) + + table = Table("asc", "id", null, 1000, 0, "") + getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertHistoryIndex)) + .get() + Assert.assertTrue(getAlertsResponse != null) + Assert.assertEquals(11, getAlertsResponse.alerts.size) + getAlertsResponse.alerts.forEach { alert -> assertNotNull(alert.endTime) } } fun `test execute monitor with custom query index and nested mappings`() {
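// --- Test-side sketch (not part of the patch) --------------------------------------------------
// The end state both error-alert tests drive toward, expressed as a hypothetical assertion helper
// (reusing the getAlertsFor sketch above): after the first successful run that follows failed
// runs, the live alerts index holds no ERROR alerts for the monitor, and every alert moved to the
// history index carries an endTime.
private fun assertErrorAlertsClearedAndMoved(monitorId: String, alertIndex: String, historyIndex: String, expectedMoved: Int) {
    val liveAlerts = getAlertsFor(monitorId, alertIndex, size = 1000)
    Assert.assertTrue(liveAlerts.none { it.state == Alert.State.ERROR })

    val historyAlerts = getAlertsFor(monitorId, historyIndex, size = 1000)
    Assert.assertEquals(expectedMoved, historyAlerts.size)
    historyAlerts.forEach { alert -> Assert.assertNotNull(alert.endTime) }
}
// -----------------------------------------------------------------------------------------------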