@@ -8,12 +8,15 @@ package org.opensearch.alerting
 import org.apache.logging.log4j.LogManager
 import org.opensearch.ExceptionsHelper
 import org.opensearch.OpenSearchStatusException
+import org.opensearch.action.DocWriteRequest
+import org.opensearch.action.admin.indices.refresh.RefreshAction
+import org.opensearch.action.admin.indices.refresh.RefreshRequest
+import org.opensearch.action.bulk.BulkRequest
+import org.opensearch.action.bulk.BulkResponse
 import org.opensearch.action.index.IndexRequest
-import org.opensearch.action.index.IndexResponse
 import org.opensearch.action.search.SearchAction
 import org.opensearch.action.search.SearchRequest
 import org.opensearch.action.search.SearchResponse
-import org.opensearch.action.support.WriteRequest
 import org.opensearch.alerting.model.DocumentExecutionContext
 import org.opensearch.alerting.model.DocumentLevelTriggerRunResult
 import org.opensearch.alerting.model.InputRunResults
@@ -273,10 +276,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() {
             // If there are no triggers defined, we still want to generate findings
             if (monitor.triggers.isEmpty()) {
                 if (dryrun == false && monitor.id != Monitor.NO_ID) {
-                    docsToQueries.forEach {
-                        val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! }
-                        createFindings(monitor, monitorCtx, triggeredQueries, it.key, true)
-                    }
+                    createFindings(monitor, monitorCtx, docsToQueries, idQueryMap, true)
                 }
             } else {
                 monitor.triggers.forEach {
@@ -365,7 +365,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() {
         trigger: DocumentLevelTrigger,
         monitor: Monitor,
         idQueryMap: Map<String, DocLevelQuery>,
-        docsToQueries: Map<String, List<String>>,
+        docsToQueries: MutableMap<String, MutableList<String>>,
         queryToDocIds: Map<DocLevelQuery, Set<String>>,
         dryrun: Boolean,
         workflowRunContext: WorkflowRunContext?,
@@ -374,35 +374,33 @@ object DocumentLevelMonitorRunner : MonitorRunner() {
         val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger)
         val triggerResult = monitorCtx.triggerService!!.runDocLevelTrigger(monitor, trigger, queryToDocIds)
 
-        val findings = mutableListOf<String>()
-        val findingDocPairs = mutableListOf<Pair<String, String>>()
+        val triggerFindingDocPairs = mutableListOf<Pair<String, String>>()
 
         // TODO: Implement throttling for findings
-        docsToQueries.forEach {
-            val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! }
-            val findingId = createFindings(
-                monitor,
-                monitorCtx,
-                triggeredQueries,
-                it.key,
-                !dryrun && monitor.id != Monitor.NO_ID,
-                executionId
-            )
-            findings.add(findingId)
+        val findingToDocPairs = createFindings(
+            monitor,
+            monitorCtx,
+            docsToQueries,
+            idQueryMap,
+            !dryrun && monitor.id != Monitor.NO_ID,
+            executionId
+        )
 
-            if (triggerResult.triggeredDocs.contains(it.key)) {
-                findingDocPairs.add(Pair(findingId, it.key))
+        findingToDocPairs.forEach {
+            // Only pick those entries whose docs have triggers associated with them
+            if (triggerResult.triggeredDocs.contains(it.second)) {
+                triggerFindingDocPairs.add(Pair(it.first, it.second))
             }
         }
 
         val actionCtx = triggerCtx.copy(
             triggeredDocs = triggerResult.triggeredDocs,
-            relatedFindings = findings,
+            relatedFindings = findingToDocPairs.map { it.first },
             error = monitorResult.error ?: triggerResult.error
         )
 
         val alerts = mutableListOf<Alert>()
-        findingDocPairs.forEach {
+        triggerFindingDocPairs.forEach {
             val alert = monitorCtx.alertService!!.composeDocLevelAlert(
                 listOf(it.first),
                 listOf(it.second),
@@ -461,51 +459,92 @@ object DocumentLevelMonitorRunner : MonitorRunner() {
         return triggerResult
     }
 
+    /**
+     * 1. Bulk index all findings based on shouldCreateFinding flag
+     * 2. invoke publishFinding() to kickstart auto-correlations
+     * 3. Returns a list of pairs for finding id to doc id
+     */
     private suspend fun createFindings(
         monitor: Monitor,
         monitorCtx: MonitorRunnerExecutionContext,
-        docLevelQueries: List<DocLevelQuery>,
-        matchingDocId: String,
+        docsToQueries: MutableMap<String, MutableList<String>>,
+        idQueryMap: Map<String, DocLevelQuery>,
         shouldCreateFinding: Boolean,
         workflowExecutionId: String? = null,
-    ): String {
-        // Before the "|" is the doc id and after the "|" is the index
-        val docIndex = matchingDocId.split("|")
+    ): List<Pair<String, String>> {
 
-        val finding = Finding(
-            id = UUID.randomUUID().toString(),
-            relatedDocIds = listOf(docIndex[0]),
-            correlatedDocIds = listOf(docIndex[0]),
-            monitorId = monitor.id,
-            monitorName = monitor.name,
-            index = docIndex[1],
-            docLevelQueries = docLevelQueries,
-            timestamp = Instant.now(),
-            executionId = workflowExecutionId
-        )
+        val findingDocPairs = mutableListOf<Pair<String, String>>()
+        val findings = mutableListOf<Finding>()
+        val indexRequests = mutableListOf<IndexRequest>()
 
-        val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string()
-        logger.debug("Findings: $findingStr")
+        docsToQueries.forEach {
+            val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! }
 
-        if (shouldCreateFinding) {
-            val indexRequest = IndexRequest(monitor.dataSources.findingsIndex)
-                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
-                .source(findingStr, XContentType.JSON)
-                .id(finding.id)
-                .routing(finding.id)
+            // Before the "|" is the doc id and after the "|" is the index
+            val docIndex = it.key.split("|")
 
-            monitorCtx.client!!.suspendUntil<Client, IndexResponse> {
-                monitorCtx.client!!.index(indexRequest, it)
+            val finding = Finding(
+                id = UUID.randomUUID().toString(),
+                relatedDocIds = listOf(docIndex[0]),
+                correlatedDocIds = listOf(docIndex[0]),
+                monitorId = monitor.id,
+                monitorName = monitor.name,
+                index = docIndex[1],
+                docLevelQueries = triggeredQueries,
+                timestamp = Instant.now(),
+                executionId = workflowExecutionId
+            )
+            findingDocPairs.add(Pair(finding.id, it.key))
+            findings.add(finding)
+
+            val findingStr =
+                finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS)
+                    .string()
+            logger.debug("Findings: $findingStr")
+
+            if (shouldCreateFinding) {
+                indexRequests += IndexRequest(monitor.dataSources.findingsIndex)
+                    .source(findingStr, XContentType.JSON)
+                    .id(finding.id)
+                    .opType(DocWriteRequest.OpType.CREATE)
             }
         }
 
+        if (indexRequests.isNotEmpty()) {
+            bulkIndexFindings(monitor, monitorCtx, indexRequests)
+        }
+
         try {
-            publishFinding(monitor, monitorCtx, finding)
+            findings.forEach { finding ->
+                publishFinding(monitor, monitorCtx, finding)
+            }
         } catch (e: Exception) {
             // suppress exception
             logger.error("Optional finding callback failed", e)
         }
-        return finding.id
+        return findingDocPairs
+    }
+
+    private suspend fun bulkIndexFindings(
+        monitor: Monitor,
+        monitorCtx: MonitorRunnerExecutionContext,
+        indexRequests: List<IndexRequest>
+    ) {
+        indexRequests.chunked(monitorCtx.findingsIndexBatchSize).forEach { batch ->
+            val bulkResponse: BulkResponse = monitorCtx.client!!.suspendUntil {
+                bulk(BulkRequest().add(batch), it)
+            }
+            if (bulkResponse.hasFailures()) {
+                bulkResponse.items.forEach { item ->
+                    if (item.isFailed) {
+                        logger.error("Failed indexing the finding ${item.id} of monitor [${monitor.id}]")
+                    }
+                }
+            } else {
+                logger.debug("[${bulkResponse.items.size}] All findings successfully indexed.")
+            }
+        }
+        monitorCtx.client!!.execute(RefreshAction.INSTANCE, RefreshRequest(monitor.dataSources.findingsIndex))
     }
 
     private fun publishFinding(
@@ -629,7 +668,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() {
                     matchingDocs.addAll(getAllDocs(hits, index, concreteIndex, monitor.id, conflictingFields))
                 }
             } catch (e: Exception) {
-                logger.warn("Failed to run for shard $shard. Error: ${e.message}")
+                logger.error("Failed to run for shard $shard. Error: ${e.message}")
             }
         }
         return matchingDocs
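
For readers who want to see the batching pattern that the new bulkIndexFindings() introduces, stripped of the plugin's suspendUntil coroutine wrapper, here is a minimal sketch of the same idea: chunk the prepared IndexRequests (the PR sizes chunks with monitorCtx.findingsIndexBatchSize), send each chunk in a single bulk call, and log per-item failures without aborting the run. The helper name bulkIndexInBatches, the blocking actionGet() call, and the standalone logger are illustrative assumptions, not part of this PR.

    import org.apache.logging.log4j.LogManager
    import org.opensearch.action.bulk.BulkRequest
    import org.opensearch.action.index.IndexRequest
    import org.opensearch.client.Client

    private val logger = LogManager.getLogger("BulkIndexSketch")

    // Illustrative sketch only: index the given requests in fixed-size batches.
    fun bulkIndexInBatches(client: Client, requests: List<IndexRequest>, batchSize: Int) {
        requests.chunked(batchSize).forEach { batch ->
            // One bulk round-trip per batch instead of one index call per document.
            val response = client.bulk(BulkRequest().add(batch)).actionGet()
            if (response.hasFailures()) {
                // Log each failed item and keep going; one bad document should not fail the batch.
                response.items.filter { it.isFailed }.forEach { item ->
                    logger.error("Failed to index ${item.id}: ${item.failureMessage}")
                }
            }
        }
    }

The PR itself keeps the call asynchronous through suspendUntil and issues a single RefreshRequest against monitor.dataSources.findingsIndex after all batches are written, replacing the per-document RefreshPolicy.IMMEDIATE used previously.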