10
10
import static io .camunda .zeebe .util .buffer .BufferUtil .wrapString ;
11
11
12
12
import io .camunda .zeebe .engine .metrics .JobMetrics ;
13
+ import io .camunda .zeebe .engine .processing .job .JobBatchCollector .TooLargeJob ;
13
14
import io .camunda .zeebe .engine .processing .streamprocessor .TypedRecord ;
14
15
import io .camunda .zeebe .engine .processing .streamprocessor .TypedRecordProcessor ;
15
16
import io .camunda .zeebe .engine .processing .streamprocessor .writers .StateWriter ;
18
19
import io .camunda .zeebe .engine .processing .streamprocessor .writers .TypedStreamWriter ;
19
20
import io .camunda .zeebe .engine .processing .streamprocessor .writers .Writers ;
20
21
import io .camunda .zeebe .engine .state .KeyGenerator ;
21
- import io .camunda .zeebe .engine .state .immutable .JobState ;
22
- import io .camunda .zeebe .engine .state .immutable .VariableState ;
23
22
import io .camunda .zeebe .engine .state .immutable .ZeebeState ;
24
- import io .camunda .zeebe .msgpack .value .DocumentValue ;
25
- import io .camunda .zeebe .msgpack .value .LongValue ;
26
- import io .camunda .zeebe .msgpack .value .StringValue ;
27
- import io .camunda .zeebe .msgpack .value .ValueArray ;
28
23
import io .camunda .zeebe .protocol .impl .record .value .incident .IncidentRecord ;
29
24
import io .camunda .zeebe .protocol .impl .record .value .job .JobBatchRecord ;
30
25
import io .camunda .zeebe .protocol .impl .record .value .job .JobRecord ;
33
28
import io .camunda .zeebe .protocol .record .intent .JobBatchIntent ;
34
29
import io .camunda .zeebe .protocol .record .value .ErrorType ;
35
30
import io .camunda .zeebe .util .ByteValue ;
36
- import java .util .Collection ;
37
- import java .util .concurrent .atomic .AtomicInteger ;
31
+ import io .camunda .zeebe .util .Either ;
38
32
import org .agrona .DirectBuffer ;
39
- import org .agrona .ExpandableArrayBuffer ;
40
- import org .agrona .MutableDirectBuffer ;
41
- import org .agrona .collections .ObjectHashSet ;
42
- import org .agrona .concurrent .UnsafeBuffer ;
43
33
44
34
public final class JobBatchActivateProcessor implements TypedRecordProcessor <JobBatchRecord > {
45
35
46
36
private final StateWriter stateWriter ;
47
- private final VariableState variableState ;
48
37
private final TypedRejectionWriter rejectionWriter ;
49
38
private final TypedResponseWriter responseWriter ;
50
-
51
- private final JobState jobState ;
39
+ private final JobBatchCollector jobBatchCollector ;
52
40
private final KeyGenerator keyGenerator ;
53
- private final long maxRecordLength ;
54
- private final long maxJobBatchLength ;
55
-
56
- private final ObjectHashSet <DirectBuffer > variableNames = new ObjectHashSet <>();
57
41
private final JobMetrics jobMetrics ;
58
42
59
43
/**
 * Creates the processor for {@code JobBatchIntent.ACTIVATE} commands.
 *
 * <p>NOTE(review): this block was reconstructed from a corrupted diff rendering (interleaved
 * old/new line numbers and +/- markers); it reflects the post-change ("+") side of the diff.
 *
 * @param writers provides the state/rejection/response writers used by this processor
 * @param state engine state; only the job and variable state are read, via the collector
 * @param keyGenerator generates the key for each activated job batch
 * @param jobMetrics metrics sink updated when jobs are activated
 */
public JobBatchActivateProcessor(
    final Writers writers,
    final ZeebeState state,
    final KeyGenerator keyGenerator,
    final JobMetrics jobMetrics) {
  stateWriter = writers.state();
  rejectionWriter = writers.rejection();
  responseWriter = writers.response();

  // The collector owns job/variable lookup; it is handed the state writer's capacity check
  // (canWriteEventOfLength) so it can stop collecting before the batch event would exceed
  // the maximum writable event length.
  jobBatchCollector =
      new JobBatchCollector(
          state.getJobState(), state.getVariableState(), stateWriter::canWriteEventOfLength);

  this.keyGenerator = keyGenerator;
  this.jobMetrics = jobMetrics;
}
78
59
@@ -97,95 +78,21 @@ private boolean isValid(final JobBatchRecord record) {
97
78
98
79
private void activateJobs (final TypedRecord <JobBatchRecord > record ) {
99
80
final JobBatchRecord value = record .getValue ();
100
-
101
81
final long jobBatchKey = keyGenerator .nextKey ();
102
82
103
- final AtomicInteger amount = new AtomicInteger (value .getMaxJobsToActivate ());
104
- collectJobsToActivate (record , amount );
105
-
106
- stateWriter .appendFollowUpEvent (jobBatchKey , JobBatchIntent .ACTIVATED , value );
107
- responseWriter .writeEventOnCommand (jobBatchKey , JobBatchIntent .ACTIVATED , value , record );
108
-
109
- final var activatedJobsCount = record .getValue ().getJobKeys ().size ();
110
- jobMetrics .jobActivated (value .getType (), activatedJobsCount );
111
- }
112
-
113
- private void collectJobsToActivate (
114
- final TypedRecord <JobBatchRecord > record , final AtomicInteger amount ) {
115
- final JobBatchRecord value = record .getValue ();
116
- final ValueArray <JobRecord > jobIterator = value .jobs ();
117
- final ValueArray <LongValue > jobKeyIterator = value .jobKeys ();
118
-
119
- // collect jobs for activation
120
- variableNames .clear ();
121
- final ValueArray <StringValue > jobBatchVariables = value .variables ();
122
-
123
- jobBatchVariables .forEach (
124
- v -> {
125
- final MutableDirectBuffer nameCopy = new UnsafeBuffer (new byte [v .getValue ().capacity ()]);
126
- nameCopy .putBytes (0 , v .getValue (), 0 , v .getValue ().capacity ());
127
- variableNames .add (nameCopy );
128
- });
129
-
130
- jobState .forEachActivatableJobs (
131
- value .getTypeBuffer (),
132
- (key , jobRecord ) -> {
133
- int remainingAmount = amount .get ();
134
- final long deadline = record .getTimestamp () + value .getTimeout ();
135
- jobRecord .setDeadline (deadline ).setWorker (value .getWorkerBuffer ());
136
-
137
- // fetch and set variables, required here to already have the full size of the job record
138
- final long elementInstanceKey = jobRecord .getElementInstanceKey ();
139
- if (elementInstanceKey >= 0 ) {
140
- final DirectBuffer variables = collectVariables (variableNames , elementInstanceKey );
141
- jobRecord .setVariables (variables );
142
- } else {
143
- jobRecord .setVariables (DocumentValue .EMPTY_DOCUMENT );
144
- }
145
-
146
- if (remainingAmount >= 0
147
- && (record .getLength () + jobRecord .getLength ()) <= maxJobBatchLength ) {
148
-
149
- remainingAmount = amount .decrementAndGet ();
150
- jobKeyIterator .add ().setValue (key );
151
- final JobRecord arrayValueJob = jobIterator .add ();
152
-
153
- // clone job record since buffer is reused during iteration
154
- final ExpandableArrayBuffer buffer = new ExpandableArrayBuffer (jobRecord .getLength ());
155
- jobRecord .write (buffer , 0 );
156
- arrayValueJob .wrap (buffer );
157
- } else {
158
- value .setTruncated (true );
159
-
160
- if (value .getJobs ().isEmpty ()) {
161
- raiseIncidentJobTooLargeForMessageSize (key , jobRecord );
162
- }
163
-
164
- return false ;
165
- }
166
-
167
- return remainingAmount > 0 ;
168
- });
169
- }
83
+ final Either <TooLargeJob , Integer > result = jobBatchCollector .collectJobs (record );
84
+ final var activatedJobCount = result .getOrElse (0 );
85
+ result .ifLeft (
86
+ largeJob -> raiseIncidentJobTooLargeForMessageSize (largeJob .key (), largeJob .record ()));
170
87
171
- private DirectBuffer collectVariables (
172
- final Collection <DirectBuffer > variableNames , final long elementInstanceKey ) {
173
- final DirectBuffer variables ;
174
- if (variableNames .isEmpty ()) {
175
- variables = variableState .getVariablesAsDocument (elementInstanceKey );
176
- } else {
177
- variables = variableState .getVariablesAsDocument (elementInstanceKey , variableNames );
178
- }
179
- return variables ;
88
+ activateJobBatch (record , value , jobBatchKey , activatedJobCount );
180
89
}
181
90
182
91
private void rejectCommand (final TypedRecord <JobBatchRecord > record ) {
183
92
final RejectionType rejectionType ;
184
93
final String rejectionReason ;
185
-
186
94
final JobBatchRecord value = record .getValue ();
187
-
188
- final String format = "Expected to activate job batch with %s to be %s, but it was %s" ;
95
+ final var format = "Expected to activate job batch with %s to be %s, but it was %s" ;
189
96
190
97
if (value .getMaxJobsToActivate () < 1 ) {
191
98
rejectionType = RejectionType .INVALID_ARGUMENT ;
@@ -212,18 +119,25 @@ private void rejectCommand(final TypedRecord<JobBatchRecord> record) {
212
119
responseWriter .writeRejectionOnCommand (record , rejectionType , rejectionReason );
213
120
}
214
121
215
- private void raiseIncidentJobTooLargeForMessageSize (final long jobKey , final JobRecord job ) {
216
-
217
- final String messageSize = ByteValue .prettyPrint (maxRecordLength );
122
+ private void activateJobBatch (
123
+ final TypedRecord <JobBatchRecord > record ,
124
+ final JobBatchRecord value ,
125
+ final long jobBatchKey ,
126
+ final Integer activatedCount ) {
127
+ stateWriter .appendFollowUpEvent (jobBatchKey , JobBatchIntent .ACTIVATED , value );
128
+ responseWriter .writeEventOnCommand (jobBatchKey , JobBatchIntent .ACTIVATED , value , record );
129
+ jobMetrics .jobActivated (value .getType (), activatedCount );
130
+ }
218
131
132
+ private void raiseIncidentJobTooLargeForMessageSize (final long jobKey , final JobRecord job ) {
133
+ final String messageSize = ByteValue .prettyPrint (stateWriter .getMaxEventLength ());
219
134
final DirectBuffer incidentMessage =
220
135
wrapString (
221
136
String .format (
222
137
"The job with key '%s' can not be activated because it is larger than the configured message size (%s). "
223
138
+ "Try to reduce the size by reducing the number of fetched variables or modifying the variable values." ,
224
139
jobKey , messageSize ));
225
-
226
- final IncidentRecord incidentEvent =
140
+ final var incidentEvent =
227
141
new IncidentRecord ()
228
142
.setErrorType (ErrorType .MESSAGE_SIZE_EXCEEDED )
229
143
.setErrorMessage (incidentMessage )
0 commit comments