@@ -145,15 +145,16 @@ def batch_generator(self, iterable, batch_size=None):
 
     def __call__(self, images, text_boxes: List[List[List[float]]], batch_size=None, include_maps=False) -> List[TextDetectionResult]:
         detection_generator = self.batch_detection(images, batch_size=batch_size, static_cache=settings.DETECTOR_STATIC_CACHE)
-        text_box_generator = self.batch_generator(text_boxes)
+        text_box_generator = self.batch_generator(text_boxes, batch_size=batch_size)
 
         postprocessing_futures = []
         max_workers = min(settings.DETECTOR_POSTPROCESSING_CPU_WORKERS, len(images))
         parallelize = not settings.IN_STREAMLIT and len(images) >= settings.DETECTOR_MIN_PARALLEL_THRESH
         executor = ThreadPoolExecutor if parallelize else FakeExecutor
         with executor(max_workers=max_workers) as e:
             for (preds, orig_sizes), batch_text_boxes in zip(detection_generator, text_box_generator):
-                for pred, orig_size, text_boxes in zip(preds, orig_sizes, batch_text_boxes):
-                    postprocessing_futures.append(e.submit(parallel_get_inline_boxes, pred, orig_size, text_boxes, include_maps))
+                for pred, orig_size, image_text_boxes in zip(preds, orig_sizes, batch_text_boxes):
+                    postprocessing_futures.append(e.submit(parallel_get_inline_boxes, pred, orig_size, image_text_boxes, include_maps))
 
+        assert len(postprocessing_futures) == len(images) == len(text_boxes)  # Ensure we have a 1:1 mapping
         return [future.result() for future in postprocessing_futures]
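Note on the change (an illustrative sketch, not the project's actual helpers): forwarding batch_size into batch_generator keeps the text-box chunks the same size as the detection batches, so the outer zip() pairs each prediction batch with the matching boxes instead of letting the two generators drift apart when a non-default batch_size is passed. Renaming the inner loop variable to image_text_boxes also stops it from shadowing the text_boxes argument, which the new assert reads after the loop. Below is a minimal sketch of the batching and executor-fallback pattern this code relies on, assuming batch_generator yields fixed-size slices and FakeExecutor is a synchronous stand-in for ThreadPoolExecutor; the names come from the diff, but the bodies here are assumptions.

from concurrent.futures import Future
from typing import Iterable, List, Optional

DEFAULT_BATCH_SIZE = 32  # assumed default; the real value would come from settings


def batch_generator(iterable: List, batch_size: Optional[int] = None) -> Iterable[List]:
    # Yield consecutive fixed-size chunks. Passing the same batch_size used by
    # batch_detection keeps these chunks aligned with the detection batches.
    if batch_size is None:
        batch_size = DEFAULT_BATCH_SIZE
    for i in range(0, len(iterable), batch_size):
        yield iterable[i:i + batch_size]


class FakeExecutor:
    # Synchronous drop-in for ThreadPoolExecutor, used when parallel
    # postprocessing is disabled (small workloads, Streamlit, etc.).
    def __init__(self, max_workers: int = 1):
        self.max_workers = max_workers

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False

    def submit(self, fn, *args, **kwargs) -> Future:
        # Run immediately in the caller's thread and wrap the result in a Future,
        # so callers can use future.result() identically in both code paths.
        future: Future = Future()
        future.set_result(fn(*args, **kwargs))
        return future

With both generators chunked identically, every image contributes exactly one postprocessing future, which is what the closing assert verifies.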