@@ -93,20 +93,10 @@ impl LatestAtQueryHandle<'_> {
     /// [`Self::schema`].
     /// Columns that do not yield any data will still be present in the results, filled with null values.
     pub fn get(&self) -> RecordBatch {
-        re_tracing::profile_function!(format!("{:?}", self.query));
+        re_tracing::profile_function!(format!("{}", self.query));
 
         let columns = self.schema();
 
-        let schema = ArrowSchema {
-            fields: columns
-                .iter()
-                .map(ColumnDescriptor::to_arrow_field)
-                .collect(),
-
-            // TODO(#6889): properly some sorbet stuff we want to get in there at some point.
-            metadata: Default::default(),
-        };
-
         let all_units: HashMap<&ComponentColumnDescriptor, UnitChunkShared> = {
             re_tracing::profile_scope!("queries");
 
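The first change above swaps the profiling label from `{:?}` to `{}`, i.e. from `Debug` to `Display` formatting of the query, which presumably yields a human-readable description instead of a struct dump. A minimal sketch of the difference, using a hypothetical `Query` type that merely stands in for the real one:

    use std::fmt;

    // Hypothetical stand-in for the real query type, only to contrast the two formattings.
    #[derive(Debug)]
    struct Query {
        timeline: &'static str,
        at: i64,
    }

    impl fmt::Display for Query {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "latest-at on '{}' @ {}", self.timeline, self.at)
        }
    }

    #[test]
    fn display_reads_better_than_debug() {
        let q = Query { timeline: "log_time", at: 42 };
        assert_eq!(format!("{q:?}"), r#"Query { timeline: "log_time", at: 42 }"#);
        assert_eq!(format!("{q}"), "latest-at on 'log_time' @ 42");
    }

The eagerly-built `ArrowSchema` deleted in this hunk is not gone for good: it reappears at the end of `get`, rebuilt from the packed arrays (see the last hunk of `get` below).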
@@ -164,6 +154,10 @@ impl LatestAtQueryHandle<'_> {
             }
         }
 
+        // If the query didn't return anything at all, we just want a properly empty RecordBatch
+        // with the right schema.
+        let null_array_length = max_time_per_timeline.get(&self.query.timeline).is_some() as usize;
+
         // NOTE: Keep in mind this must match the ordering specified by `Self::schema`.
         let packed_arrays = {
             re_tracing::profile_scope!("packing");
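The added `is_some() as usize` is what makes the empty case work: a `bool` casts to 0 or 1, so when the queried timeline holds no data at all, every null-filled fallback column built in the hunks below comes out zero-length instead of one all-null row. A standalone sketch of that behavior against `arrow2` (variable names are illustrative, not from this PR):

    use arrow2::array::{Array, new_null_array};
    use arrow2::datatypes::DataType;

    #[test]
    fn no_data_means_zero_rows() {
        // A `bool` casts to 0 or 1, so a timeline with no data yields
        // zero-length null columns instead of a single all-null row.
        let timeline_has_data = false;
        let len = timeline_has_data as usize;
        let col = new_null_array(DataType::Float32, len);
        assert_eq!(col.len(), 0);
    }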
@@ -186,7 +180,12 @@ impl LatestAtQueryHandle<'_> {
                     .and_then(|(_, chunk)| chunk.timelines().get(&descr.timeline).cloned());
 
                 Some(time_column.map_or_else(
-                    || arrow2::array::new_null_array(descr.datatype.clone(), 1),
+                    || {
+                        arrow2::array::new_null_array(
+                            descr.datatype.clone(),
+                            null_array_length,
+                        )
+                    },
                     |time_column| time_column.times_array().to_boxed(),
                 ))
             }
@@ -196,16 +195,28 @@ impl LatestAtQueryHandle<'_> {
                         .get(descr)
                         .and_then(|chunk| chunk.components().get(&descr.component_name))
                         .map_or_else(
-                            || arrow2::array::new_null_array(descr.datatype.clone(), 1),
+                            || {
+                                arrow2::array::new_null_array(
+                                    descr.datatype.clone(),
+                                    null_array_length,
+                                )
+                            },
                             |list_array| list_array.to_boxed(),
                         ),
                 ),
             })
-            .collect()
+            .collect_vec()
         };
 
         RecordBatch {
-            schema,
+            schema: ArrowSchema {
+                fields: columns
+                    .iter()
+                    .zip(packed_arrays.iter())
+                    .map(|(descr, arr)| descr.to_arrow_field(Some(arr.data_type().clone())))
+                    .collect(),
+                metadata: Default::default(),
+            },
             data: ArrowChunk::new(packed_arrays),
         }
     }
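With this last hunk of `get`, the schema is assembled only after the arrays have been packed, zipping each column descriptor with the array actually produced for it, so the advertised datatypes can no longer drift from the data (`.collect_vec()` is itertools' way of pinning the intermediate to a `Vec`, which is then both zipped over and handed to `ArrowChunk::new`). A hedged sketch of the same pattern in plain `arrow2`, with made-up names and values:

    use arrow2::array::{Array, new_null_array};
    use arrow2::datatypes::{DataType, Field, Schema};

    #[test]
    fn schema_follows_the_packed_arrays() {
        // Read each field's datatype off the packed array itself rather than
        // trusting a schema computed before the query ran.
        let packed_arrays: Vec<Box<dyn Array>> = vec![new_null_array(DataType::Int64, 0)];
        let names = ["log_time"];

        let fields: Vec<Field> = names
            .iter()
            .zip(packed_arrays.iter())
            .map(|(name, arr)| Field::new(*name, arr.data_type().clone(), true))
            .collect();
        let schema = Schema::from(fields);

        assert_eq!(schema.fields[0].data_type, DataType::Int64);
    }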
@@ -225,3 +236,141 @@ impl<'a> LatestAtQueryHandle<'a> {
         })
     }
 }
+
+// ---
+
+#[cfg(test)]
+mod tests {
+    use std::sync::Arc;
+
+    use re_chunk::{ArrowArray, Chunk, EntityPath, RowId, TimeInt, TimePoint, Timeline};
+    use re_chunk_store::{
+        ChunkStore, ChunkStoreConfig, ColumnDescriptor, ComponentColumnDescriptor,
+        LatestAtQueryExpression, TimeColumnDescriptor,
+    };
+    use re_log_types::{example_components::MyPoint, StoreId, StoreKind};
+    use re_query::Caches;
+    use re_types::{
+        components::{Color, Position3D, Radius},
+        Loggable,
+    };
+
+    use crate::QueryEngine;
+
+    #[test]
+    fn empty_yields_empty() {
+        let store = ChunkStore::new(
+            StoreId::random(StoreKind::Recording),
+            ChunkStoreConfig::default(),
+        );
+        let cache = Caches::new(&store);
+        let engine = QueryEngine {
+            store: &store,
+            cache: &cache,
+        };
+
+        let query = LatestAtQueryExpression {
+            entity_path_expr: "/**".into(),
+            timeline: Timeline::log_time(),
+            at: TimeInt::MAX,
+        };
+
+        let entity_path: EntityPath = "/points".into();
+        let columns = vec![
+            ColumnDescriptor::Time(TimeColumnDescriptor {
+                timeline: Timeline::log_time(),
+                datatype: Timeline::log_time().datatype(),
+            }),
+            ColumnDescriptor::Time(TimeColumnDescriptor {
+                timeline: Timeline::log_tick(),
+                datatype: Timeline::log_tick().datatype(),
+            }),
+            ColumnDescriptor::Component(ComponentColumnDescriptor::new::<Position3D>(
+                entity_path.clone(),
+            )),
+            ColumnDescriptor::Component(ComponentColumnDescriptor::new::<Radius>(
+                entity_path.clone(),
+            )),
+            ColumnDescriptor::Component(ComponentColumnDescriptor::new::<Color>(entity_path)),
+        ];
+
+        let handle = engine.latest_at(&query, Some(columns.clone()));
+        let batch = handle.get();
+
+        // The output should be an empty RecordBatch with the right schema and empty arrays.
+        assert_eq!(0, batch.num_rows());
+        assert!(itertools::izip!(columns.iter(), batch.schema.fields.iter())
+            .all(|(descr, field)| descr.to_arrow_field(None) == *field));
+        assert!(itertools::izip!(columns.iter(), batch.data.iter())
+            .all(|(descr, array)| descr.datatype() == array.data_type()));
+    }
+
+    #[test]
+    fn static_does_yield() {
+        let mut store = ChunkStore::new(
+            StoreId::random(StoreKind::Recording),
+            ChunkStoreConfig::default(),
+        );
+
+        let entity_path: EntityPath = "/points".into();
+        let chunk = Arc::new(
+            Chunk::builder(entity_path.clone())
+                .with_component_batches(
+                    RowId::new(),
+                    TimePoint::default(),
+                    [&[MyPoint::new(1.0, 1.0), MyPoint::new(2.0, 2.0)] as _],
+                )
+                .build()
+                .unwrap(),
+        );
+        _ = store.insert_chunk(&chunk);
+
+        eprintln!("{store}");
+
+        let cache = Caches::new(&store);
+        let engine = QueryEngine {
+            store: &store,
+            cache: &cache,
+        };
+
+        let query = LatestAtQueryExpression {
+            entity_path_expr: "/**".into(),
+            timeline: Timeline::log_time(),
+            at: TimeInt::MAX,
+        };
+
+        let columns = vec![
+            ColumnDescriptor::Time(TimeColumnDescriptor {
+                timeline: Timeline::log_time(),
+                datatype: Timeline::log_time().datatype(),
+            }),
+            ColumnDescriptor::Time(TimeColumnDescriptor {
+                timeline: Timeline::log_tick(),
+                datatype: Timeline::log_tick().datatype(),
+            }),
+            ColumnDescriptor::Component(ComponentColumnDescriptor::new::<MyPoint>(
+                entity_path.clone(),
+            )),
+            ColumnDescriptor::Component(ComponentColumnDescriptor::new::<Radius>(
+                entity_path.clone(),
+            )),
+            ColumnDescriptor::Component(ComponentColumnDescriptor::new::<Color>(entity_path)),
+        ];
+
+        let handle = engine.latest_at(&query, Some(columns.clone()));
+        let batch = handle.get();
+
+        assert_eq!(1, batch.num_rows());
+        assert_eq!(
+            chunk.components().get(&MyPoint::name()).unwrap().to_boxed(),
+            itertools::izip!(batch.schema.fields.iter(), batch.data.iter())
+                .find_map(
+                    |(field, array)| (field.name == MyPoint::name().short_name())
+                        .then_some(array.clone())
+                )
+                .unwrap()
+        );
+        assert!(itertools::izip!(columns.iter(), batch.schema.fields.iter())
+            .all(|(descr, field)| descr.to_arrow_field(None) == *field));
+    }
+}