1 file changed: +19 −3 lines changed

@@ -381,9 +381,25 @@ def to_pyarrow_batches(
         chunk_size: int = 1_000_000,
         **_: Any,
     ) -> pa.ipc.RecordBatchReader:
-        # TODO: duckdb seems to not care about the `chunk_size` argument
-        # and returns batches in 1024 row chunks
-        _ = self._import_pyarrow()
+        """Return a stream of record batches.
+
+        The returned `RecordBatchReader` contains a cursor with an unbounded lifetime.
+
+        For analytics use cases this is usually nothing to fret about. In some cases
+        you may need to explicitly release the cursor.
+
+        Parameters
+        ----------
+        expr
+            Ibis expression
+        params
+            Bound parameters
+        limit
+            Limit the result to this number of rows
+        chunk_size
+            !!! warning "DuckDB returns 1024-row batches regardless of what argument is passed."
+        """
+        self._import_pyarrow()

         from ibis.backends.duckdb.pyarrow import IbisRecordBatchReader
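For context, here is a minimal sketch (not part of this PR) of the behavior the new docstring describes. It assumes ibis with the DuckDB backend installed; the connection and memtable contents below are illustrative.

```python
import ibis

con = ibis.duckdb.connect()  # in-memory DuckDB database
t = ibis.memtable({"x": list(range(5_000))})

# chunk_size is accepted but, per the warning in the docstring,
# DuckDB hands back 1024-row batches regardless of the value passed.
reader = con.to_pyarrow_batches(t, chunk_size=1_000_000)

for batch in reader:       # each element is a pyarrow.RecordBatch
    print(batch.num_rows)  # 1024, except possibly the final batch

# The reader holds a cursor with an unbounded lifetime; exhausting it
# (as above) or closing it explicitly releases the underlying cursor.
reader.close()
```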