
Commit ec1ffce

cpcloud authored and kszucs committed
docs(duckdb): add to_pyarrow_batches documentation
1 parent abc939e commit ec1ffce

File tree

1 file changed (+19 −3 lines)


ibis/backends/duckdb/__init__.py

Lines changed: 19 additions & 3 deletions
@@ -381,9 +381,25 @@ def to_pyarrow_batches(
         chunk_size: int = 1_000_000,
         **_: Any,
     ) -> pa.ipc.RecordBatchReader:
-        # TODO: duckdb seems to not care about the `chunk_size` argument
-        # and returns batches in 1024 row chunks
-        _ = self._import_pyarrow()
+        """Return a stream of record batches.
+
+        The returned `RecordBatchReader` contains a cursor with an unbounded lifetime.
+
+        For analytics use cases this is usually nothing to fret about. In some cases you
+        may need to explicitly release the cursor.
+
+        Parameters
+        ----------
+        expr
+            Ibis expression
+        params
+            Bound parameters
+        limit
+            Limit the result to this number of rows
+        chunk_size
+            !!! warning "DuckDB returns 1024 size batches regardless of what argument is passed."
+        """
+        self._import_pyarrow()

         from ibis.backends.duckdb.pyarrow import IbisRecordBatchReader

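For reference, here is a minimal usage sketch of the documented method (not part of the commit). The table and column names are invented for illustration, and it assumes an ibis version where `ibis.memtable` is available:

    import ibis

    con = ibis.duckdb.connect()  # in-memory DuckDB database
    t = ibis.memtable({"x": list(range(10_000))})  # hypothetical toy table

    # Per the warning in the docstring, DuckDB yields 1024-row batches no
    # matter what `chunk_size` is passed, and the reader holds a cursor
    # with an unbounded lifetime until the stream is fully consumed.
    reader = con.to_pyarrow_batches(t, chunk_size=1_000_000)
    for batch in reader:  # each item is a pyarrow.RecordBatch
        print(batch.num_rows)  # 1024 for every batch except possibly the last

If you need to release the cursor before the stream is exhausted, recent pyarrow versions expose `RecordBatchReader.close()` (and the context-manager protocol), which is one way to do so explicitly; this is an assumption about your pyarrow version, not something the commit itself documents.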