@@ -3358,6 +3358,14 @@ def insert_rows(
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
+
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table, \
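
As a hedged illustration of the limit this docstring documents (not part of the diff itself), a caller might pre-batch rows by serialized size so that each call to Client.insert_rows_json stays under the 10MB payload limit. The table ID, the rows, and the insert_in_batches helper below are hypothetical, and the size estimate ignores the request-envelope overhead that the client library adds.

    # Sketch only: client-side batching to stay under the documented 10MB
    # insertAll payload limit. Helper name, table ID, and rows are hypothetical.
    import json

    from google.cloud import bigquery

    MAX_PAYLOAD_BYTES = 10 * 1024 * 1024  # documented insertAll payload limit


    def insert_in_batches(client, table_id, rows):
        """Stream rows in batches whose serialized size stays under the limit."""
        errors = []
        batch = []
        batch_bytes = 0
        for row in rows:
            # Approximate the row's contribution to the JSON request body.
            row_bytes = len(json.dumps(row).encode("utf-8"))
            if batch and batch_bytes + row_bytes > MAX_PAYLOAD_BYTES:
                errors.extend(client.insert_rows_json(table_id, batch))
                batch, batch_bytes = [], 0
            batch.append(row)
            batch_bytes += row_bytes
        if batch:
            errors.extend(client.insert_rows_json(table_id, batch))
        return errors


    client = bigquery.Client()
    insert_errors = insert_in_batches(
        client, "my-project.my_dataset.my_table", [{"full_name": "Ada", "age": 36}]
    )
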
@@ -3424,6 +3432,13 @@ def insert_rows_from_dataframe(
     ) -> Sequence[Sequence[dict]]:
         """Insert rows into a table from a dataframe via the streaming API.
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table, \
@@ -3485,6 +3500,13 @@ def insert_rows_json(
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table \
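
For completeness, a hedged sketch of how a caller might react to the two failure modes described in these docstrings: row-level problems come back in the method's return value, while an oversized payload surfaces as an exception whose HTTP status code can be inspected. The table ID and rows are hypothetical, and the 413 handling assumes google.api_core maps unrecognized 4xx responses to a GoogleAPICallError subclass carrying the status code.

    # Sketch only: distinguishing row-level insert errors from a rejected
    # (too large) payload. Table ID and rows are hypothetical.
    from google.api_core import exceptions
    from google.cloud import bigquery

    client = bigquery.Client()
    rows = [{"full_name": "Ada", "age": 36}]

    try:
        errors = client.insert_rows_json("my-project.my_dataset.my_table", rows)
    except exceptions.GoogleAPICallError as exc:
        if exc.code == 413:
            # Payload too large: split the rows into smaller batches and retry.
            ...
        else:
            raise
    else:
        if errors:
            # Row-level insert errors reported by the BigQuery API itself.
            print(errors)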