@@ -395,6 +395,7 @@ def import_files(
     transformation_config: Optional[TransformationConfig] = None,
     timeout: int = 600,
     max_embedding_requests_per_min: int = 1000,
+    import_result_sink: Optional[str] = None,
     partial_failures_sink: Optional[str] = None,
     parser: Optional[LayoutParserConfig] = None,
 ) -> ImportRagFilesResponse:
@@ -509,8 +510,17 @@ def import_files(
             here. If unspecified, a default value of 1,000
             QPM would be used.
         timeout: Default is 600 seconds.
-        partial_failures_sink: Either a GCS path to store partial failures or a
-            BigQuery table to store partial failures. The format is
+        import_result_sink: Either a GCS path to store import results or a
+            BigQuery table to store import results. The format is
+            "gs://my-bucket/my/object.ndjson" for GCS or
+            "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
+            object cannot be used. However, the BigQuery table may or may not
+            exist - if it does not exist, it will be created. If it does exist,
+            the schema will be checked and the import results will be appended
+            to the table.
+        partial_failures_sink: Deprecated. Prefer to use `import_result_sink`.
+            Either a GCS path to store partial failures or a BigQuery table to
+            store partial failures. The format is
             "gs://my-bucket/my/object.ndjson" for GCS or
             "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
             object cannot be used. However, the BigQuery table may or may not
@@ -534,6 +544,7 @@ def import_files(
         source=source,
         transformation_config=transformation_config,
         max_embedding_requests_per_min=max_embedding_requests_per_min,
+        import_result_sink=import_result_sink,
         partial_failures_sink=partial_failures_sink,
         parser=parser,
     )
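For orientation, a minimal usage sketch of the new parameter on the synchronous API. The corpus name, bucket paths, and the top-level `paths` argument are illustrative assumptions not shown in this diff, and the `rag` namespace may be `vertexai.preview.rag` depending on the SDK version:

from vertexai import rag

# Placeholder corpus and paths; the GCS sink object must not already exist.
response = rag.import_files(
    corpus_name="projects/my-project/locations/us-central1/ragCorpora/123",
    paths=["gs://my-bucket/my-docs/"],
    import_result_sink="gs://my-bucket/results/import.ndjson",
)
# Per-file outcomes land in the sink; aggregate counts are on the response.
print(response.imported_rag_files_count)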
@@ -552,6 +563,7 @@ async def import_files_async(
     source: Optional[Union[SlackChannelsSource, JiraSource, SharePointSources]] = None,
     transformation_config: Optional[TransformationConfig] = None,
     max_embedding_requests_per_min: int = 1000,
+    import_result_sink: Optional[str] = None,
     partial_failures_sink: Optional[str] = None,
     parser: Optional[LayoutParserConfig] = None,
 ) -> operation_async.AsyncOperation:
@@ -666,8 +678,17 @@ async def import_files_async(
             page on the project to set an appropriate value
             here. If unspecified, a default value of 1,000
             QPM would be used.
-        partial_failures_sink: Either a GCS path to store partial failures or a
-            BigQuery table to store partial failures. The format is
+        import_result_sink: Either a GCS path to store import results or a
+            BigQuery table to store import results. The format is
+            "gs://my-bucket/my/object.ndjson" for GCS or
+            "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
+            object cannot be used. However, the BigQuery table may or may not
+            exist - if it does not exist, it will be created. If it does exist,
+            the schema will be checked and the import results will be appended
+            to the table.
+        partial_failures_sink: Deprecated. Prefer to use `import_result_sink`.
+            Either a GCS path to store partial failures or a BigQuery table to
+            store partial failures. The format is
             "gs://my-bucket/my/object.ndjson" for GCS or
             "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
             object cannot be used. However, the BigQuery table may or may not
@@ -691,6 +712,7 @@ async def import_files_async(
         source=source,
         transformation_config=transformation_config,
         max_embedding_requests_per_min=max_embedding_requests_per_min,
+        import_result_sink=import_result_sink,
         partial_failures_sink=partial_failures_sink,
         parser=parser,
     )
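A matching sketch for the async variant, which per the signature above returns a long-running operation rather than the response directly; here the results go to a BigQuery table, created if absent and appended to otherwise. The corpus name, paths, and table are again placeholder assumptions:

import asyncio

from vertexai import rag

async def main():
    # Placeholder corpus; the bq:// table is created or appended to.
    operation = await rag.import_files_async(
        corpus_name="projects/my-project/locations/us-central1/ragCorpora/123",
        paths=["gs://my-bucket/my-docs/"],
        import_result_sink="bq://my-project.my-dataset.import_results",
    )
    # AsyncOperation.result() is awaitable and yields the import response.
    return await operation.result()

asyncio.run(main())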