@@ -395,6 +395,7 @@ def import_files(
     transformation_config: Optional[TransformationConfig] = None,
     timeout: int = 600,
     max_embedding_requests_per_min: int = 1000,
+    import_result_sink: Optional[str] = None,
     partial_failures_sink: Optional[str] = None,
     parser: Optional[LayoutParserConfig] = None,
 ) -> ImportRagFilesResponse:
@@ -509,8 +510,17 @@ def import_files(
             here. If unspecified, a default value of 1,000
             QPM would be used.
         timeout: Default is 600 seconds.
-        partial_failures_sink: Either a GCS path to store partial failures or a
-            BigQuery table to store partial failures. The format is
+        import_result_sink: Either a GCS path to store import results or a
+            BigQuery table to store import results. The format is
+            "gs://my-bucket/my/object.ndjson" for GCS or
+            "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
+            object cannot be used. However, the BigQuery table may or may not
+            exist - if it does not exist, it will be created. If it does exist,
+            the schema will be checked and the import results will be appended
+            to the table.
+        partial_failures_sink: Deprecated. Prefer to use `import_result_sink`.
+            Either a GCS path to store partial failures or a BigQuery table to
+            store partial failures. The format is
             "gs://my-bucket/my/object.ndjson" for GCS or
             "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
             object cannot be used. However, the BigQuery table may or may not
@@ -534,6 +544,7 @@ def import_files(
         source=source,
         transformation_config=transformation_config,
         max_embedding_requests_per_min=max_embedding_requests_per_min,
+        import_result_sink=import_result_sink,
         partial_failures_sink=partial_failures_sink,
         parser=parser,
     )
@@ -552,6 +563,7 @@ async def import_files_async(
     source: Optional[Union[SlackChannelsSource, JiraSource, SharePointSources]] = None,
     transformation_config: Optional[TransformationConfig] = None,
     max_embedding_requests_per_min: int = 1000,
+    import_result_sink: Optional[str] = None,
     partial_failures_sink: Optional[str] = None,
     parser: Optional[LayoutParserConfig] = None,
 ) -> operation_async.AsyncOperation:
@@ -666,8 +678,17 @@ async def import_files_async(
             page on the project to set an appropriate value
             here. If unspecified, a default value of 1,000
             QPM would be used.
-        partial_failures_sink: Either a GCS path to store partial failures or a
-            BigQuery table to store partial failures. The format is
+        import_result_sink: Either a GCS path to store import results or a
+            BigQuery table to store import results. The format is
+            "gs://my-bucket/my/object.ndjson" for GCS or
+            "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
+            object cannot be used. However, the BigQuery table may or may not
+            exist - if it does not exist, it will be created. If it does exist,
+            the schema will be checked and the import results will be appended
+            to the table.
+        partial_failures_sink: Deprecated. Prefer to use `import_result_sink`.
+            Either a GCS path to store partial failures or a BigQuery table to
+            store partial failures. The format is
             "gs://my-bucket/my/object.ndjson" for GCS or
             "bq://my-project.my-dataset.my-table" for BigQuery. An existing GCS
             object cannot be used. However, the BigQuery table may or may not
@@ -691,6 +712,7 @@ async def import_files_async(
         source=source,
         transformation_config=transformation_config,
         max_embedding_requests_per_min=max_embedding_requests_per_min,
+        import_result_sink=import_result_sink,
         partial_failures_sink=partial_failures_sink,
         parser=parser,
     )
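And the async counterpart, under the same placeholder assumptions. Per the diffed return type, `import_files_async` yields a long-running operation whose result is awaited separately; the BigQuery sink form is shown here since the table is created if it does not exist:

```python
# Async variant sketch; same placeholder names as above.
import asyncio

from vertexai import rag


async def run_import() -> None:
    operation = await rag.import_files_async(
        corpus_name="projects/my-project/locations/us-central1/ragCorpora/123",
        paths=["gs://my-bucket/my-docs/"],
        # BigQuery sink form; created on first use, appended to thereafter.
        import_result_sink="bq://my-project.my-dataset.my-table",
    )
    result = await operation.result()  # resolves to ImportRagFilesResponse
    print(result.imported_rag_files_count)


asyncio.run(run_import())
```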