
Commit ef80003

yeesian authored and copybara-github committed
chore: Do not specify PackageSpec.dependency_files_gcs_uri if extra_packages is empty
PiperOrigin-RevId: 673523227
1 parent 0bc608a commit ef80003
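
For readers skimming the change: the new behavior is that the dependency-files URI is only attached to the package spec when extra_packages is actually non-empty. The snippet below is a minimal, self-contained sketch of that pattern, not the SDK source: PackageSpec is a stand-in dataclass, build_package_spec is a hypothetical helper, and the filename constants are placeholder values (only dependencies.tar.gz is named in the diff itself).

# Illustrative sketch only. PackageSpec stands in for the Vertex AI proto
# message of the same name; build_package_spec is a hypothetical helper that
# mirrors the conditional-assignment pattern introduced by this commit.
from dataclasses import dataclass
from typing import Optional, Sequence

_BLOB_FILENAME = "reasoning_engine.pkl"       # placeholder value for this sketch
_REQUIREMENTS_FILE = "requirements.txt"       # placeholder value for this sketch
_EXTRA_PACKAGES_FILE = "dependencies.tar.gz"  # named in the removed docstring below


@dataclass
class PackageSpec:
    pickle_object_gcs_uri: str = ""
    requirements_gcs_uri: Optional[str] = None
    dependency_files_gcs_uri: Optional[str] = None


def build_package_spec(
    staging_bucket: str,
    gcs_dir_name: str,
    requirements: Optional[Sequence[str]] = None,
    extra_packages: Optional[Sequence[str]] = None,
) -> PackageSpec:
    spec = PackageSpec(
        pickle_object_gcs_uri=f"{staging_bucket}/{gcs_dir_name}/{_BLOB_FILENAME}",
    )
    if requirements:
        spec.requirements_gcs_uri = f"{staging_bucket}/{gcs_dir_name}/{_REQUIREMENTS_FILE}"
    # The key change: leave dependency_files_gcs_uri unset unless the caller
    # actually supplied extra packages ([] and None both skip it).
    if extra_packages:
        spec.dependency_files_gcs_uri = f"{staging_bucket}/{gcs_dir_name}/{_EXTRA_PACKAGES_FILE}"
    return spec


# Empty extra_packages: no dependency files URI is set.
assert build_package_spec("gs://bucket", "dir").dependency_files_gcs_uri is None
# Non-empty extra_packages: the URI points at the staged dependencies.tar.gz.
assert (
    build_package_spec("gs://bucket", "dir", extra_packages=["pkg"]).dependency_files_gcs_uri
    == "gs://bucket/dir/dependencies.tar.gz"
)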

2 files changed (+15 -62 lines changed)

tests/unit/vertex_langchain/test_reasoning_engines.py (+7 -22)

@@ -352,25 +352,7 @@ def setup_method(self):
     def teardown_method(self):
         initializer.global_pool.shutdown(wait=True)

-    def test_prepare_create(
-        self,
-        cloud_storage_create_bucket_mock,
-        tarfile_open_mock,
-        cloudpickle_dump_mock,
-    ):
-        _reasoning_engines._prepare_create(
-            reasoning_engine=self.test_app,
-            requirements=_TEST_REASONING_ENGINE_REQUIREMENTS,
-            extra_packages=[],
-            project=_TEST_PROJECT,
-            location=_TEST_LOCATION,
-            staging_bucket=_TEST_STAGING_BUCKET,
-            gcs_dir_name=_TEST_GCS_DIR_NAME,
-        )
-        cloudpickle_dump_mock.assert_called()  # when preparing object.pkl
-        tarfile_open_mock.assert_called()  # when preparing extra_packages
-
-    def test_prepare_update_with_unspecified_extra_packages(
+    def test_prepare_with_unspecified_extra_packages(
         self,
         cloud_storage_create_bucket_mock,
         cloudpickle_dump_mock,
@@ -379,7 +361,7 @@ def test_prepare_update_with_unspecified_extra_packages(
             _reasoning_engines,
             "_upload_extra_packages",
         ) as upload_extra_packages_mock:
-            _reasoning_engines._prepare_update(
+            _reasoning_engines._prepare(
                 reasoning_engine=self.test_app,
                 requirements=_TEST_REASONING_ENGINE_REQUIREMENTS,
                 extra_packages=None,
@@ -390,7 +372,7 @@ def test_prepare_update_with_empty_extra_packages(
             )
             upload_extra_packages_mock.assert_not_called()

-    def test_prepare_update_with_empty_extra_packages(
+    def test_prepare_with_empty_extra_packages(
         self,
         cloud_storage_create_bucket_mock,
         cloudpickle_dump_mock,
@@ -399,7 +381,7 @@ def test_prepare_update_with_empty_extra_packages(
             _reasoning_engines,
             "_upload_extra_packages",
         ) as upload_extra_packages_mock:
-            _reasoning_engines._prepare_update(
+            _reasoning_engines._prepare(
                 reasoning_engine=self.test_app,
                 requirements=_TEST_REASONING_ENGINE_REQUIREMENTS,
                 extra_packages=[],
@@ -429,6 +411,7 @@ def test_create_reasoning_engine(
             self.test_app,
             display_name=_TEST_REASONING_ENGINE_DISPLAY_NAME,
             requirements=_TEST_REASONING_ENGINE_REQUIREMENTS,
+            extra_packages=[_TEST_REASONING_ENGINE_EXTRA_PACKAGE_PATH],
         )
         # Manually set _gca_resource here to prevent the mocks from propagating.
         test_reasoning_engine._gca_resource = _TEST_REASONING_ENGINE_OBJ
@@ -494,6 +477,7 @@ def test_create_reasoning_engine_requirements_from_file(
             self.test_app,
             display_name=_TEST_REASONING_ENGINE_DISPLAY_NAME,
             requirements="requirements.txt",
+            extra_packages=[_TEST_REASONING_ENGINE_EXTRA_PACKAGE_PATH],
         )
         mock_file.assert_called_with("requirements.txt")
         # Manually set _gca_resource here to prevent the mocks from propagating.
@@ -668,6 +652,7 @@ def test_delete_after_create_reasoning_engine(
             self.test_app,
             display_name=_TEST_REASONING_ENGINE_DISPLAY_NAME,
             requirements=_TEST_REASONING_ENGINE_REQUIREMENTS,
+            extra_packages=[_TEST_REASONING_ENGINE_EXTRA_PACKAGE_PATH],
         )
         # Manually set _gca_resource here to prevent the mocks from propagating.
         test_reasoning_engine._gca_resource = _TEST_REASONING_ENGINE_OBJ
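
After the rename, both _prepare_update tests above become _prepare tests and keep the same assertion: with extra_packages left as None or [], the extra-packages upload never happens. A self-contained approximation of that test shape is sketched below, assuming pytest and unittest.mock, and assuming _prepare routes all storage work through the private helpers patched here; the _TEST_* values and _EchoApp are placeholders for the module-level constants and test app in the real test file.

# Illustrative only: a standalone version of the "empty or unspecified
# extra_packages" check against the renamed _prepare helper.
from unittest import mock

import pytest

from vertexai.reasoning_engines import _reasoning_engines

_TEST_PROJECT = "test-project"                 # placeholder
_TEST_LOCATION = "us-central1"                 # placeholder
_TEST_STAGING_BUCKET = "gs://test-bucket"      # placeholder
_TEST_GCS_DIR_NAME = "test-dir"                # placeholder
_TEST_REQUIREMENTS = ["google-cloud-aiplatform[reasoningengine]"]  # placeholder


class _EchoApp:
    """Minimal stand-in for the test app used in the real suite."""

    def query(self, question: str) -> str:
        return question


@pytest.mark.parametrize("extra_packages", [None, []])
@mock.patch.object(_reasoning_engines, "_upload_extra_packages")
@mock.patch.object(_reasoning_engines, "_upload_requirements")
@mock.patch.object(_reasoning_engines, "_upload_reasoning_engine")
@mock.patch.object(_reasoning_engines, "_get_gcs_bucket")
def test_prepare_skips_extra_packages_upload(
    get_gcs_bucket_mock,            # patched so no GCS bucket is created
    upload_reasoning_engine_mock,   # patched so nothing is pickled or uploaded
    upload_requirements_mock,       # patched so requirements.txt is not staged
    upload_extra_packages_mock,
    extra_packages,
):
    _reasoning_engines._prepare(
        reasoning_engine=_EchoApp(),
        requirements=_TEST_REQUIREMENTS,
        extra_packages=extra_packages,
        project=_TEST_PROJECT,
        location=_TEST_LOCATION,
        staging_bucket=_TEST_STAGING_BUCKET,
        gcs_dir_name=_TEST_GCS_DIR_NAME,
    )
    # Neither None nor [] should stage a dependencies.tar.gz archive.
    upload_extra_packages_mock.assert_not_called()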

vertexai/reasoning_engines/_reasoning_engines.py (+8 -40)

@@ -209,7 +209,7 @@ def create(
         # This involves packaging and uploading the artifacts for
         # reasoning_engine, requirements and extra_packages to
         # `staging_bucket/gcs_dir_name`.
-        _prepare_create(
+        _prepare(
             reasoning_engine=reasoning_engine,
             requirements=requirements,
             project=sdk_resource.project,
@@ -226,12 +226,13 @@ def create(
                 gcs_dir_name,
                 _BLOB_FILENAME,
             ),
-            dependency_files_gcs_uri="{}/{}/{}".format(
+        )
+        if extra_packages:
+            package_spec.dependency_files_gcs_uri = "{}/{}/{}".format(
                 staging_bucket,
                 gcs_dir_name,
                 _EXTRA_PACKAGES_FILE,
-            ),
-        )
+            )
         if requirements:
             package_spec.requirements_gcs_uri = "{}/{}/{}".format(
                 staging_bucket,
@@ -377,7 +378,7 @@ def update(
         # This involves packaging and uploading the artifacts for
         # reasoning_engine, requirements and extra_packages to
         # `staging_bucket/gcs_dir_name`.
-        _prepare_update(
+        _prepare(
             reasoning_engine=reasoning_engine,
             requirements=requirements,
             project=self.project,
@@ -564,40 +565,7 @@ def _upload_extra_packages(
     _LOGGER.info(f"Writing to {dir_name}/{_EXTRA_PACKAGES_FILE}")


-def _prepare_create(
-    reasoning_engine: Queryable,
-    requirements: Sequence[str],
-    extra_packages: Sequence[str],
-    project: str,
-    location: str,
-    staging_bucket: str,
-    gcs_dir_name: str,
-) -> None:
-    """Prepares the reasoning engine for creation in Vertex AI.
-
-    This involves packaging and uploading artifacts to Cloud Storage. Note that
-    1. This does not actually create the Reasoning Engine in Vertex AI.
-    2. This will always generate and upload a pickled object.
-    3. This will always generate and upload the dependencies.tar.gz file.
-
-    Args:
-        reasoning_engine: The reasoning engine to be prepared.
-        requirements (Sequence[str]): The set of PyPI dependencies needed.
-        extra_packages (Sequence[str]): The set of extra user-provided packages.
-        project (str): The project for the staging bucket.
-        location (str): The location for the staging bucket.
-        staging_bucket (str): The staging bucket name in the form "gs://...".
-        gcs_dir_name (str): The GCS bucket directory under `staging_bucket` to
-            use for staging the artifacts needed.
-    """
-    gcs_bucket = _get_gcs_bucket(project, location, staging_bucket)
-    _upload_reasoning_engine(reasoning_engine, gcs_bucket, gcs_dir_name)
-    if requirements:
-        _upload_requirements(requirements, gcs_bucket, gcs_dir_name)
-    _upload_extra_packages(extra_packages, gcs_bucket, gcs_dir_name)
-
-
-def _prepare_update(
+def _prepare(
     reasoning_engine: Optional[Queryable],
     requirements: Optional[Sequence[str]],
     extra_packages: Optional[Sequence[str]],
@@ -606,7 +574,7 @@ def _prepare_update(
     staging_bucket: str,
     gcs_dir_name: str,
 ) -> None:
-    """Prepares the reasoning engine for updates in Vertex AI.
+    """Prepares the reasoning engine for creation or updates in Vertex AI.

     This involves packaging and uploading artifacts to Cloud Storage. Note that
     1. This does not actually update the Reasoning Engine in Vertex AI.
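
The hunks above delete _prepare_create and rename _prepare_update to _prepare, but the surviving body is not shown in this diff. Based on the deleted _prepare_create body and the updated tests, a consolidated _prepare plausibly guards each upload as sketched below; this is an inference, not the SDK source, and the _get_gcs_bucket and _upload_* stubs merely stand in for the module's private helpers of the same names.

# Sketch only: a plausible shape for the consolidated _prepare helper.
from typing import Any, Optional, Sequence


def _get_gcs_bucket(project: str, location: str, staging_bucket: str) -> str:
    return staging_bucket  # stub: the SDK returns a storage.Bucket here


def _upload_reasoning_engine(engine: Any, bucket: str, gcs_dir_name: str) -> None:
    print(f"upload pickled engine to {bucket}/{gcs_dir_name}")


def _upload_requirements(reqs: Sequence[str], bucket: str, gcs_dir_name: str) -> None:
    print(f"upload requirements.txt to {bucket}/{gcs_dir_name}")


def _upload_extra_packages(pkgs: Sequence[str], bucket: str, gcs_dir_name: str) -> None:
    print(f"upload dependencies.tar.gz to {bucket}/{gcs_dir_name}")


def _prepare(
    reasoning_engine: Optional[Any],
    requirements: Optional[Sequence[str]],
    extra_packages: Optional[Sequence[str]],
    project: str,
    location: str,
    staging_bucket: str,
    gcs_dir_name: str,
) -> None:
    bucket = _get_gcs_bucket(project, location, staging_bucket)
    if reasoning_engine is not None:
        _upload_reasoning_engine(reasoning_engine, bucket, gcs_dir_name)
    if requirements:
        _upload_requirements(requirements, bucket, gcs_dir_name)
    # Both None and [] skip this branch, matching the assert_not_called
    # checks in the updated tests.
    if extra_packages:
        _upload_extra_packages(extra_packages, bucket, gcs_dir_name)


if __name__ == "__main__":
    # With no extra packages, only the pickle and requirements uploads run.
    _prepare(object(), ["cloudpickle"], [], "prj", "us-central1", "gs://bucket", "dir")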
