Skip to content
12 changes: 11 additions & 1 deletion google/cloud/aiplatform/docker_utils/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,6 +418,7 @@ def build_image(
pip_command: str = "pip",
python_command: str = "python",
no_cache: bool = True,
platform: Optional[str] = None,
**kwargs,
) -> Image:
"""Builds a Docker image.
Expand Down Expand Up @@ -459,6 +460,10 @@ def build_image(
reduces the image building time. See
https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#leverage-build-cache
for more details.
platform (str):
Optional. The target platform for the Docker image build. See
https://docs.docker.com/build/building/multi-platform/#building-multi-platform-images
for more details.
**kwargs:
Other arguments to pass to underlying method that generates the Dockerfile.

Expand All @@ -472,9 +477,14 @@ def build_image(

tag_options = ["-t", output_image_name]
cache_args = ["--no-cache"] if no_cache else []
platform_args = ["--platform", platform] if platform is not None else []

command = (
["docker", "build"] + cache_args + tag_options + ["--rm", "-f-", host_workdir]
["docker", "build"]
+ cache_args
+ platform_args
+ tag_options
+ ["--rm", "-f-", host_workdir]
)

requirements_relative_path = _get_relative_path_to_workdir(
Expand Down
7 changes: 7 additions & 0 deletions google/cloud/aiplatform/prediction/local_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,7 @@ def build_cpr_model(
requirements_path: Optional[str] = None,
extra_packages: Optional[List[str]] = None,
no_cache: bool = False,
platform: Optional[str] = None,
) -> "LocalModel":
"""Builds a local model from a custom predictor.

Expand Down Expand Up @@ -274,6 +275,7 @@ def build_cpr_model(
predictor=$CUSTOM_PREDICTOR_CLASS,
requirements_path="./user_src_dir/requirements.txt",
extra_packages=["./user_src_dir/user_code/custom_package.tar.gz"],
platform="linux/amd64",  # e.g., if you're building on a non-x86 machine
)

In the built image, user provided files will be copied as follows:
Expand Down Expand Up @@ -340,6 +342,10 @@ def build_cpr_model(
reduces the image building time. See
https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#leverage-build-cache
for more details.
platform (str):
Optional. The target platform for the Docker image build. See
https://docs.docker.com/build/building/multi-platform/#building-multi-platform-images
for more details.

Returns:
local model: Instantiated representation of the local model.
Expand Down Expand Up @@ -391,6 +397,7 @@ def build_cpr_model(
pip_command="pip3" if is_prebuilt_prediction_image else "pip",
python_command="python3" if is_prebuilt_prediction_image else "python",
no_cache=no_cache,
platform=platform,
)

container_spec = gca_model_compat.ModelContainerSpec(
Expand Down
4 changes: 3 additions & 1 deletion tests/system/aiplatform/test_prediction_cpr.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,8 @@ class TestPredictionCpr(e2e_base.TestEndToEnd):

_temp_prefix = "temp-vertex-sdk-e2e-prediction-cpr"

def test_build_cpr_model_upload_and_deploy(self, shared_state, caplog):
@pytest.mark.parametrize("platform", [None, "linux/amd64"])
def test_build_cpr_model_upload_and_deploy(self, shared_state, caplog, platform):
"""Creates a CPR model from custom predictor, uploads it and deploys."""
Comment on lines -52 to 54
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Parametrizing the platform arg in tests with:

  1. The default value (None)
  2. A valid value ("linux/amd64" — currently the only expected/supported value)

caplog.set_level(logging.INFO)
Expand All @@ -61,6 +62,7 @@ def test_build_cpr_model_upload_and_deploy(self, shared_state, caplog):
_IMAGE_URI,
predictor=SklearnPredictor,
requirements_path=os.path.join(_USER_CODE_DIR, _REQUIREMENTS_FILE),
platform=platform,
)
Comment on lines 62 to 66
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Passing it into the build options for the local model.


with local_model.deploy_to_local_endpoint(
Expand Down
70 changes: 70 additions & 0 deletions tests/unit/aiplatform/test_prediction.py
Original file line number Diff line number Diff line change
Expand Up @@ -1304,6 +1304,7 @@ class {predictor_class}:
pip_command="pip",
python_command="python",
no_cache=False,
platform=None,
)

def test_build_cpr_model_fails_handler_is_none(
Expand Down Expand Up @@ -1418,6 +1419,7 @@ class {handler_class}:
pip_command="pip",
python_command="python",
no_cache=False,
platform=None,
)

def test_build_cpr_model_with_custom_handler_and_predictor_is_none(
Expand Down Expand Up @@ -1472,6 +1474,7 @@ class {handler_class}:
pip_command="pip",
python_command="python",
no_cache=False,
platform=None,
)

def test_build_cpr_model_creates_and_get_localmodel_base_is_prebuilt(
Expand Down Expand Up @@ -1527,6 +1530,7 @@ class {predictor_class}:
pip_command="pip3",
python_command="python3",
no_cache=False,
platform=None,
)

def test_build_cpr_model_creates_and_get_localmodel_with_requirements_path(
Expand Down Expand Up @@ -1584,6 +1588,7 @@ class {predictor_class}:
pip_command="pip",
python_command="python",
no_cache=False,
platform=None,
)

def test_build_cpr_model_creates_and_get_localmodel_with_extra_packages(
Expand Down Expand Up @@ -1641,6 +1646,7 @@ class {predictor_class}:
pip_command="pip",
python_command="python",
no_cache=False,
platform=None,
)

def test_build_cpr_model_creates_and_get_localmodel_no_cache(
Expand Down Expand Up @@ -1695,6 +1701,70 @@ class {predictor_class}:
pip_command="pip",
python_command="python",
no_cache=no_cache,
platform=None,
)

@pytest.mark.parametrize(
    "platform",
    [
        None,
        "linux/amd64",
        # Fixed typo: "will_by_validated" -> "will_be_validated". The value
        # itself is deliberately arbitrary: the SDK does not validate it.
        "some_arbitrary_platform_value_that_will_be_validated_by_docker_build_command",
    ],
)
def test_build_cpr_model_creates_and_get_localmodel_platform(
    self,
    tmp_path,
    inspect_source_from_class_mock_predictor_only,
    is_prebuilt_prediction_container_uri_is_false_mock,
    build_image_mock,
    platform,
):
    """Verifies ``platform`` is forwarded unchanged to ``build_image``.

    Covers None (the default), a valid value, and an arbitrary string —
    the SDK passes the value through and leaves validation to the
    ``docker build`` command itself.
    """
    # Build a minimal user source dir containing a stub predictor class.
    src_dir = tmp_path / _TEST_SRC_DIR
    src_dir.mkdir()
    predictor = src_dir / _TEST_PREDICTOR_FILE
    predictor.write_text(
        textwrap.dedent(
            """
        class {predictor_class}:
            pass
        """
        ).format(predictor_class=_TEST_PREDICTOR_CLASS)
    )
    my_predictor = self._load_module(_TEST_PREDICTOR_CLASS, str(predictor))

    local_model = LocalModel.build_cpr_model(
        str(src_dir), _TEST_OUTPUT_IMAGE, predictor=my_predictor, platform=platform
    )

    assert local_model.serving_container_spec.image_uri == _TEST_OUTPUT_IMAGE
    assert local_model.serving_container_spec.predict_route == DEFAULT_PREDICT_ROUTE
    assert local_model.serving_container_spec.health_route == DEFAULT_HEALTH_ROUTE
    inspect_source_from_class_mock_predictor_only.assert_called_once_with(
        my_predictor, str(src_dir)
    )
    is_prebuilt_prediction_container_uri_is_false_mock.assert_called_once_with(
        _DEFAULT_BASE_IMAGE
    )
    # The platform kwarg must reach the Docker build call verbatim.
    build_image_mock.assert_called_once_with(
        _DEFAULT_BASE_IMAGE,
        str(src_dir),
        _TEST_OUTPUT_IMAGE,
        python_module=_DEFAULT_PYTHON_MODULE,
        requirements_path=None,
        extra_requirements=_DEFAULT_SDK_REQUIREMENTS,
        extra_packages=None,
        exposed_ports=[DEFAULT_HTTP_PORT],
        environment_variables={
            "HANDLER_MODULE": _DEFAULT_HANDLER_MODULE,
            "HANDLER_CLASS": _DEFAULT_HANDLER_CLASS,
            "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}",
            "PREDICTOR_CLASS": _TEST_PREDICTOR_CLASS,
        },
        pip_command="pip",
        python_command="python",
        no_cache=False,
        platform=platform,
    )

def test_deploy_to_local_endpoint(
Expand Down