From 7377384d4b3568640b962b3646f1282cf3434b0f Mon Sep 17 00:00:00 2001
From: "Tarun Chevula (CSI INTERFUSION INC)"
Date: Wed, 18 Dec 2024 12:51:32 +0530
Subject: [PATCH] Updated tests to use created environment in the job

---
 .../tests/minimal_inference_test.py | 6 +++---
 .../tests/minimal_inference_test.py | 6 +++---
 .../context/conda_dependencies.yaml | 4 +---
 .../tests/mlflow_test.py            | 6 +++---
 4 files changed, 10 insertions(+), 12 deletions(-)

diff --git a/assets/inference/environments/minimal-ubuntu20.04-py39-cpu-inference/tests/minimal_inference_test.py b/assets/inference/environments/minimal-ubuntu20.04-py39-cpu-inference/tests/minimal_inference_test.py
index a2d79a1795..b3595a5e51 100644
--- a/assets/inference/environments/minimal-ubuntu20.04-py39-cpu-inference/tests/minimal_inference_test.py
+++ b/assets/inference/environments/minimal-ubuntu20.04-py39-cpu-inference/tests/minimal_inference_test.py
@@ -32,10 +32,10 @@ def test_minimal_cpu_inference():
 
     env_docker_context = Environment(
         build=BuildContext(path=this_dir / BUILD_CONTEXT),
-        name="minimal_cpu_inference",
+        name=env_name,
         description="minimal 20.04 py39 cpu inference environment created from a Docker context.",
     )
-    ml_client.environments.create_or_update(env_docker_context)
+    returned_env = ml_client.environments.create_or_update(env_docker_context)
 
     # create the command
     job = command(
@@ -44,7 +44,7 @@
         inputs=dict(
             score="valid_score.py",
         ),
-        environment=f"{env_name}@latest",
+        environment=returned_env,
         compute=os.environ.get("cpu_cluster"),
         display_name="minimal-cpu-inference-example",
         description="A test run of the minimal 20.04 py39 cpu inference curated environment",
diff --git a/assets/inference/environments/minimal-ubuntu22.04-py311-cpu-inference/tests/minimal_inference_test.py b/assets/inference/environments/minimal-ubuntu22.04-py311-cpu-inference/tests/minimal_inference_test.py
index 91e05aa6ed..7f325ae497 100644
--- a/assets/inference/environments/minimal-ubuntu22.04-py311-cpu-inference/tests/minimal_inference_test.py
+++ b/assets/inference/environments/minimal-ubuntu22.04-py311-cpu-inference/tests/minimal_inference_test.py
@@ -32,10 +32,10 @@ def test_minimal_cpu_inference():
 
     env_docker_context = Environment(
         build=BuildContext(path=this_dir / BUILD_CONTEXT),
-        name="minimal_cpu_inference",
+        name=env_name,
         description="minimal 22.04 py311 cpu inference environment created from a Docker context.",
     )
-    ml_client.environments.create_or_update(env_docker_context)
+    returned_env = ml_client.environments.create_or_update(env_docker_context)
 
     # create the command
     job = command(
@@ -44,7 +44,7 @@
         inputs=dict(
             score="valid_score.py",
         ),
-        environment=f"{env_name}@latest",
+        environment=returned_env,
         compute=os.environ.get("cpu_cluster"),
         display_name="minimal-cpu-inference-example",
         description="A test run of the minimal 22.04 py311 cpu inference curated environment",
diff --git a/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/context/conda_dependencies.yaml b/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/context/conda_dependencies.yaml
index bcb2b7f9bc..73eb0e5c2f 100644
--- a/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/context/conda_dependencies.yaml
+++ b/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/context/conda_dependencies.yaml
@@ -8,6 +8,4 @@ dependencies:
   - pip:
     - azureml-inference-server-http=={{latest-pypi-version}}
     - azureml-ai-monitoring=={{latest-pypi-version}}
-    - numpy
-    - mlflow
-    - azureml-contrib-services
\ No newline at end of file
+    - mlflow
\ No newline at end of file
diff --git a/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/tests/mlflow_test.py b/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/tests/mlflow_test.py
index cdf606b943..0674e8f946 100644
--- a/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/tests/mlflow_test.py
+++ b/assets/inference/environments/mlflow-ubuntu20.04-py39-cpu-inference/tests/mlflow_test.py
@@ -32,10 +32,10 @@ def test_mlflow_cpu_inference():
 
     env_docker_context = Environment(
         build=BuildContext(path=this_dir / BUILD_CONTEXT),
-        name="mlflow_py39_inference",
+        name=env_name,
         description="mlflow 20.04 py39 cpu inference environment created from a Docker context.",
     )
-    ml_client.environments.create_or_update(env_docker_context)
+    returned_env = ml_client.environments.create_or_update(env_docker_context)
 
     # create the command
     job = command(
@@ -47,7 +47,7 @@
         inputs=dict(
             score_input="sample_2_0_input.txt",
             model_dir="mlflow_2_0_model_folder"
         ),
-        environment=f"{env_name}@latest",
+        environment=returned_env,
         compute=os.environ.get("cpu_cluster"),
         display_name="mlflow-py39-inference-example",
         description="A test run of the mlflow 20.04 py39 cpu inference curated environment",
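A minimal sketch of the pattern this patch adopts, using the azure-ai-ml SDK v2: ml_client.environments.create_or_update returns the registered Environment (including the version it was assigned), and passing that object to command(...) pins the test job to the environment just built, instead of resolving "<name>@latest" at submission time. The workspace settings, compute variable, paths, and job command below are illustrative placeholders, not values taken from this repository.

    import os

    from azure.ai.ml import MLClient, command
    from azure.ai.ml.entities import BuildContext, Environment
    from azure.identity import DefaultAzureCredential

    # Connect to a workspace; these environment variable names are placeholders.
    ml_client = MLClient(
        credential=DefaultAzureCredential(),
        subscription_id=os.environ["subscription_id"],
        resource_group_name=os.environ["resource_group"],
        workspace_name=os.environ["workspace"],
    )

    # Define an environment built from a local Docker build context.
    env_docker_context = Environment(
        build=BuildContext(path="./context"),
        name="example_cpu_inference_env",
        description="Example environment created from a Docker context.",
    )

    # The returned object carries the version assigned during registration.
    returned_env = ml_client.environments.create_or_update(env_docker_context)

    # Pass the returned Environment directly so the job runs on exactly the
    # version created above, not whatever "@latest" resolves to later.
    job = command(
        code="./src",
        command="python main.py --score ${{inputs.score}}",
        inputs=dict(score="valid_score.py"),
        environment=returned_env,
        compute=os.environ.get("cpu_cluster"),
        display_name="example-cpu-inference-run",
    )
    ml_client.create_or_update(job)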