28 | 28 | from tests.integ.sagemaker.serve.constants import ( |
29 | 29 | PYTORCH_SQUEEZENET_RESOURCE_DIR, |
30 | 30 | SERVE_SAGEMAKER_ENDPOINT_TIMEOUT, |
31 | | - NOT_RUNNING_ON_PY310, |
| 31 | + PYTHON_VERSION_IS_310, |
32 | 32 | ) |
33 | 33 | from tests.integ.timeout import timeout |
34 | 34 | from tests.integ.utils import cleanup_model_resources |
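For context, both the old NOT_RUNNING_ON_PY310 flag and its replacement PYTHON_VERSION_IS_310 come from tests/integ/sagemaker/serve/constants.py, which is not shown in this diff. A minimal sketch of how the renamed flag might be defined, assuming it is derived from sys.version_info (the actual definition in the constants module may differ):

    # Hypothetical sketch; the real tests/integ/sagemaker/serve/constants.py may differ.
    import sys

    # True when the interpreter running the tests is Python 3.10.x.
    PYTHON_VERSION_IS_310 = sys.version_info[:2] == (3, 10)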
@@ -148,44 +148,45 @@ def model_builder(request): |
148 | 148 | return request.getfixturevalue(request.param) |
149 | 149 |
150 | 150 |
151 | | -# @pytest.mark.skipif( |
152 | | -# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or NOT_RUNNING_ON_PY310, |
153 | | -# reason="The goal of these test are to test the serving components of our feature", |
154 | | -# ) |
155 | | -# @pytest.mark.parametrize( |
156 | | -# "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True |
157 | | -# ) |
158 | | -# def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image): |
159 | | -# logger.info("Running in LOCAL_CONTAINER mode...") |
160 | | -# caught_ex = None |
| 151 | +@pytest.mark.skipif( |
| 152 | + PYTHON_VERSION_IS_310, |
| 153 | + reason="The goal of these test are to test the serving components of our feature", |
| 154 | +) |
| 155 | +@pytest.mark.parametrize( |
| 156 | + "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True |
| 157 | +) |
| 158 | +def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image): |
| 159 | + logger.info("Running in LOCAL_CONTAINER mode...") |
| 160 | + caught_ex = None |
161 | 161 |
162 | | -# model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session) |
| 162 | + model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session) |
163 | 163 |
164 | | -# with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT): |
165 | | -# try: |
166 | | -# logger.info("Deploying and predicting in LOCAL_CONTAINER mode...") |
167 | | -# predictor = model.deploy() |
168 | | -# logger.info("Local container successfully deployed.") |
169 | | -# predictor.predict(test_image) |
170 | | -# except Exception as e: |
171 | | -# logger.exception("test failed") |
172 | | -# caught_ex = e |
173 | | -# finally: |
174 | | -# if model.modes[str(Mode.LOCAL_CONTAINER)].container: |
175 | | -# model.modes[str(Mode.LOCAL_CONTAINER)].container.kill() |
176 | | -# if caught_ex: |
177 | | -# assert ( |
178 | | -# False |
179 | | -# ), f"{caught_ex} was thrown when running pytorch squeezenet local container test" |
| 164 | + with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT): |
| 165 | + try: |
| 166 | + logger.info("Deploying and predicting in LOCAL_CONTAINER mode...") |
| 167 | + predictor = model.deploy() |
| 168 | + logger.info("Local container successfully deployed.") |
| 169 | + predictor.predict(test_image) |
| 170 | + except Exception as e: |
| 171 | + logger.exception("test failed") |
| 172 | + caught_ex = e |
| 173 | + finally: |
| 174 | + if model.modes[str(Mode.LOCAL_CONTAINER)].container: |
| 175 | + model.modes[str(Mode.LOCAL_CONTAINER)].container.kill() |
| 176 | + if caught_ex: |
| 177 | + assert ( |
| 178 | + False |
| 179 | + ), f"{caught_ex} was thrown when running pytorch squeezenet local container test" |
180 | 180 |
181 | 181 |
182 | 182 | @pytest.mark.skipif( |
183 | | - NOT_RUNNING_ON_PY310, # or NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE, |
| 183 | + PYTHON_VERSION_IS_310, # or NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE, |
184 | 184 | reason="The goal of these test are to test the serving components of our feature", |
185 | 185 | ) |
186 | 186 | @pytest.mark.parametrize( |
187 | 187 | "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True |
188 | 188 | ) |
| 189 | +@pytest.mark.slow_test |
189 | 190 | def test_happy_pytorch_sagemaker_endpoint( |
190 | 191 | sagemaker_session, model_builder, cpu_instance_type, test_image |
191 | 192 | ): |
@@ -221,7 +222,7 @@ def test_happy_pytorch_sagemaker_endpoint( |
221 | 222 |
222 | 223 |
223 | 224 | # @pytest.mark.skipif( |
224 | | -# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or NOT_RUNNING_ON_PY310, |
| 225 | +# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or PYTHON_VERSION_IS_310, |
225 | 226 | # reason="The goal of these test are to test the serving components of our feature", |
226 | 227 | # ) |
227 | 228 | # @pytest.mark.parametrize( |
@@ -267,7 +268,7 @@ def test_happy_pytorch_sagemaker_endpoint( |
267 | 268 |
268 | 269 |
269 | 270 | # @pytest.mark.skipif( |
270 | | -# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or NOT_RUNNING_ON_PY310, |
| 271 | +# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or PYTHON_VERSION_IS_310, |
271 | 272 | # reason="The goal of these test are to test the serving components of our feature", |
272 | 273 | # ) |
273 | 274 | # @pytest.mark.parametrize( |
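The commit also adds a custom @pytest.mark.slow_test marker to test_happy_pytorch_sagemaker_endpoint. Unregistered markers make pytest emit PytestUnknownMarkWarning, so the marker needs to be declared in the project's pytest configuration. A minimal sketch of one way to register it in a conftest.py, assuming the repository does not already declare it in pytest.ini or setup.cfg:

    # conftest.py (sketch; the repository may register markers elsewhere)
    def pytest_configure(config):
        # Declare the custom marker so pytest does not warn about
        # @pytest.mark.slow_test being unknown.
        config.addinivalue_line(
            "markers", "slow_test: marks a long-running integration test"
        )

Once registered, slow tests can be excluded from a run with pytest -m "not slow_test".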