
Commit 7162654

Remove local mode test for now
1 parent 05cf48f commit 7162654

File tree

1 file changed: +72 -72 lines changed

tests/integ/sagemaker/serve/test_serve_pt_happy.py

Lines changed: 72 additions & 72 deletions
@@ -149,79 +149,79 @@ def model_builder(request):
     return request.getfixturevalue(request.param)
 
 
-@pytest.mark.skipif(
-    PYTHON_VERSION_IS_NOT_310,
-    reason="The goal of these test are to test the serving components of our feature",
-)
-@pytest.mark.parametrize(
-    "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
-)
-@pytest.mark.slow_test
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
-def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image):
-    logger.info("Running in LOCAL_CONTAINER mode...")
-    caught_ex = None
-
-    model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
-
-    with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
-        try:
-            logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
-            predictor = model.deploy()
-            logger.info("Local container successfully deployed.")
-            predictor.predict(test_image)
-        except Exception as e:
-            logger.exception("test failed")
-            caught_ex = e
-        finally:
-            if model.modes[str(Mode.LOCAL_CONTAINER)].container:
-                model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
-            if caught_ex:
-                assert (
-                    False
-                ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
-
-
-@pytest.mark.skipif(
-    PYTHON_VERSION_IS_NOT_310,  # or NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE,
-    reason="The goal of these test are to test the serving components of our feature",
-)
-@pytest.mark.parametrize(
-    "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
-)
-@pytest.mark.slow_test
-def test_happy_pytorch_sagemaker_endpoint(
-    sagemaker_session, model_builder, cpu_instance_type, test_image
-):
-    logger.info("Running in SAGEMAKER_ENDPOINT mode...")
-    caught_ex = None
-
-    iam_client = sagemaker_session.boto_session.client("iam")
-    role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"]
-
-    model = model_builder.build(
-        mode=Mode.SAGEMAKER_ENDPOINT, role_arn=role_arn, sagemaker_session=sagemaker_session
-    )
+# @pytest.mark.skipif(
+#     PYTHON_VERSION_IS_NOT_310,
+#     reason="The goal of these test are to test the serving components of our feature",
+# )
+# @pytest.mark.parametrize(
+#     "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
+# )
+# @pytest.mark.slow_test
+# @pytest.mark.flaky(reruns=5, reruns_delay=2)
+# def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image):
+#     logger.info("Running in LOCAL_CONTAINER mode...")
+#     caught_ex = None
+#
+#     model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
+#
+#     with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
+#         try:
+#             logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
+#             predictor = model.deploy()
+#             logger.info("Local container successfully deployed.")
+#             predictor.predict(test_image)
+#         except Exception as e:
+#             logger.exception("test failed")
+#             caught_ex = e
+#         finally:
+#             if model.modes[str(Mode.LOCAL_CONTAINER)].container:
+#                 model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
+#             if caught_ex:
+#                 assert (
+#                     False
+#                 ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
 
-    with timeout(minutes=SERVE_SAGEMAKER_ENDPOINT_TIMEOUT):
-        try:
-            logger.info("Deploying and predicting in SAGEMAKER_ENDPOINT mode...")
-            predictor = model.deploy(instance_type=cpu_instance_type, initial_instance_count=1)
-            logger.info("Endpoint successfully deployed.")
-            predictor.predict(test_image)
-        except Exception as e:
-            caught_ex = e
-        finally:
-            cleanup_model_resources(
-                sagemaker_session=model_builder.sagemaker_session,
-                model_name=model.name,
-                endpoint_name=model.endpoint_name,
-            )
-            if caught_ex:
-                logger.exception(caught_ex)
-                assert (
-                    False
-                ), f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
+
+# @pytest.mark.skipif(
+#     PYTHON_VERSION_IS_NOT_310,  # or NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE,
+#     reason="The goal of these test are to test the serving components of our feature",
+# )
+# @pytest.mark.parametrize(
+#     "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
+# )
+# @pytest.mark.slow_test
+# def test_happy_pytorch_sagemaker_endpoint(
+#     sagemaker_session, model_builder, cpu_instance_type, test_image
+# ):
+#     logger.info("Running in SAGEMAKER_ENDPOINT mode...")
+#     caught_ex = None
+#
+#     iam_client = sagemaker_session.boto_session.client("iam")
+#     role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"]
+#
+#     model = model_builder.build(
+#         mode=Mode.SAGEMAKER_ENDPOINT, role_arn=role_arn, sagemaker_session=sagemaker_session
+#     )
+#
+#     with timeout(minutes=SERVE_SAGEMAKER_ENDPOINT_TIMEOUT):
+#         try:
+#             logger.info("Deploying and predicting in SAGEMAKER_ENDPOINT mode...")
+#             predictor = model.deploy(instance_type=cpu_instance_type, initial_instance_count=1)
+#             logger.info("Endpoint successfully deployed.")
+#             predictor.predict(test_image)
+#         except Exception as e:
+#             caught_ex = e
+#         finally:
+#             cleanup_model_resources(
+#                 sagemaker_session=model_builder.sagemaker_session,
+#                 model_name=model.name,
+#                 endpoint_name=model.endpoint_name,
+#             )
+#             if caught_ex:
+#                 logger.exception(caught_ex)
+#                 assert (
+#                     False
+#                 ), f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
 
 
 # @pytest.mark.skipif(
