diff --git a/CHANGELOG.md b/CHANGELOG.md index b579150d2b..21361c97e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,46 @@ # Changelog +## v2.74.0 (2022-01-26) + +### Features + + * Add support for SageMaker lineage queries context + +### Bug Fixes and Other Changes + + * support specifying a facet by its column index + +### Documentation Changes + + * more documentation for serverless inference + +## v2.73.0 (2022-01-19) + +### Features + + * Add EMRStep support in Sagemaker pipeline + * Adds Lineage queries in artifact, context and trial components + * Add support for SageMaker lineage queries in action + * Adds support for Serverless inference + * support checkpoint to be passed from estimator + * support JsonGet/Join parameterization in tuning step Hyperparameters + * Support model pipelines in CreateModelStep + * enable python 3.9 + * Add models_v2 under lineage context + +### Bug Fixes and Other Changes + + * allow kms_key to be passed for processing step + * Remove duplicate vertex/edge in query lineage + * update pricing link + * Update CHANGELOG.md + * fixes unnecessary session call while generating pipeline definition for lambda step + +### Documentation Changes + + * Enhance smddp 1.2.2 doc + * Document the available ExecutionVariables + ## v2.72.3 (2022-01-10) ### Features diff --git a/VERSION b/VERSION index e5a188f81c..0a0dda9767 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.72.4.dev0 +2.74.1.dev0 diff --git a/doc/Makefile b/doc/Makefile index 1cdfaa77df..af378c2e0f 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -W -SPHINXBUILD = python -msphinx +SPHINXBUILD = python -msphinx SPHINXPROJ = sagemaker SOURCEDIR = . BUILDDIR = _build diff --git a/doc/api/inference/async_inference.rst b/doc/api/inference/async_inference.rst new file mode 100644 index 0000000000..493cab40eb --- /dev/null +++ b/doc/api/inference/async_inference.rst @@ -0,0 +1,19 @@ +Async Inference +----------------- + +This module contains classes related to Amazon Sagemaker Async Inference + +.. automodule:: sagemaker.async_inference.async_inference_config + :members: + :undoc-members: + :show-inheritance: + +.. automodule:: sagemaker.async_inference.async_inference_response + :members: + :undoc-members: + :show-inheritance: + +.. automodule:: sagemaker.async_inference.waiter_config + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/api/inference/predictor_async.rst b/doc/api/inference/predictor_async.rst new file mode 100644 index 0000000000..e5b83e2324 --- /dev/null +++ b/doc/api/inference/predictor_async.rst @@ -0,0 +1,9 @@ +AsyncPredictor +-------------------- + +Make async predictions against SageMaker endpoints with Python objects + +.. autoclass:: sagemaker.predictor_async.AsyncPredictor + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/api/inference/serverless.rst b/doc/api/inference/serverless.rst new file mode 100644 index 0000000000..d338efd7be --- /dev/null +++ b/doc/api/inference/serverless.rst @@ -0,0 +1,9 @@ +Serverless Inference +--------------------- + +This module contains classes related to Amazon Sagemaker Serverless Inference + +.. 
automodule:: sagemaker.serverless.serverless_inference_config + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/api/training/sdp_versions/latest/smd_data_parallel_pytorch.rst b/doc/api/training/sdp_versions/latest/smd_data_parallel_pytorch.rst index 52de6223d7..85c9594e73 100644 --- a/doc/api/training/sdp_versions/latest/smd_data_parallel_pytorch.rst +++ b/doc/api/training/sdp_versions/latest/smd_data_parallel_pytorch.rst @@ -2,10 +2,12 @@ PyTorch Guide to SageMaker's distributed data parallel library ############################################################## -.. admonition:: Contents +Use this guide to learn about the SageMaker distributed +data parallel library API for PyTorch. - - :ref:`pytorch-sdp-modify` - - :ref:`pytorch-sdp-api` +.. contents:: Topics + :depth: 3 + :local: .. _pytorch-sdp-modify: @@ -55,7 +57,7 @@ API offered for PyTorch. - Modify the ``torch.utils.data.distributed.DistributedSampler`` to - include the cluster’s information. Set``num_replicas`` to the + include the cluster’s information. Set ``num_replicas`` to the total number of GPUs participating in training across all the nodes in the cluster. This is called ``world_size``. You can get ``world_size`` with @@ -110,7 +112,7 @@ you will have for distributed training with the distributed data parallel librar def main():     # Scale batch size by world size -     batch_size //= dist.get_world_size() // 8 +     batch_size //= dist.get_world_size()     batch_size = max(batch_size, 1)     # Prepare dataset @@ -153,9 +155,132 @@ you will have for distributed training with the distributed data parallel librar PyTorch API =========== -.. rubric:: Supported versions +.. class:: smdistributed.dataparallel.torch.parallel.DistributedDataParallel(module, device_ids=None, output_device=None, broadcast_buffers=True, process_group=None, bucket_cap_mb=None) + + ``smdistributed.dataparallel``'s implementation of distributed data + parallelism for PyTorch. In most cases, wrapping your PyTorch Module + with ``smdistributed.dataparallel``'s ``DistributedDataParallel`` (DDP) is + all you need to do to use ``smdistributed.dataparallel``. + + Creation of this DDP class requires ``smdistributed.dataparallel`` + already initialized + with ``smdistributed.dataparallel.torch.distributed.init_process_group()``. + + This container parallelizes the application of the given module by + splitting the input across the specified devices by chunking in the + batch dimension. The module is replicated on each machine and each + device, and each such replica handles a portion of the input. During the + backwards pass, gradients from each node are averaged. + + The batch size should be larger than the number of GPUs used locally. + ​ + Example usage + of ``smdistributed.dataparallel.torch.parallel.DistributedDataParallel``: + + .. code:: python + + import torch + import smdistributed.dataparallel.torch.distributed as dist + from smdistributed.dataparallel.torch.parallel import DistributedDataParallel as DDP + + dist.init_process_group() + + # Pin GPU to be used to process local rank (one GPU per process) + torch.cuda.set_device(dist.get_local_rank()) + + # Build model and optimizer + model = ... 
+ optimizer = torch.optim.SGD(model.parameters(), +                             lr=1e-3 * dist.get_world_size()) + # Wrap model with smdistributed.dataparallel's DistributedDataParallel + model = DDP(model) + + **Parameters:** + + - ``module (torch.nn.Module)(required):`` PyTorch NN Module to be + parallelized + - ``device_ids (list[int])(optional):`` CUDA devices. This should only + be provided when the input module resides on a single CUDA device. + For single-device modules, + the ``ith module replica is placed on device_ids[i]``. For + multi-device modules and CPU modules, device_ids must be None or an + empty list, and input data for the forward pass must be placed on the + correct device. Defaults to ``None``. + - ``output_device (int)(optional):`` Device location of output for + single-device CUDA modules. For multi-device modules and CPU modules, + it must be None, and the module itself dictates the output location. + (default: device_ids[0] for single-device modules).  Defaults + to ``None``. + - ``broadcast_buffers (bool)(optional):`` Flag that enables syncing + (broadcasting) buffers of the module at beginning of the forward + function. ``smdistributed.dataparallel`` does not support broadcast + buffer yet. Please set this to ``False``. + - ``process_group(smdistributed.dataparallel.torch.distributed.group)(optional):`` Process + group is not supported in ``smdistributed.dataparallel``. This + parameter exists for API parity with torch.distributed only. Only + supported value is + ``smdistributed.dataparallel.torch.distributed.group.WORLD.`` Defaults + to ``None.`` + - ``bucket_cap_mb (int)(optional):`` DistributedDataParallel will + bucket parameters into multiple buckets so that gradient reduction of + each bucket can potentially overlap with backward + computation. ``bucket_cap_mb`` controls the bucket size in + MegaBytes (MB) (default: 25). + + .. note:: + + This module assumes all parameters are registered in the model by the + time it is created. No parameters should be added nor removed later. + + .. note:: + + This module assumes all parameters are registered in the model of + each distributed processes are in the same order. The module itself + will conduct gradient all-reduction following the reverse order of + the registered parameters of the model. In other words, it is users’ + responsibility to ensure that each distributed process has the exact + same model and thus the exact same parameter registration order. + + .. note:: + + You should never change the set of your model’s parameters after + wrapping up your model with DistributedDataParallel. In other words, + when wrapping up your model with DistributedDataParallel, the + constructor of DistributedDataParallel will register the additional + gradient reduction functions on all the parameters of the model + itself at the time of construction. If you change the model’s + parameters after the DistributedDataParallel construction, this is + not supported and unexpected behaviors can happen, since some + parameters’ gradient reduction functions might not get called. + + .. method:: no_sync() + + ``smdistributed.dataparallel`` supports the `PyTorch DDP no_sync() `_ + context manager. It enables gradient accumulation by skipping AllReduce + during training iterations inside the context. + + .. note:: + + The ``no_sync()`` context manager is available from smdistributed-dataparallel v1.2.2. + To find the release note, see :ref:`sdp_1.2.2_release_note`. -**PyTorch 1.7.1, 1.8.1** + **Example:** + + .. 
code:: python + + # Gradients are accumulated while inside no_sync context + with model.no_sync(): + ... + loss.backward() + + # First iteration upon exiting context + # Incoming gradients are added to the accumulated gradients and then synchronized via AllReduce + ... + loss.backward() + + # Update weights and reset gradients to zero after accumulation is finished + optimizer.step() + optimizer.zero_grad() .. function:: smdistributed.dataparallel.torch.distributed.is_available() @@ -409,99 +534,6 @@ PyTorch API otherwise. -.. class:: smdistributed.dataparallel.torch.parallel.DistributedDataParallel(module, device_ids=None, output_device=None, broadcast_buffers=True, process_group=None, bucket_cap_mb=None) - - ``smdistributed.dataparallel's`` implementation of distributed data - parallelism for PyTorch. In most cases, wrapping your PyTorch Module - with ``smdistributed.dataparallel's`` ``DistributedDataParallel (DDP)`` is - all you need to do to use ``smdistributed.dataparallel``. - - Creation of this DDP class requires ``smdistributed.dataparallel`` - already initialized - with ``smdistributed.dataparallel.torch.distributed.init_process_group()``. - - This container parallelizes the application of the given module by - splitting the input across the specified devices by chunking in the - batch dimension. The module is replicated on each machine and each - device, and each such replica handles a portion of the input. During the - backwards pass, gradients from each node are averaged. - - The batch size should be larger than the number of GPUs used locally. - ​ - Example usage - of ``smdistributed.dataparallel.torch.parallel.DistributedDataParallel``: - - .. code:: python - - import torch - import smdistributed.dataparallel.torch.distributed as dist - from smdistributed.dataparallel.torch.parallel import DistributedDataParallel as DDP - - dist.init_process_group() - - # Pin GPU to be used to process local rank (one GPU per process) - torch.cuda.set_device(dist.get_local_rank()) - - # Build model and optimizer - model = ... - optimizer = torch.optim.SGD(model.parameters(), -                             lr=1e-3 * dist.get_world_size()) - # Wrap model with smdistributed.dataparallel's DistributedDataParallel - model = DDP(model) - - **Parameters:** - - - ``module (torch.nn.Module)(required):`` PyTorch NN Module to be - parallelized - - ``device_ids (list[int])(optional):`` CUDA devices. This should only - be provided when the input module resides on a single CUDA device. - For single-device modules, - the ``ith module replica is placed on device_ids[i]``. For - multi-device modules and CPU modules, device_ids must be None or an - empty list, and input data for the forward pass must be placed on the - correct device. Defaults to ``None``. - - ``output_device (int)(optional):`` Device location of output for - single-device CUDA modules. For multi-device modules and CPU modules, - it must be None, and the module itself dictates the output location. - (default: device_ids[0] for single-device modules).  Defaults - to ``None``. - - ``broadcast_buffers (bool)(optional):`` Flag that enables syncing - (broadcasting) buffers of the module at beginning of the forward - function. ``smdistributed.dataparallel`` does not support broadcast - buffer yet. Please set this to ``False``. - - ``process_group(smdistributed.dataparallel.torch.distributed.group)(optional):`` Process - group is not supported in ``smdistributed.dataparallel``. This - parameter exists for API parity with torch.distributed only. 
Only - supported value is - ``smdistributed.dataparallel.torch.distributed.group.WORLD.`` Defaults - to ``None.`` - - ``bucket_cap_mb (int)(optional):`` DistributedDataParallel will - bucket parameters into multiple buckets so that gradient reduction of - each bucket can potentially overlap with backward - computation. ``bucket_cap_mb`` controls the bucket size in - MegaBytes (MB) (default: 25). - - .. rubric:: Notes - - - This module assumes all parameters are registered in the model by the - time it is created. No parameters should be added nor removed later. - - This module assumes all parameters are registered in the model of - each distributed processes are in the same order. The module itself - will conduct gradient all-reduction following the reverse order of - the registered parameters of the model. In other words, it is users’ - responsibility to ensure that each distributed process has the exact - same model and thus the exact same parameter registration order. - - You should never change the set of your model’s parameters after - wrapping up your model with DistributedDataParallel. In other words, - when wrapping up your model with DistributedDataParallel, the - constructor of DistributedDataParallel will register the additional - gradient reduction functions on all the parameters of the model - itself at the time of construction. If you change the model’s - parameters after the DistributedDataParallel construction, this is - not supported and unexpected behaviors can happen, since some - parameters’ gradient reduction functions might not get called. - - .. class:: smdistributed.dataparallel.torch.distributed.ReduceOp An enum-like class for supported reduction operations diff --git a/doc/api/training/sdp_versions/latest/smd_data_parallel_tensorflow.rst b/doc/api/training/sdp_versions/latest/smd_data_parallel_tensorflow.rst index 48f729d9a2..c615ad67aa 100644 --- a/doc/api/training/sdp_versions/latest/smd_data_parallel_tensorflow.rst +++ b/doc/api/training/sdp_versions/latest/smd_data_parallel_tensorflow.rst @@ -155,10 +155,6 @@ script you will have for distributed training with the library. TensorFlow API ============== -.. rubric:: Supported versions - -**TensorFlow 2.3.1, 2.4.1, 2.5.0** - .. function:: smdistributed.dataparallel.tensorflow.init() Initialize ``smdistributed.dataparallel``. Must be called at the diff --git a/doc/api/training/smd_data_parallel_release_notes/smd_data_parallel_change_log.rst b/doc/api/training/smd_data_parallel_release_notes/smd_data_parallel_change_log.rst index 5357a2166c..8de575a218 100644 --- a/doc/api/training/smd_data_parallel_release_notes/smd_data_parallel_change_log.rst +++ b/doc/api/training/smd_data_parallel_release_notes/smd_data_parallel_change_log.rst @@ -1,6 +1,43 @@ -Sagemaker Distributed Data Parallel 1.2.1 Release Notes +.. _sdp_1.2.2_release_note: + +SageMaker Distributed Data Parallel 1.2.2 Release Notes ======================================================= +*Date: November. 24. 2021* + +**New Features** + +* Added support for PyTorch 1.10 +* PyTorch ``no_sync`` API support for DistributedDataParallel +* Timeout when training stalls due to allreduce and broadcast collective calls + +**Bug Fixes** + +* Fixed a bug that would impact correctness in the mixed dtype case +* Fixed a bug related to the timeline writer that would cause a crash when SageMaker Profiler is enabled for single node jobs. 
+ +**Improvements** + +* Performance optimizations for small models on small clusters + +**Migration to AWS Deep Learning Containers** + +This version passed benchmark testing and is migrated to the following AWS Deep Learning Containers: + +- PyTorch 1.10 DLC release: `v1.0-pt-sagemaker-1.10.0-py38 `_ + + .. code:: + + 763104351884.dkr.ecr..amazonaws.com/pytorch-training:1.10.0-gpu-py38-cu113-ubuntu20.04-sagemaker + +---- + +Release History +=============== + +SageMaker Distributed Data Parallel 1.2.1 Release Notes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + *Date: June. 29. 2021* **New Features:** @@ -28,12 +65,8 @@ This version passed benchmark testing and is migrated to the following AWS Deep 763104351884.dkr.ecr..amazonaws.com/tensorflow-training:2.5.0-gpu-py37-cu112-ubuntu18.04-v1.0 ----- - -Release History -=============== -Sagemaker Distributed Data Parallel 1.2.0 Release Notes +SageMaker Distributed Data Parallel 1.2.0 Release Notes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - New features @@ -46,7 +79,7 @@ Sagemaker Distributed Data Parallel 1.2.0 Release Notes AllReduce. For best performance, it is recommended you use an instance type that supports Amazon Elastic Fabric Adapter (ml.p3dn.24xlarge and ml.p4d.24xlarge) when you train a model using - Sagemaker Distributed data parallel. + SageMaker Distributed data parallel. **Bug Fixes:** @@ -54,7 +87,7 @@ Sagemaker Distributed Data Parallel 1.2.0 Release Notes ---- -Sagemaker Distributed Data Parallel 1.1.2 Release Notes +SageMaker Distributed Data Parallel 1.1.2 Release Notes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Bug Fixes @@ -68,7 +101,7 @@ Sagemaker Distributed Data Parallel 1.1.2 Release Notes **Known Issues:** -- Sagemaker Distributed data parallel has slower throughput than NCCL +- SageMaker Distributed data parallel has slower throughput than NCCL when run using a single node. For the best performance, use multi-node distributed training with smdistributed.dataparallel. Use a single node only for experimental runs while preparing your @@ -76,7 +109,7 @@ Sagemaker Distributed Data Parallel 1.1.2 Release Notes ---- -Sagemaker Distributed Data Parallel 1.1.1 Release Notes +SageMaker Distributed Data Parallel 1.1.1 Release Notes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - New Features @@ -103,7 +136,7 @@ Sagemaker Distributed Data Parallel 1.1.1 Release Notes ---- -Sagemaker Distributed Data Parallel 1.1.0 Release Notes +SageMaker Distributed Data Parallel 1.1.0 Release Notes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - New Features @@ -139,7 +172,7 @@ SDK Guide ---- -Sagemaker Distributed Data Parallel 1.0.0 Release Notes +SageMaker Distributed Data Parallel 1.0.0 Release Notes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - First Release diff --git a/doc/conf.py b/doc/conf.py index 4976e809f9..a229c6453a 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -16,7 +16,7 @@ import pkg_resources from datetime import datetime -project = u"sagemaker" +project = "sagemaker" version = pkg_resources.require(project)[0].version # Add any Sphinx extension module names here, as strings. They can be extensions @@ -38,7 +38,7 @@ source_suffix = ".rst" # The suffix of source filenames. master_doc = "index" # The master toctree document. -copyright = u"%s, Amazon" % datetime.now().year +copyright = "%s, Amazon" % datetime.now().year # The full version, including alpha/beta/rc tags. 
 release = version
diff --git a/doc/overview.rst b/doc/overview.rst
index 02290ff94c..0224c8eb3f 100644
--- a/doc/overview.rst
+++ b/doc/overview.rst
@@ -684,6 +684,155 @@ For more detailed explanations of the classes that this library provides for aut
 - `API docs for HyperparameterTuner and parameter range classes `__
 - `API docs for analytics classes `__
 
+**********************************
+SageMaker Asynchronous Inference
+**********************************
+Amazon SageMaker Asynchronous Inference is a new capability in SageMaker that queues incoming requests and processes them asynchronously.
+This option is ideal for requests with large payload sizes (up to 1GB), long processing times, and near real-time latency requirements.
+You can configure Asynchronous Inference to scale the instance count to zero when there are no requests to process, thereby saving costs.
+More information about SageMaker Asynchronous Inference can be found in the `AWS documentation `__.
+
+To deploy an asynchronous inference endpoint, you will need to create an ``AsyncInferenceConfig`` object.
+If you create an ``AsyncInferenceConfig`` without specifying any of its arguments, the default ``S3OutputPath`` will
+be ``s3://sagemaker-{REGION}-{ACCOUNTID}/async-endpoint-outputs/{UNIQUE-JOB-NAME}`` (example shown below):
+
+.. code:: python
+
+    from sagemaker.async_inference import AsyncInferenceConfig
+
+    # Create an empty AsyncInferenceConfig object to use default values
+    async_config = AsyncInferenceConfig()
+
+Or you can specify configurations in ``AsyncInferenceConfig`` as you like. All of those configuration parameters
+are optional, but if you don’t specify ``output_path``, Amazon SageMaker will use the default ``S3OutputPath``
+mentioned above (example shown below):
+
+.. code:: python
+
+    # Specify S3OutputPath, MaxConcurrentInvocationsPerInstance and NotificationConfig in the async config object
+    async_config = AsyncInferenceConfig(
+        output_path="s3://{s3_bucket}/{bucket_prefix}/output",
+        max_concurrent_invocations_per_instance=10,
+        notification_config={
+            "SuccessTopic": "arn:aws:sns:aws-region:account-id:topic-name",
+            "ErrorTopic": "arn:aws:sns:aws-region:account-id:topic-name",
+        },
+    )
+
+Then use the ``AsyncInferenceConfig`` in the estimator's ``deploy()`` method to deploy an asynchronous inference endpoint:
+
+.. code:: python
+
+    # Deploys the model that was generated by fit() to a SageMaker asynchronous inference endpoint
+    async_predictor = estimator.deploy(async_inference_config=async_config)
+
+After deployment is complete, ``deploy()`` returns an ``AsyncPredictor`` object. To perform asynchronous inference, you first
+need to upload data to Amazon S3 and then use the ``predict_async()`` method with the S3 URI as the input. It will return an
+``AsyncInferenceResponse`` object:
+
+.. code:: python
+
+    # Upload data to S3 bucket then use that as input
+    async_response = async_predictor.predict_async(input_path=input_s3_path)
+
+The Amazon SageMaker SDK also enables you to serialize the data and pass the payload directly to the
+``predict_async()`` method. For this pattern of invocation, the Amazon SageMaker SDK will upload the data to an Amazon
+S3 bucket under ``s3://sagemaker-{REGION}-{ACCOUNTID}/async-endpoint-inputs/``.
+
+.. code:: python
+
+    # Serializes data and makes a prediction request to the SageMaker asynchronous endpoint
+    async_response = async_predictor.predict_async(data=data)
+
+Then you can work on other tasks while the inference completes.
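+Because each ``predict_async()`` call returns as soon as the request is queued, you can also fan out
+a batch of requests up front and resolve them later. Below is a minimal sketch of that pattern;
+``payloads`` is a hypothetical list of records that the configured serializer can handle:
+
+.. code:: python
+
+    from sagemaker.async_inference import WaiterConfig
+
+    # Queue several requests without blocking; each call immediately
+    # returns an AsyncInferenceResponse object
+    responses = [async_predictor.predict_async(data=payload) for payload in payloads]
+
+    # Later, poll for each result; the default WaiterConfig checks
+    # every 15 seconds, up to 60 times
+    results = [response.get_result(WaiterConfig()) for response in responses]
+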
+Once the inference is complete, you can check the result using the ``AsyncInferenceResponse`` object:
+
+.. code:: python
+
+    # Switch back to check the result
+    result = async_response.get_result()
+
+Alternatively, if you would like to check for a result periodically and return it upon generation, use the
+``predict()`` method:
+
+.. code:: python
+
+    # Use predict() to wait for the result
+    response = async_predictor.predict(data=data)
+
+    # Or use an Amazon S3 input path
+    response = async_predictor.predict(input_path=input_s3_path)
+
+Clean up the endpoint and model if needed after inference:
+
+.. code:: python
+
+    # Tears down the SageMaker endpoint and endpoint configuration
+    async_predictor.delete_endpoint()
+
+    # Deletes the SageMaker model
+    async_predictor.delete_model()
+
+For more details about Asynchronous Inference,
+see the API docs for `Asynchronous Inference `__
+
+*******************************
+SageMaker Serverless Inference
+*******************************
+Amazon SageMaker Serverless Inference enables you to easily deploy machine learning models for inference without having
+to configure or manage the underlying infrastructure. After you train a model, you can deploy it to an Amazon SageMaker
+serverless endpoint and then invoke the endpoint to get inference results back. More information about
+SageMaker Serverless Inference can be found in the `AWS documentation `__.
+
+To deploy a serverless endpoint, you will need to create a ``ServerlessInferenceConfig``.
+If you create a ``ServerlessInferenceConfig`` without specifying any of its arguments, the default ``MemorySizeInMB`` will be **2048** and
+the default ``MaxConcurrency`` will be **5**:
+
+.. code:: python
+
+    from sagemaker.serverless import ServerlessInferenceConfig
+
+    # Create an empty ServerlessInferenceConfig object to use default values
+    serverless_config = ServerlessInferenceConfig()
+
+Or you can specify ``MemorySizeInMB`` and ``MaxConcurrency`` in ``ServerlessInferenceConfig`` (example shown below):
+
+.. code:: python
+
+    # Specify MemorySizeInMB and MaxConcurrency in the serverless config object
+    serverless_config = ServerlessInferenceConfig(
+        memory_size_in_mb=4096,
+        max_concurrency=10,
+    )
+
+Then use the ``ServerlessInferenceConfig`` in the estimator's ``deploy()`` method to deploy a serverless endpoint:
+
+.. code:: python
+
+    # Deploys the model that was generated by fit() to a SageMaker serverless endpoint
+    serverless_predictor = estimator.deploy(serverless_inference_config=serverless_config)
+
+After deployment is complete, you can use the predictor's ``predict()`` method to invoke the serverless endpoint just like
+you would a real-time endpoint:
+
+.. code:: python
+
+    # Serializes data and makes a prediction request to the SageMaker serverless endpoint
+    response = serverless_predictor.predict(data)
+
+Clean up the endpoint and model if needed after inference:
+
+.. 
code:: python + + # Tears down the SageMaker endpoint and endpoint configuration + serverless_predictor.delete_endpoint() + + # Deletes the SageMaker model + serverless_predictor.delete_model() + +For more details about ``ServerlessInferenceConfig``, +see the API docs for `Serverless Inference `__ + ************************* SageMaker Batch Transform ************************* diff --git a/setup.py b/setup.py index 5b6c31fd3c..3c4728c96e 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ def read_version(): # Declare minimal set for installation required_packages = [ "attrs", - "boto3>=1.20.18", + "boto3>=1.20.21", "google-pasta", "numpy>=1.9.0", "protobuf>=3.1", diff --git a/src/sagemaker/analytics.py b/src/sagemaker/analytics.py index 7ddfc29c7e..1fc45b99d8 100644 --- a/src/sagemaker/analytics.py +++ b/src/sagemaker/analytics.py @@ -261,7 +261,11 @@ def training_job_summaries(self, force_refresh=False): ) new_output = raw_result["TrainingJobSummaries"] output.extend(new_output) - logger.debug("Got %d more TrainingJobs. Total so far: %d", len(new_output), len(output)) + logger.debug( + "Got %d more TrainingJobs. Total so far: %d", + len(new_output), + len(output), + ) if ("NextToken" in raw_result) and (len(new_output) > 0): next_args["NextToken"] = raw_result["NextToken"] else: @@ -344,7 +348,7 @@ def _determine_timeinterval(self): a dict with the `start_time` and `end_time`. """ description = self._sage_client.describe_training_job(TrainingJobName=self.name) - start_time = self._start_time or description[u"TrainingStartTime"] # datetime object + start_time = self._start_time or description["TrainingStartTime"] # datetime object # Incrementing end time by 1 min since CloudWatch drops seconds before finding the logs. # This results in logs being searched in the time range in which the correct log line was # not present. @@ -353,7 +357,7 @@ def _determine_timeinterval(self): # CW will consider end time as 2018-10-22 08:25 and will not be able to search the # correct log. end_time = self._end_time or description.get( - u"TrainingEndTime", datetime.datetime.utcnow() + "TrainingEndTime", datetime.datetime.utcnow() ) + datetime.timedelta(minutes=1) return {"start_time": start_time, "end_time": end_time} diff --git a/src/sagemaker/async_inference/__init__.py b/src/sagemaker/async_inference/__init__.py new file mode 100644 index 0000000000..471e7d2241 --- /dev/null +++ b/src/sagemaker/async_inference/__init__.py @@ -0,0 +1,19 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+"""Imports the classes in this module to simplify customer imports""" + +from __future__ import absolute_import + +from sagemaker.async_inference.async_inference_config import AsyncInferenceConfig # noqa: F401 +from sagemaker.async_inference.waiter_config import WaiterConfig # noqa: F401 +from sagemaker.async_inference.async_inference_response import AsyncInferenceResponse # noqa: F401 diff --git a/src/sagemaker/async_inference/async_inference_config.py b/src/sagemaker/async_inference/async_inference_config.py new file mode 100644 index 0000000000..f5e2cb8f57 --- /dev/null +++ b/src/sagemaker/async_inference/async_inference_config.py @@ -0,0 +1,82 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""A class for AsyncInferenceConfig + +Used for configuring async inference endpoint. Use AsyncInferenceConfig when deploying +the model to the async inference endpoints. +""" +from __future__ import print_function, absolute_import + + +class AsyncInferenceConfig(object): + """Configuration object passed in when deploying models to Amazon SageMaker Endpoints. + + This object specifies configuration related to async endpoint. Use this configuration + when trying to create async endpoint and make async inference + """ + + def __init__( + self, + output_path=None, + max_concurrent_invocations_per_instance=None, + kms_key_id=None, + notification_config=None, + ): + """Initialize an AsyncInferenceConfig object for async inference configuration. + + Args: + output_path (str): Optional. The Amazon S3 location that endpoints upload + inference responses to. If no value is provided, Amazon SageMaker will + use default Amazon S3 Async Inference output path. (Default: None) + max_concurrent_invocations_per_instance (int): Optional. The maximum number of + concurrent requests sent by the SageMaker client to the model container. If + no value is provided, Amazon SageMaker will choose an optimal value for you. + (Default: None) + kms_key_id (str): Optional. The Amazon Web Services Key Management Service + (Amazon Web Services KMS) key that Amazon SageMaker uses to encrypt the + asynchronous inference output in Amazon S3. (Default: None) + notification_config (dict): Optional. Specifies the configuration for notifications + of inference results for asynchronous inference. Only one notification is generated + per invocation request (Default: None): + * success_topic (str): Amazon SNS topic to post a notification to when inference + completes successfully. If no topic is provided, no notification is sent on success. + The key in notification_config is 'SuccessTopic'. + * error_topic (str): Amazon SNS topic to post a notification to when inference + fails. If no topic is provided, no notification is sent on failure. + The key in notification_config is 'ErrorTopic'. 
+ """ + self.output_path = output_path + self.max_concurrent_invocations_per_instance = max_concurrent_invocations_per_instance + self.kms_key_id = kms_key_id + self.notification_config = notification_config + + def _to_request_dict(self): + """Generates a request dictionary using the parameters provided to the class.""" + request_dict = { + "OutputConfig": { + "S3OutputPath": self.output_path, + }, + } + + if self.max_concurrent_invocations_per_instance: + request_dict["ClientConfig"] = { + "MaxConcurrentInvocationsPerInstance": self.max_concurrent_invocations_per_instance + } + + if self.kms_key_id: + request_dict["OutputConfig"]["KmsKeyId"] = self.kms_key_id + + if self.notification_config: + request_dict["OutputConfig"]["NotificationConfig"] = self.notification_config + + return request_dict diff --git a/src/sagemaker/async_inference/async_inference_response.py b/src/sagemaker/async_inference/async_inference_response.py new file mode 100644 index 0000000000..c0e4f8a83d --- /dev/null +++ b/src/sagemaker/async_inference/async_inference_response.py @@ -0,0 +1,98 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""A class for AsyncInferenceResponse""" + +from __future__ import print_function, absolute_import + +from botocore.exceptions import ClientError +from sagemaker.s3 import parse_s3_url +from sagemaker.async_inference import WaiterConfig +from sagemaker.exceptions import ObjectNotExistedError, UnexpectedClientError + + +class AsyncInferenceResponse(object): + """Response from Async Inference endpoint + + This response object provides a method to check for an async inference result in the + Amazon S3 output path specified. If result object exists in that path, get and return + the result + """ + + def __init__( + self, + predictor_async, + output_path, + ): + """Initialize an AsyncInferenceResponse object. + + AsyncInferenceResponse can help users to get async inference result + from the Amazon S3 output path + + Args: + predictor_async (sagemaker.predictor.AsyncPredictor): The ``AsyncPredictor`` + that return this response. + output_path (str): The Amazon S3 location that endpoints upload inference responses + to. + """ + self.predictor_async = predictor_async + self.output_path = output_path + self._result = None + + def get_result( + self, + waiter_config=None, + ): + """Get async inference result in the Amazon S3 output path specified + + Args: + waiter_config (sagemaker.async_inference.waiter_config.WaiterConfig): Configuration + for the waiter. The pre-defined value for the delay between poll is 15 seconds + and the default max attempts is 60 + Raises: + ValueError: If a wrong type of object is provided as ``waiter_config`` + Returns: + object: Inference result in the given Amazon S3 output path. If a deserializer was + specified when creating the AsyncPredictor, the result of the deserializer is + returned. Otherwise the response returns the sequence of bytes + as is. 
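+
+        Example (a sketch; assumes ``async_response`` came from a ``predict_async()``
+        call and that the endpoint may still be processing)::
+
+            from sagemaker.async_inference import WaiterConfig
+
+            # Poll every 10 seconds, up to 20 times, before raising PollingTimeoutError
+            result = async_response.get_result(WaiterConfig(max_attempts=20, delay=10))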
+ """ + if waiter_config is not None and not isinstance(waiter_config, WaiterConfig): + raise ValueError("waiter_config should be a WaiterConfig object") + + if self._result is None: + if waiter_config is None: + self._result = self._get_result_from_s3(self.output_path) + else: + self._result = self.predictor_async._wait_for_output( + self.output_path, waiter_config + ) + return self._result + + def _get_result_from_s3( + self, + output_path, + ): + """Get inference result from the output Amazon S3 path""" + bucket, key = parse_s3_url(output_path) + try: + response = self.predictor_async.s3_client.get_object(Bucket=bucket, Key=key) + return self.predictor_async.predictor._handle_response(response) + except ClientError as ex: + if ex.response["Error"]["Code"] == "NoSuchKey": + raise ObjectNotExistedError( + message="Inference could still be running", + output_path=output_path, + ) + raise UnexpectedClientError( + message=ex.response["Error"]["Message"], + ) diff --git a/src/sagemaker/async_inference/waiter_config.py b/src/sagemaker/async_inference/waiter_config.py new file mode 100644 index 0000000000..7c46f2fd6c --- /dev/null +++ b/src/sagemaker/async_inference/waiter_config.py @@ -0,0 +1,47 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""A class for WaiterConfig used in async inference + +Use it when using async inference and wait for the result. +""" + +from __future__ import absolute_import + + +class WaiterConfig(object): + """Configuration object passed in when using async inference and wait for the result.""" + + def __init__( + self, + max_attempts=60, + delay=15, + ): + """Initialize a WaiterConfig object that provides parameters to control waiting behavior. + + Args: + max_attempts (int): The maximum number of attempts to be made. If the max attempts is + exceeded, Amazon SageMaker will raise ``PollingTimeoutError``. (Default: 60) + delay (int): The amount of time in seconds to wait between attempts. (Default: 15) + """ + + self.max_attempts = max_attempts + self.delay = delay + + def _to_request_dict(self): + """Generates a dictionary using the parameters provided to the class.""" + waiter_dict = { + "Delay": self.delay, + "MaxAttempts": self.max_attempts, + } + + return waiter_dict diff --git a/src/sagemaker/clarify.py b/src/sagemaker/clarify.py index 006cc4846c..a06ca79f12 100644 --- a/src/sagemaker/clarify.py +++ b/src/sagemaker/clarify.py @@ -22,11 +22,17 @@ import tempfile from abc import ABC, abstractmethod from sagemaker import image_uris, s3, utils +from sagemaker.deprecations import deprecation_warning from sagemaker.processing import ProcessingInput, ProcessingOutput, Processor logger = logging.getLogger(__name__) +@deprecation_warning( + msg="s3_data_distribution_type parameter will no longer be supported. Everything else will" + " remain as is", + date="15 Mar 2022", +) class DataConfig: """Config object related to configurations of the input and output dataset.""" @@ -58,8 +64,8 @@ def __init__( dataset format is JSONLines. 
dataset_type (str): Format of the dataset. Valid values are "text/csv" for CSV, "application/jsonlines" for JSONLines, and "application/x-parquet" for Parquet. - s3_data_distribution_type (str): Valid options are "FullyReplicated" or - "ShardedByS3Key". + s3_data_distribution_type (str): Deprecated. Only valid option is "FullyReplicated". + Any other value is ignored. s3_compression_type (str): Valid options are "None" or "Gzip". joinsource (str): The name or index of the column in the dataset that acts as an identifier column (for instance, while performing a join). This column is only @@ -80,7 +86,13 @@ def __init__( self.s3_data_input_path = s3_data_input_path self.s3_output_path = s3_output_path self.s3_analysis_config_output_path = s3_analysis_config_output_path - self.s3_data_distribution_type = s3_data_distribution_type + if s3_data_distribution_type != "FullyReplicated": + logger.warning( + "s3_data_distribution_type parameter, set to %s, is being ignored. Only" + " valid option is FullyReplicated", + s3_data_distribution_type, + ) + self.s3_data_distribution_type = "FullyReplicated" self.s3_compression_type = s3_compression_type self.label = label self.headers = headers @@ -111,33 +123,58 @@ def __init__( """Initializes a configuration of the sensitive groups in the dataset. Args: - label_values_or_threshold (Any): List of label values or threshold to indicate positive - outcome used for bias metrics. - facet_name (str or [str]): String or List of strings of sensitive attribute(s) in the - input data for which we like to compare metrics. - facet_values_or_threshold (list): Optional list of values to form a sensitive group or - threshold for a numeric facet column that defines the lower bound of a sensitive - group. Defaults to considering each possible value as sensitive group and - computing metrics vs all the other examples. - If facet_name is a list, this needs to be None or a List consisting of lists or None - with the same length as facet_name list. + label_values_or_threshold ([int or float or str]): List of label value(s) or threshold + to indicate positive outcome used for bias metrics. Dependency on the problem type, + + * Binary problem: The list shall include one positive value. + * Categorical problem: The list shall include one or more (but not all) categories + which are the positive values. + * Regression problem: The list shall include one threshold that defines the lower + bound of positive values. + + facet_name (str or int or [str] or [int]): Sensitive attribute column name (or index in + the input data) for which you like to compute bias metrics. It can also be a list + of names (or indexes) if you like to compute for multiple sensitive attributes. + facet_values_or_threshold ([int or float or str] or [[int or float or str]]): + The parameter indicates the sensitive group. If facet_name is a scalar, then it can + be None or a list. Depending on the data type of the facet column, + + * Binary: None means computing the bias metrics for each binary value. Or add one + binary value to the list, to compute its bias metrics only. + * Categorical: None means computing the bias metrics for each category. Or add one + or more (but not all) categories to the list, to compute their bias metrics v.s. + the other categories. + * Continuous: The list shall include one and only one threshold which defines the + lower bound of a sensitive group. + + If facet_name is a list, then it can be None if all facets are of binary type or + categorical type. 
Otherwise it shall be a list, and each element is the values or
+                threshold of the corresponding facet.
+
             group_name (str): Optional column name or index to indicate a group column to be used
                 for the bias metric 'Conditional Demographic Disparity in Labels - CDDL' or
                 'Conditional Demographic Disparity in Predicted Labels - CDDPL'.
         """
-        if isinstance(facet_name, str):
+        if isinstance(facet_name, list):
+            assert len(facet_name) > 0, "Please provide at least one facet"
+            if facet_values_or_threshold is None:
+                facet_list = [
+                    {"name_or_index": single_facet_name} for single_facet_name in facet_name
+                ]
+            elif len(facet_values_or_threshold) == len(facet_name):
+                facet_list = []
+                for i, single_facet_name in enumerate(facet_name):
+                    facet = {"name_or_index": single_facet_name}
+                    if facet_values_or_threshold is not None:
+                        _set(facet_values_or_threshold[i], "value_or_threshold", facet)
+                    facet_list.append(facet)
+            else:
+                raise ValueError(
+                    "The number of facet names doesn't match the number of facet values"
+                )
+        else:
             facet = {"name_or_index": facet_name}
             _set(facet_values_or_threshold, "value_or_threshold", facet)
             facet_list = [facet]
-        elif facet_values_or_threshold is None or len(facet_name) == len(facet_values_or_threshold):
-            facet_list = []
-            for i, single_facet_name in enumerate(facet_name):
-                facet = {"name_or_index": single_facet_name}
-                if facet_values_or_threshold is not None:
-                    _set(facet_values_or_threshold[i], "value_or_threshold", facet)
-                facet_list.append(facet)
-        else:
-            raise ValueError("Wrong combination of argument values passed")
         self.analysis_config = {
             "label_values_or_threshold": label_values_or_threshold,
             "facet": facet_list,
diff --git a/src/sagemaker/estimator.py b/src/sagemaker/estimator.py
index d66b194309..2e0e99ef05 100644
--- a/src/sagemaker/estimator.py
+++ b/src/sagemaker/estimator.py
@@ -1112,8 +1112,8 @@ def logs(self):
     def deploy(
         self,
-        initial_instance_count,
-        instance_type,
+        initial_instance_count=None,
+        instance_type=None,
         serializer=None,
         deserializer=None,
         accelerator_type=None,
@@ -1124,6 +1124,8 @@
         kms_key=None,
         data_capture_config=None,
         tags=None,
+        serverless_inference_config=None,
+        async_inference_config=None,
         **kwargs,
     ):
         """Deploy the trained model to an Amazon SageMaker endpoint.
@@ -1134,10 +1136,14 @@
             http://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works-training.html
 
         Args:
-            initial_instance_count (int): Minimum number of EC2 instances to
-                deploy to an endpoint for prediction.
-            instance_type (str): Type of EC2 instance to deploy to an endpoint
-                for prediction, for example, 'ml.c4.xlarge'.
+            initial_instance_count (int): The initial number of instances to run
+                in the ``Endpoint`` created from this ``Model``. If not using
+                serverless inference, then it needs to be a number greater than
+                or equal to 1 (default: None)
+            instance_type (str): The EC2 instance type to deploy this Model to.
+                For example, 'ml.p2.xlarge', or 'local' for local mode. If not using
+                serverless inference, then it is required to deploy a model.
+                (default: None)
             serializer (:class:`~sagemaker.serializers.BaseSerializer`): A
                 serializer object, used to encode data for an inference endpoint
                 (default: None). If ``serializer`` is not None, then
@@ -1170,6 +1176,17 @@
             data_capture_config (sagemaker.model_monitor.DataCaptureConfig): Specifies
                 configuration related to Endpoint data capture for use with
                 Amazon SageMaker Model Monitoring. Default: None.
+            async_inference_config (sagemaker.async_inference.AsyncInferenceConfig): Specifies
+                configuration related to async inference. Use this configuration when trying
+                to create an async endpoint and make async inference. If an empty config object
+                is passed through, the default config will be used to deploy the async endpoint.
+                A real-time endpoint is deployed if it's None. (default: None)
+            serverless_inference_config (sagemaker.serverless.ServerlessInferenceConfig):
+                Specifies configuration related to a serverless endpoint. Use this configuration
+                when trying to create a serverless endpoint and make serverless inference. If an
+                empty object is passed through, pre-defined values in the
+                ``ServerlessInferenceConfig`` class will be used to deploy the serverless
+                endpoint. An instance-based endpoint is deployed if it's None. (default: None)
             tags(List[dict[str, str]]): Optional. The list of tags to attach to this specific
                 endpoint. Example:
                 >>> tags = [{'Key': 'tagname', 'Value': 'tagvalue'}]
@@ -1187,6 +1204,7 @@
             endpoint and obtain inferences.
         """
         removed_kwargs("update_endpoint", kwargs)
+        is_serverless = serverless_inference_config is not None
         self._ensure_latest_training_job()
         self._ensure_base_job_name()
         default_name = name_from_base(self.base_job_name)
@@ -1194,7 +1212,7 @@
         model_name = model_name or default_name
 
         self.deploy_instance_type = instance_type
-        if use_compiled_model:
+        if use_compiled_model and not is_serverless:
             family = "_".join(instance_type.split(".")[:-1])
             if family not in self._compiled_models:
                 raise ValueError(
@@ -1223,6 +1241,8 @@
             wait=wait,
             kms_key=kms_key,
             data_capture_config=data_capture_config,
+            serverless_inference_config=serverless_inference_config,
+            async_inference_config=async_inference_config,
         )
 
     def register(
diff --git a/src/sagemaker/exceptions.py b/src/sagemaker/exceptions.py
index 29ce854068..1f17d5cce4 100644
--- a/src/sagemaker/exceptions.py
+++ b/src/sagemaker/exceptions.py
@@ -21,3 +21,41 @@ def __init__(self, message, allowed_statuses, actual_status):
         self.allowed_statuses = allowed_statuses
         self.actual_status = actual_status
         super(UnexpectedStatusException, self).__init__(message)
+
+
+class AsyncInferenceError(Exception):
+    """The base exception class for Async Inference exceptions."""
+
+    fmt = "An unspecified error occurred"
+
+    def __init__(self, **kwargs):
+        msg = self.fmt.format(**kwargs)
+        Exception.__init__(self, msg)
+        self.kwargs = kwargs
+
+
+class ObjectNotExistedError(AsyncInferenceError):
+    """Raised when the Amazon S3 object does not exist in the given path"""
+
+    fmt = "Object does not exist at {output_path}. {message}"
+
+    def __init__(self, message, output_path):
+        super().__init__(message=message, output_path=output_path)
+
+
+class PollingTimeoutError(AsyncInferenceError):
+    """Raised when polling takes longer than expected and there is no result object in the Amazon S3 bucket yet"""
+
+    fmt = "No result at {output_path} after polling for {seconds} seconds. 
{message}" + + def __init__(self, message, output_path, seconds): + super().__init__(message=message, output_path=output_path, seconds=seconds) + + +class UnexpectedClientError(AsyncInferenceError): + """Raised when ClientError's error code is not expected""" + + fmt = "Encountered unexpected client error: {message}" + + def __init__(self, message): + super().__init__(message=message) diff --git a/src/sagemaker/lineage/action.py b/src/sagemaker/lineage/action.py index 67ba6d5db0..9046a3ccf2 100644 --- a/src/sagemaker/lineage/action.py +++ b/src/sagemaker/lineage/action.py @@ -13,13 +13,22 @@ """This module contains code to create and manage SageMaker ``Actions``.""" from __future__ import absolute_import -from typing import Optional, Iterator +from typing import Optional, Iterator, List from datetime import datetime -from sagemaker import Session +from sagemaker.session import Session from sagemaker.apiutils import _base_types from sagemaker.lineage import _api_types, _utils from sagemaker.lineage._api_types import ActionSource, ActionSummary +from sagemaker.lineage.artifact import Artifact + +from sagemaker.lineage.query import ( + LineageQuery, + LineageFilter, + LineageSourceEnum, + LineageEntityEnum, + LineageQueryDirectionEnum, +) class Action(_base_types.Record): @@ -116,7 +125,7 @@ def delete(self, disassociate: bool = False): self._invoke_api(self._boto_delete_method, self._boto_delete_members) @classmethod - def load(cls, action_name: str, sagemaker_session: Session = None) -> "Action": + def load(cls, action_name: str, sagemaker_session=None) -> "Action": """Load an existing action and return an ``Action`` object representing it. Args: @@ -250,3 +259,86 @@ def list( max_results=max_results, next_token=next_token, ) + + def artifacts( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.BOTH + ) -> List[Artifact]: + """Use a lineage query to retrieve all artifacts that use this action. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of Artifacts: Artifacts. + """ + query_filter = LineageFilter(entities=[LineageEntityEnum.ARTIFACT]) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.action_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + + +class ModelPackageApprovalAction(Action): + """An Amazon SageMaker model package approval action, which is part of a SageMaker lineage.""" + + def datasets( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS + ) -> List[Artifact]: + """Use a lineage query to retrieve all upstream datasets that use this action. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of Artifacts: Artifacts representing a dataset. + """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.DATASET] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.action_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + + def model_package(self): + """Get model package from model package approval action. + + Returns: + Model package. 
+ """ + source_uri = self.source.source_uri + if source_uri is None: + return None + + model_package_name = source_uri.split("/")[1] + return self.sagemaker_session.sagemaker_client.describe_model_package( + ModelPackageName=model_package_name + ) + + def endpoints( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.DESCENDANTS + ) -> List: + """Use a lineage query to retrieve downstream endpoint contexts that use this action. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of Contexts: Contexts representing an endpoint. + """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.CONTEXT], sources=[LineageSourceEnum.ENDPOINT] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.action_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] diff --git a/src/sagemaker/lineage/artifact.py b/src/sagemaker/lineage/artifact.py index fc41808099..3921562beb 100644 --- a/src/sagemaker/lineage/artifact.py +++ b/src/sagemaker/lineage/artifact.py @@ -143,10 +143,10 @@ def load(cls, artifact_arn: str, sagemaker_session=None) -> "Artifact": return artifact def downstream_trials(self, sagemaker_session=None) -> list: - """Retrieve all trial runs which that use this artifact. + """Use the lineage API to retrieve all downstream trials that use this artifact. Args: - sagemaker_session (obj): Sagemaker Sesssion to use. If not provided a default session + sagemaker_session (obj): Sagemaker Session to use. If not provided a default session will be created. Returns: @@ -159,6 +159,54 @@ def downstream_trials(self, sagemaker_session=None) -> list: ) trial_component_arns: list = list(map(lambda x: x.destination_arn, outgoing_associations)) + return self._get_trial_from_trial_component(trial_component_arns) + + def downstream_trials_v2(self) -> list: + """Use a lineage query to retrieve all downstream trials that use this artifact. + + Returns: + [Trial]: A list of SageMaker `Trial` objects. + """ + return self._trials(direction=LineageQueryDirectionEnum.DESCENDANTS) + + def upstream_trials(self) -> List: + """Use the lineage query to retrieve all upstream trials that use this artifact. + + Returns: + [Trial]: A list of SageMaker `Trial` objects. + """ + return self._trials(direction=LineageQueryDirectionEnum.ASCENDANTS) + + def _trials( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.BOTH + ) -> List: + """Use the lineage query to retrieve all trials that use this artifact. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + [Trial]: A list of SageMaker `Trial` objects. + """ + query_filter = LineageFilter(entities=[LineageEntityEnum.TRIAL_COMPONENT]) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.artifact_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + trial_component_arns: list = list(map(lambda x: x.arn, query_result.vertices)) + return self._get_trial_from_trial_component(trial_component_arns) + + def _get_trial_from_trial_component(self, trial_component_arns: list) -> List: + """Retrieve all upstream trial runs which that use the trial component arns. + + Args: + trial_component_arns (list): list of trial component arns + + Returns: + [Trial]: A list of SageMaker `Trial` objects. 
+ """ if not trial_component_arns: # no outgoing associations for this artifact return [] @@ -170,7 +218,7 @@ def downstream_trials(self, sagemaker_session=None) -> list: num_search_batches = math.ceil(len(trial_component_arns) % max_search_by_arn) trial_components: list = [] - sagemaker_session = sagemaker_session or _utils.default_session() + sagemaker_session = self.sagemaker_session or _utils.default_session() sagemaker_client = sagemaker_session.sagemaker_client for i in range(num_search_batches): @@ -335,6 +383,17 @@ def list( sagemaker_session=sagemaker_session, ) + def s3_uri_artifacts(self, s3_uri: str) -> dict: + """Retrieve a list of artifacts that use provided s3 uri. + + Args: + s3_uri (str): A S3 URI. + + Returns: + A list of ``Artifacts`` + """ + return self.sagemaker_session.sagemaker_client.list_artifacts(SourceUri=s3_uri) + class ModelArtifact(Artifact): """A SageMaker lineage artifact representing a model. @@ -349,7 +408,7 @@ def endpoints(self) -> list: """Get association summaries for endpoints deployed with this model. Returns: - [AssociationSummary]: A list of associations repesenting the endpoints using the model. + [AssociationSummary]: A list of associations representing the endpoints using the model. """ endpoint_development_actions: Iterator = Association.list( source_arn=self.artifact_arn, @@ -522,3 +581,69 @@ def endpoint_contexts( for vertex in query_result.vertices: endpoint_contexts.append(vertex.to_lineage_object()) return endpoint_contexts + + def upstream_datasets(self) -> List[Artifact]: + """Use the lineage query to retrieve upstream artifacts that use this dataset artifact. + + Returns: + list of Artifacts: Artifacts representing an dataset. + """ + return self._datasets(direction=LineageQueryDirectionEnum.ASCENDANTS) + + def downstream_datasets(self) -> List[Artifact]: + """Use the lineage query to retrieve downstream artifacts that use this dataset. + + Returns: + list of Artifacts: Artifacts representing an dataset. + """ + return self._datasets(direction=LineageQueryDirectionEnum.DESCENDANTS) + + def _datasets( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.BOTH + ) -> List[Artifact]: + """Use the lineage query to retrieve all artifacts that use this dataset. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of Artifacts: Artifacts representing an dataset. + """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.DATASET] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.artifact_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + + +class ImageArtifact(Artifact): + """A SageMaker lineage artifact representing an image. + + Common model specific lineage traversals to discover how the image is connected + to other entities. + """ + + def datasets(self, direction: LineageQueryDirectionEnum) -> List[Artifact]: + """Use the lineage query to retrieve datasets that use this image artifact. + + Args: + direction (LineageQueryDirectionEnum): The query direction. + + Returns: + list of Artifacts: Artifacts representing a dataset. 
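A short usage sketch for the dataset traversals and the new ``ImageArtifact`` class; the ARNs are placeholders, and the dataset helpers are assumed to hang off ``DatasetArtifact``, as the docstrings suggest:

```python
from sagemaker.lineage.artifact import DatasetArtifact, ImageArtifact
from sagemaker.lineage.query import LineageQueryDirectionEnum

dataset = DatasetArtifact.load(
    artifact_arn="arn:aws:sagemaker:us-west-2:123456789012:artifact/data0001"
)
parents = dataset.upstream_datasets()
children = dataset.downstream_datasets()

image = ImageArtifact.load(
    artifact_arn="arn:aws:sagemaker:us-west-2:123456789012:artifact/img0001"
)
# ImageArtifact.datasets() declares no default direction, so pass one explicitly.
used = image.datasets(direction=LineageQueryDirectionEnum.ASCENDANTS)
```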
+ """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.DATASET] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.artifact_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] diff --git a/src/sagemaker/lineage/context.py b/src/sagemaker/lineage/context.py index 469b9aeb1a..aef919e876 100644 --- a/src/sagemaker/lineage/context.py +++ b/src/sagemaker/lineage/context.py @@ -31,6 +31,8 @@ LineageQueryDirectionEnum, ) from sagemaker.lineage.artifact import Artifact +from sagemaker.lineage.action import Action +from sagemaker.lineage.lineage_trial_component import LineageTrialComponent class Context(_base_types.Record): @@ -256,12 +258,30 @@ def list( sagemaker_session=sagemaker_session, ) + def actions(self, direction: LineageQueryDirectionEnum) -> List[Action]: + """Use the lineage query to retrieve actions that use this context. + + Args: + direction (LineageQueryDirectionEnum): The query direction. + + Returns: + list of Actions: Actions. + """ + query_filter = LineageFilter(entities=[LineageEntityEnum.ACTION]) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.context_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + class EndpointContext(Context): """An Amazon SageMaker endpoint context, which is part of a SageMaker lineage.""" def models(self) -> List[association.Association]: - """Get all models deployed by all endpoint versions of the endpoint. + """Use Lineage API to get all models deployed by this endpoint. Returns: list of Associations: Associations that destination represents an endpoint's model. @@ -286,7 +306,7 @@ def models(self) -> List[association.Association]: def models_v2( self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.DESCENDANTS ) -> List[Artifact]: - """Get artifacts representing models from the context lineage by querying lineage data. + """Use the lineage query to retrieve downstream model artifacts that use this endpoint. Args: direction (LineageQueryDirectionEnum, optional): The query direction. @@ -335,7 +355,7 @@ def models_v2( def dataset_artifacts( self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS ) -> List[Artifact]: - """Get artifacts representing datasets from the endpoint's lineage. + """Use the lineage query to retrieve datasets that use this endpoint. Args: direction (LineageQueryDirectionEnum, optional): The query direction. @@ -360,6 +380,9 @@ def training_job_arns( ) -> List[str]: """Get ARNs for all training jobs that appear in the endpoint's lineage. + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + Returns: list of str: Training job ARNs. """ @@ -382,11 +405,78 @@ def training_job_arns( training_job_arns.append(trial_component["Source"]["SourceArn"]) return training_job_arns + def processing_jobs( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS + ) -> List[LineageTrialComponent]: + """Use the lineage query to retrieve processing jobs that use this endpoint. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of LineageTrialComponent: Lineage trial component that represent Processing jobs. 
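The ``EndpointContext`` helpers can then be exercised roughly as follows (the context name is a placeholder; ``Context.load`` mirrors the ``load`` pattern used elsewhere in this module):

```python
from sagemaker.lineage.context import EndpointContext
from sagemaker.lineage.query import LineageQueryDirectionEnum

endpoint_ctx = EndpointContext.load(context_name="my-endpoint-context")

models = endpoint_ctx.models_v2()            # downstream model artifacts
datasets = endpoint_ctx.dataset_artifacts()  # upstream dataset artifacts
job_arns = endpoint_ctx.training_job_arns()
acts = endpoint_ctx.actions(direction=LineageQueryDirectionEnum.ASCENDANTS)
```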
+ """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.TRIAL_COMPONENT], sources=[LineageSourceEnum.PROCESSING_JOB] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.context_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + + def transform_jobs( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS + ) -> List[LineageTrialComponent]: + """Use the lineage query to retrieve transform jobs that use this endpoint. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of LineageTrialComponent: Lineage trial component that represent Transform jobs. + """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.TRIAL_COMPONENT], sources=[LineageSourceEnum.TRANSFORM_JOB] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.context_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + + def trial_components( + self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS + ) -> List[LineageTrialComponent]: + """Use the lineage query to retrieve trial components that use this endpoint. + + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + + Returns: + list of LineageTrialComponent: Lineage trial component. + """ + query_filter = LineageFilter(entities=[LineageEntityEnum.TRIAL_COMPONENT]) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.context_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] + def pipeline_execution_arn( self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS ) -> str: """Get the ARN for the pipeline execution associated with this endpoint (if any). + Args: + direction (LineageQueryDirectionEnum, optional): The query direction. + Returns: str: A pipeline execution ARN. """ @@ -400,3 +490,15 @@ def pipeline_execution_arn( return tag["Value"] return None + + +class ModelPackageGroup(Context): + """An Amazon SageMaker model package group context, which is part of a SageMaker lineage.""" + + def pipeline_execution_arn(self) -> str: + """Get the ARN for the pipeline execution associated with this model package group (if any). + + Returns: + str: A pipeline execution ARN. + """ + return self.properties.get("PipelineExecutionArn") diff --git a/src/sagemaker/lineage/lineage_trial_component.py b/src/sagemaker/lineage/lineage_trial_component.py new file mode 100644 index 0000000000..f8bc0e53b4 --- /dev/null +++ b/src/sagemaker/lineage/lineage_trial_component.py @@ -0,0 +1,184 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+"""This module contains code to create and manage SageMaker ``LineageTrialComponent``.""" +from __future__ import absolute_import + +import logging + +from typing import List + +from sagemaker.apiutils import _base_types +from sagemaker.lineage.query import ( + LineageQuery, + LineageFilter, + LineageSourceEnum, + LineageEntityEnum, + LineageQueryDirectionEnum, +) +from sagemaker.lineage.artifact import Artifact + + +LOGGER = logging.getLogger("sagemaker") + + +class LineageTrialComponent(_base_types.Record): + """An Amazon SageMaker, lineage trial component, which is part of a SageMaker lineage. + + A trial component is a stage in a trial. + Trial components are created automatically within the SageMaker runtime and also can be + created directly. To automatically associate trial components with a trial and experiment + supply an experiment config when creating a job. + For example: https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateTrainingJob.html + + Attributes: + trial_component_name (str): The name of the trial component. Generated by SageMaker from the + name of the source job with a suffix specific to the type of source job. + trial_component_arn (str): The ARN of the trial component. + display_name (str): The name of the trial component that will appear in UI, + such as SageMaker Studio. + source (obj): A TrialComponentSource object with a source_arn attribute. + status (str): Status of the source job. + start_time (datetime): When the source job started. + end_time (datetime): When the source job ended. + creation_time (datetime): When the source job was created. + created_by (obj): Contextual info on which account created the trial component. + last_modified_time (datetime): When the trial component was last modified. + last_modified_by (obj): Contextual info on which account last modified the trial component. + parameters (dict): Dictionary of parameters to the source job. + input_artifacts (dict): Dictionary of input artifacts. + output_artifacts (dict): Dictionary of output artifacts. + metrics (obj): Aggregated metrics for the job. + parameters_to_remove (list): The hyperparameters to remove from the component. + input_artifacts_to_remove (list): The input artifacts to remove from the component. + output_artifacts_to_remove (list): The output artifacts to remove from the component. + tags (List[dict[str, str]]): A list of tags to associate with the trial component. 
+    """
+
+    trial_component_name = None
+    trial_component_arn = None
+    display_name = None
+    source = None
+    status = None
+    start_time = None
+    end_time = None
+    creation_time = None
+    created_by = None
+    last_modified_time = None
+    last_modified_by = None
+    parameters = None
+    input_artifacts = None
+    output_artifacts = None
+    metrics = None
+    parameters_to_remove = None
+    input_artifacts_to_remove = None
+    output_artifacts_to_remove = None
+    tags = None
+
+    _boto_create_method: str = "create_trial_component"
+    _boto_load_method: str = "describe_trial_component"
+    _boto_update_method: str = "update_trial_component"
+    _boto_delete_method: str = "delete_trial_component"
+
+    _boto_update_members = [
+        "trial_component_name",
+        "display_name",
+        "status",
+        "start_time",
+        "end_time",
+        "parameters",
+        "input_artifacts",
+        "output_artifacts",
+        "parameters_to_remove",
+        "input_artifacts_to_remove",
+        "output_artifacts_to_remove",
+    ]
+    _boto_delete_members = ["trial_component_name"]
+
+    @classmethod
+    def load(cls, trial_component_name: str, sagemaker_session=None) -> "LineageTrialComponent":
+        """Load an existing trial component and return a ``LineageTrialComponent`` object.
+
+        Args:
+            trial_component_name (str): Name of the trial component.
+            sagemaker_session (sagemaker.session.Session): Session object which
+                manages interactions with Amazon SageMaker APIs and any other
+                AWS services needed. If not specified, one is created using the
+                default AWS configuration chain.
+        Returns:
+            LineageTrialComponent: A SageMaker ``LineageTrialComponent`` object.
+        """
+        trial_component = cls._construct(
+            cls._boto_load_method,
+            trial_component_name=trial_component_name,
+            sagemaker_session=sagemaker_session,
+        )
+        return trial_component
+
+    def pipeline_execution_arn(self) -> str:
+        """Get the ARN for the pipeline execution associated with this trial component (if any).
+
+        Returns:
+            str: A pipeline execution ARN.
+        """
+        tags = self.sagemaker_session.sagemaker_client.list_tags(
+            ResourceArn=self.trial_component_arn
+        )["Tags"]
+        for tag in tags:
+            if tag["Key"] == "sagemaker:pipeline-execution-arn":
+                return tag["Value"]
+        return None
+
+    def dataset_artifacts(
+        self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.ASCENDANTS
+    ) -> List[Artifact]:
+        """Use the lineage query to retrieve datasets that use this trial component.
+
+        Args:
+            direction (LineageQueryDirectionEnum, optional): The query direction.
+
+        Returns:
+            list of Artifacts: Artifacts representing a dataset.
+        """
+        query_filter = LineageFilter(
+            entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.DATASET]
+        )
+        query_result = LineageQuery(self.sagemaker_session).query(
+            start_arns=[self.trial_component_arn],
+            query_filter=query_filter,
+            direction=direction,
+            include_edges=False,
+        )
+
+        return [vertex.to_lineage_object() for vertex in query_result.vertices]
+
+    def models(
+        self, direction: LineageQueryDirectionEnum = LineageQueryDirectionEnum.DESCENDANTS
+    ) -> List[Artifact]:
+        """Use the lineage query to retrieve models produced by this trial component.
+
+        Args:
+            direction (LineageQueryDirectionEnum, optional): The query direction.
+
+        Returns:
+            list of Artifacts: Artifacts representing a model.
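For orientation, the class can be driven as in this sketch (the trial component name is a placeholder):

```python
from sagemaker.lineage.lineage_trial_component import LineageTrialComponent

tc = LineageTrialComponent.load(trial_component_name="my-trial-component")

datasets = tc.dataset_artifacts()  # upstream dataset artifacts
models = tc.models()               # downstream model artifacts
arn = tc.pipeline_execution_arn()  # None unless the component carries the pipeline tag
```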
+ """ + query_filter = LineageFilter( + entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.MODEL] + ) + query_result = LineageQuery(self.sagemaker_session).query( + start_arns=[self.trial_component_arn], + query_filter=query_filter, + direction=direction, + include_edges=False, + ) + return [vertex.to_lineage_object() for vertex in query_result.vertices] diff --git a/src/sagemaker/lineage/query.py b/src/sagemaker/lineage/query.py index 033e838137..a54331c39a 100644 --- a/src/sagemaker/lineage/query.py +++ b/src/sagemaker/lineage/query.py @@ -12,15 +12,18 @@ # language governing permissions and limitations under the License. """This module contains code to query SageMaker lineage.""" from __future__ import absolute_import + from datetime import datetime from enum import Enum from typing import Optional, Union, List, Dict + from sagemaker.lineage._utils import get_resource_name_from_arn class LineageEntityEnum(Enum): """Enum of lineage entities for use in a query filter.""" + TRIAL = "Trial" ACTION = "Action" ARTIFACT = "Artifact" CONTEXT = "Context" @@ -41,6 +44,9 @@ class LineageSourceEnum(Enum): MODEL_REPLACE = "ModelReplaced" TENSORBOARD = "TensorBoard" TRAINING_JOB = "TrainingJob" + APPROVAL = "Approval" + PROCESSING_JOB = "ProcessingJob" + TRANSFORM_JOB = "TransformJob" class LineageQueryDirectionEnum(Enum): @@ -65,6 +71,27 @@ def __init__( self.destination_arn = destination_arn self.association_type = association_type + def __hash__(self): + """Define hash function for ``Edge``.""" + return hash( + ( + "source_arn", + self.source_arn, + "destination_arn", + self.destination_arn, + "association_type", + self.association_type, + ) + ) + + def __eq__(self, other): + """Define equal function for ``Edge``.""" + return ( + self.association_type == other.association_type + and self.source_arn == other.source_arn + and self.destination_arn == other.destination_arn + ) + class Vertex: """A vertex for a lineage graph.""" @@ -82,12 +109,37 @@ def __init__( self.lineage_source = lineage_source self._session = sagemaker_session + def __hash__(self): + """Define hash function for ``Vertex``.""" + return hash( + ( + "arn", + self.arn, + "lineage_entity", + self.lineage_entity, + "lineage_source", + self.lineage_source, + ) + ) + + def __eq__(self, other): + """Define equal function for ``Vertex``.""" + return ( + self.arn == other.arn + and self.lineage_entity == other.lineage_entity + and self.lineage_source == other.lineage_source + ) + def to_lineage_object(self): - """Convert the ``Vertex`` object to its corresponding Artifact, Action, Context object.""" - from sagemaker.lineage.artifact import Artifact, ModelArtifact + """Convert the ``Vertex`` object to its corresponding lineage object. + + Returns: + A ``Vertex`` object to its corresponding ``Artifact``,``Action``, ``Context`` + or ``TrialComponent`` object. 
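The value-based ``__hash__``/``__eq__`` definitions above are what make set-based de-duplication of query results possible (the changelog's "Remove duplicate vertex/edge in query lineage" fix). A quick sketch of the semantics, with placeholder ARNs and keyword arguments matching the ``_get_vertex`` call site:

```python
from sagemaker.lineage.query import Edge, Vertex

e1 = Edge(source_arn="arn:src", destination_arn="arn:dst", association_type="ContributedTo")
e2 = Edge(source_arn="arn:src", destination_arn="arn:dst", association_type="ContributedTo")
assert e1 == e2 and len({e1, e2}) == 1  # duplicates collapse in a set

v1 = Vertex(arn="arn:v", lineage_entity="Artifact", lineage_source="Model", sagemaker_session=None)
v2 = Vertex(arn="arn:v", lineage_entity="Artifact", lineage_source="Model", sagemaker_session=None)
assert v1 == v2 and hash(v1) == hash(v2)
```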
+ """ from sagemaker.lineage.context import Context, EndpointContext - from sagemaker.lineage.artifact import DatasetArtifact from sagemaker.lineage.action import Action + from sagemaker.lineage.lineage_trial_component import LineageTrialComponent if self.lineage_entity == LineageEntityEnum.CONTEXT.value: resource_name = get_resource_name_from_arn(self.arn) @@ -98,17 +150,31 @@ def to_lineage_object(self): return Context.load(context_name=resource_name, sagemaker_session=self._session) if self.lineage_entity == LineageEntityEnum.ARTIFACT.value: - if self.lineage_source == LineageSourceEnum.MODEL.value: - return ModelArtifact.load(artifact_arn=self.arn, sagemaker_session=self._session) - if self.lineage_source == LineageSourceEnum.DATASET.value: - return DatasetArtifact.load(artifact_arn=self.arn, sagemaker_session=self._session) - return Artifact.load(artifact_arn=self.arn, sagemaker_session=self._session) + return self._artifact_to_lineage_object() if self.lineage_entity == LineageEntityEnum.ACTION.value: return Action.load(action_name=self.arn.split("/")[1], sagemaker_session=self._session) + if self.lineage_entity == LineageEntityEnum.TRIAL_COMPONENT.value: + trial_component_name = get_resource_name_from_arn(self.arn) + return LineageTrialComponent.load( + trial_component_name=trial_component_name, sagemaker_session=self._session + ) raise ValueError("Vertex cannot be converted to a lineage object.") + def _artifact_to_lineage_object(self): + """Convert the ``Vertex`` object to its corresponding ``Artifact``.""" + from sagemaker.lineage.artifact import Artifact, ModelArtifact, ImageArtifact + from sagemaker.lineage.artifact import DatasetArtifact + + if self.lineage_source == LineageSourceEnum.MODEL.value: + return ModelArtifact.load(artifact_arn=self.arn, sagemaker_session=self._session) + if self.lineage_source == LineageSourceEnum.DATASET.value: + return DatasetArtifact.load(artifact_arn=self.arn, sagemaker_session=self._session) + if self.lineage_source == LineageSourceEnum.IMAGE.value: + return ImageArtifact.load(artifact_arn=self.arn, sagemaker_session=self._session) + return Artifact.load(artifact_arn=self.arn, sagemaker_session=self._session) + class LineageQueryResult(object): """A wrapper around the results of a lineage query.""" @@ -159,11 +225,11 @@ def __init__( def _to_request_dict(self): """Convert the lineage filter to its API representation.""" filter_request = {} - if self.entities: + if self.sources: filter_request["Types"] = list( map(lambda x: x.value if isinstance(x, LineageSourceEnum) else x, self.sources) ) - if self.sources: + if self.entities: filter_request["LineageTypes"] = list( map(lambda x: x.value if isinstance(x, LineageEntityEnum) else x, self.entities) ) @@ -197,9 +263,12 @@ def _get_edge(self, edge): def _get_vertex(self, vertex): """Convert lineage query API response to a Vertex.""" + vertex_type = None + if "Type" in vertex: + vertex_type = vertex["Type"] return Vertex( arn=vertex["Arn"], - lineage_source=vertex["Type"], + lineage_source=vertex_type, lineage_entity=vertex["LineageType"], sagemaker_session=self._session, ) @@ -210,6 +279,18 @@ def _convert_api_response(self, response) -> LineageQueryResult: converted.edges = [self._get_edge(edge) for edge in response["Edges"]] converted.vertices = [self._get_vertex(vertex) for vertex in response["Vertices"]] + edge_set = set() + for edge in converted.edges: + if edge in edge_set: + converted.edges.remove(edge) + edge_set.add(edge) + + vertex_set = set() + for vertex in converted.vertices: + if vertex 
in vertex_set: + converted.vertices.remove(vertex) + vertex_set.add(vertex) + return converted def _collapse_cross_account_artifacts(self, query_response): diff --git a/src/sagemaker/model.py b/src/sagemaker/model.py index f576f78fb4..bfe89ce0b0 100644 --- a/src/sagemaker/model.py +++ b/src/sagemaker/model.py @@ -33,8 +33,12 @@ from sagemaker.inputs import CompilationInput from sagemaker.deprecations import removed_kwargs from sagemaker.predictor import PredictorBase +from sagemaker.serverless import ServerlessInferenceConfig from sagemaker.transformer import Transformer from sagemaker.jumpstart.utils import add_jumpstart_tags +from sagemaker.utils import unique_name_from_base +from sagemaker.async_inference import AsyncInferenceConfig +from sagemaker.predictor_async import AsyncPredictor LOGGER = logging.getLogger("sagemaker") @@ -362,7 +366,7 @@ def register( model_package_arn=model_package.get("ModelPackageArn"), ) - def _init_sagemaker_session_if_does_not_exist(self, instance_type): + def _init_sagemaker_session_if_does_not_exist(self, instance_type=None): """Set ``self.sagemaker_session`` to ``LocalSession`` or ``Session`` if it's not already. The type of session object is determined by the instance type. @@ -922,8 +926,8 @@ def compile( def deploy( self, - initial_instance_count, - instance_type, + initial_instance_count=None, + instance_type=None, serializer=None, deserializer=None, accelerator_type=None, @@ -932,6 +936,8 @@ def deploy( kms_key=None, wait=True, data_capture_config=None, + async_inference_config=None, + serverless_inference_config=None, **kwargs, ): """Deploy this ``Model`` to an ``Endpoint`` and optionally return a ``Predictor``. @@ -949,9 +955,13 @@ def deploy( Args: initial_instance_count (int): The initial number of instances to run - in the ``Endpoint`` created from this ``Model``. + in the ``Endpoint`` created from this ``Model``. If not using + serverless inference, then it need to be a number larger or equals + to 1 (default: None) instance_type (str): The EC2 instance type to deploy this Model to. - For example, 'ml.p2.xlarge', or 'local' for local mode. + For example, 'ml.p2.xlarge', or 'local' for local mode. If not using + serverless inference, then it is required to deploy a model. + (default: None) serializer (:class:`~sagemaker.serializers.BaseSerializer`): A serializer object, used to encode data for an inference endpoint (default: None). If ``serializer`` is not None, then @@ -980,7 +990,24 @@ def deploy( data_capture_config (sagemaker.model_monitor.DataCaptureConfig): Specifies configuration related to Endpoint data capture for use with Amazon SageMaker Model Monitoring. Default: None. - + async_inference_config (sagemaker.model_monitor.AsyncInferenceConfig): Specifies + configuration related to async endpoint. Use this configuration when trying + to create async endpoint and make async inference. If empty config object + passed through, will use default config to deploy async endpoint. Deploy a + real-time endpoint if it's None. (default: None) + serverless_inference_config (sagemaker.serverless.ServerlessInferenceConfig): + Specifies configuration related to serverless endpoint. Use this configuration + when trying to create serverless endpoint and make serverless inference. If + empty object passed through, will use pre-defined values in + ``ServerlessInferenceConfig`` class to deploy serverless endpoint. Deploy an + instance based endpoint if it's None. 
(default: None) + Raises: + ValueError: If arguments combination check failed in these circumstances: + - If no role is specified or + - If serverless inference config is not specified and instance type and instance + count are also not specified or + - If a wrong type of object is provided as serverless inference config or async + inference config Returns: callable[string, sagemaker.session.Session] or None: Invocation of ``self.predictor_cls`` on the created endpoint name, if ``self.predictor_cls`` @@ -996,33 +1023,65 @@ def deploy( if self.role is None: raise ValueError("Role can not be null for deploying a model") - if instance_type.startswith("ml.inf") and not self._is_compiled_model: + is_async = async_inference_config is not None + if is_async and not isinstance(async_inference_config, AsyncInferenceConfig): + raise ValueError("async_inference_config needs to be a AsyncInferenceConfig object") + + is_serverless = serverless_inference_config is not None + if not is_serverless and not (instance_type and initial_instance_count): + raise ValueError( + "Must specify instance type and instance count unless using serverless inference" + ) + + if is_serverless and not isinstance(serverless_inference_config, ServerlessInferenceConfig): + raise ValueError( + "serverless_inference_config needs to be a ServerlessInferenceConfig object" + ) + + if instance_type and instance_type.startswith("ml.inf") and not self._is_compiled_model: LOGGER.warning( "Your model is not compiled. Please compile your model before using Inferentia." ) - compiled_model_suffix = "-".join(instance_type.split(".")[:-1]) - if self._is_compiled_model: + compiled_model_suffix = None if is_serverless else "-".join(instance_type.split(".")[:-1]) + if self._is_compiled_model and not is_serverless: self._ensure_base_name_if_needed(self.image_uri) if self._base_name is not None: self._base_name = "-".join((self._base_name, compiled_model_suffix)) self._create_sagemaker_model(instance_type, accelerator_type, tags) + + serverless_inference_config_dict = ( + serverless_inference_config._to_request_dict() if is_serverless else None + ) production_variant = sagemaker.production_variant( - self.name, instance_type, initial_instance_count, accelerator_type=accelerator_type + self.name, + instance_type, + initial_instance_count, + accelerator_type=accelerator_type, + serverless_inference_config=serverless_inference_config_dict, ) if endpoint_name: self.endpoint_name = endpoint_name else: base_endpoint_name = self._base_name or utils.base_from_name(self.name) - if self._is_compiled_model and not base_endpoint_name.endswith(compiled_model_suffix): - base_endpoint_name = "-".join((base_endpoint_name, compiled_model_suffix)) + if self._is_compiled_model and not is_serverless: + if not base_endpoint_name.endswith(compiled_model_suffix): + base_endpoint_name = "-".join((base_endpoint_name, compiled_model_suffix)) self.endpoint_name = utils.name_from_base(base_endpoint_name) data_capture_config_dict = None if data_capture_config is not None: data_capture_config_dict = data_capture_config._to_request_dict() + async_inference_config_dict = None + if is_async: + if async_inference_config.output_path is None: + async_inference_config = self._build_default_async_inference_config( + async_inference_config + ) + async_inference_config_dict = async_inference_config._to_request_dict() + self.sagemaker_session.endpoint_from_production_variants( name=self.endpoint_name, production_variants=[production_variant], @@ -1030,6 +1089,7 @@ def deploy( 
kms_key=kms_key, wait=wait, data_capture_config_dict=data_capture_config_dict, + async_inference_config_dict=async_inference_config_dict, ) if self.predictor_cls: @@ -1038,9 +1098,20 @@ def deploy( predictor.serializer = serializer if deserializer: predictor.deserializer = deserializer + if is_async: + return AsyncPredictor(predictor, self.name) return predictor return None + def _build_default_async_inference_config(self, async_inference_config): + """Build default async inference config and return ``AsyncInferenceConfig``""" + async_output_folder = unique_name_from_base(self.name) + async_output_s3uri = "s3://{}/async-endpoint-outputs/{}".format( + self.sagemaker_session.default_bucket(), async_output_folder + ) + async_inference_config.output_path = async_output_s3uri + return async_inference_config + def transformer( self, instance_count, diff --git a/src/sagemaker/parameter.py b/src/sagemaker/parameter.py index 3b7ef1d4bb..a7f8440f3d 100644 --- a/src/sagemaker/parameter.py +++ b/src/sagemaker/parameter.py @@ -15,6 +15,8 @@ import json from sagemaker.workflow.parameters import Parameter as PipelineParameter +from sagemaker.workflow.functions import JsonGet as PipelineJsonGet +from sagemaker.workflow.functions import Join as PipelineJoin class ParameterRange(object): @@ -71,10 +73,10 @@ def as_tuning_range(self, name): return { "Name": name, "MinValue": str(self.min_value) - if not isinstance(self.min_value, PipelineParameter) + if not isinstance(self.min_value, (PipelineParameter, PipelineJsonGet, PipelineJoin)) else self.min_value, "MaxValue": str(self.max_value) - if not isinstance(self.max_value, PipelineParameter) + if not isinstance(self.max_value, (PipelineParameter, PipelineJsonGet, PipelineJoin)) else self.max_value, "ScalingType": self.scaling_type, } @@ -108,10 +110,11 @@ def __init__(self, values): # pylint: disable=super-init-not-called values (list or object): The possible values for the hyperparameter. This input will be converted into a list of strings. """ - if isinstance(values, list): - self.values = [str(v) if not isinstance(v, PipelineParameter) else v for v in values] - else: - self.values = [str(values) if not isinstance(values, PipelineParameter) else values] + values = values if isinstance(values, list) else [values] + self.values = [ + str(v) if not isinstance(v, (PipelineParameter, PipelineJsonGet, PipelineJoin)) else v + for v in values + ] def as_tuning_range(self, name): """Represent the parameter range as a dictionary. diff --git a/src/sagemaker/predictor_async.py b/src/sagemaker/predictor_async.py new file mode 100644 index 0000000000..39d31c3ee5 --- /dev/null +++ b/src/sagemaker/predictor_async.py @@ -0,0 +1,371 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
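With the ``deploy()`` changes above, a model can now target three endpoint flavors. A hedged sketch, where ``model`` stands in for any constructed ``sagemaker.model.Model``:

```python
from sagemaker.serverless import ServerlessInferenceConfig
from sagemaker.async_inference import AsyncInferenceConfig

# Serverless: instance_type and initial_instance_count may be omitted entirely.
predictor = model.deploy(
    serverless_inference_config=ServerlessInferenceConfig(
        memory_size_in_mb=2048, max_concurrency=5
    )
)

# Async: instance-based; an S3 output path is generated if the config omits one.
async_predictor = model.deploy(
    initial_instance_count=1,
    instance_type="ml.m5.xlarge",
    async_inference_config=AsyncInferenceConfig(),
)
```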
+"""Placeholder docstring""" +from __future__ import absolute_import + +import uuid +from botocore.exceptions import WaiterError +from sagemaker.exceptions import PollingTimeoutError +from sagemaker.async_inference import WaiterConfig, AsyncInferenceResponse +from sagemaker.s3 import parse_s3_url +from sagemaker.session import Session +from sagemaker.utils import name_from_base, sagemaker_timestamp + + +class AsyncPredictor: + """Make async prediction requests to an Amazon SageMaker endpoint.""" + + def __init__( + self, + predictor, + name=None, + ): + """Initialize an ``AsyncPredictor``. + + Args: + predictor (sagemaker.predictor.Predictor): General ``Predictor`` + object has useful methods and variables. ``AsyncPredictor`` + stands on top of it with capability for async inference. + """ + self.predictor = predictor + self.endpoint_name = predictor.endpoint_name + self.sagemaker_session = predictor.sagemaker_session or Session() + if self.sagemaker_session.s3_client is None: + self.s3_client = self.sagemaker_session.boto_session.client( + "s3", + region_name=self.sagemaker_session.boto_region_name, + ) + else: + self.s3_client = self.sagemaker_session.s3_client + + self.serializer = predictor.serializer + self.deserializer = predictor.deserializer + self.name = name + self._endpoint_config_name = None + self._model_names = None + self._context = None + self._input_path = None + + def predict( + self, + data=None, + input_path=None, + initial_args=None, + inference_id=None, + waiter_config=WaiterConfig(), + ): + """Wait and return the Async Inference result from the specified endpoint. + + Args: + data (object): Input data for which you want the model to provide + inference. If a serializer was specified in the encapsulated + Predictor object, the result of the serializer is sent as input + data. Otherwise the data must be sequence of bytes, and the + predict method then sends the bytes in the request body as is. + input_path (str): Amazon S3 URI contains input data for which you want + the model to provide async inference. (Default: None) + initial_args (dict[str,str]): Optional. Default arguments for boto3 + ``invoke_endpoint_async`` call. (Default: None). + inference_id (str): If you provide a value, it is added to the captured data + when you enable data capture on the endpoint (Default: None). + waiter_config (sagemaker.async_inference.waiter_config.WaiterConfig): Configuration + for the waiter. (Default: {"Delay": 15 seconds, "MaxAttempts": 60} + Raises: + ValueError: If both input data and input Amazon S3 path are not provided + Returns: + object: Inference for the given input. If a deserializer was specified when creating + the Predictor, the result of the deserializer is + returned. Otherwise the response returns the sequence of bytes + as is. + """ + if data is None and input_path is None: + raise ValueError( + "Please provide input data or input Amazon S3 location to use async prediction" + ) + if data is not None: + input_path = self._upload_data_to_s3(data, input_path) + + self._input_path = input_path + response = self._submit_async_request(input_path, initial_args, inference_id) + output_location = response["OutputLocation"] + result = self._wait_for_output(output_path=output_location, waiter_config=waiter_config) + + return result + + def predict_async( + self, + data=None, + input_path=None, + initial_args=None, + inference_id=None, + ): + """Return the Async Inference ouput Amazon S3 path from the specified endpoint. 
+ + Args: + data (object): Input data for which you want the model to provide + inference. If a serializer was specified in the encapsulated + Predictor object, the result of the serializer is sent as input + data. Otherwise the data must be sequence of bytes, and the + predict method then upload the data to the ``input_s3_path``. If + ``input_s3_path`` is None, upload the data to + input_path (str): Amazon S3 URI contains input data for which you want + the model to provide async inference. (Default: None) + initial_args (dict[str,str]): Optional. Default arguments for boto3 + ``invoke_endpoint_async`` call. (Default: None). + inference_id (str): If you provide a value, it is added to the captured data + when you enable data capture on the endpoint (Default: None). + Raises: + ValueError: If both input data and input Amazon S3 path are not provided + Returns: + AsyncInferenceResponse: + Inference response for the given input. It provides method to check + the result in the Amazon S3 output path. + """ + if data is None and input_path is None: + raise ValueError( + "Please provide input data or input Amazon S3 location to use async prediction" + ) + if data is not None: + input_path = self._upload_data_to_s3(data, input_path) + + self._input_path = input_path + response = self._submit_async_request(input_path, initial_args, inference_id) + output_location = response["OutputLocation"] + response_async = AsyncInferenceResponse( + predictor_async=self, + output_path=output_location, + ) + + return response_async + + def _upload_data_to_s3( + self, + data, + input_path=None, + ): + """Upload request data to Amazon S3 for users""" + if input_path: + bucket, key = parse_s3_url(input_path) + else: + my_uuid = str(uuid.uuid4()) + timestamp = sagemaker_timestamp() + bucket = self.sagemaker_session.default_bucket() + key = "async-endpoint-inputs/{}/{}-{}".format( + name_from_base(self.name, short=True), + timestamp, + my_uuid, + ) + + data = self.serializer.serialize(data) + self.s3_client.put_object( + Body=data, Bucket=bucket, Key=key, ContentType=self.serializer.CONTENT_TYPE + ) + input_path = input_path or "s3://{}/{}".format(self.sagemaker_session.default_bucket(), key) + + return input_path + + def _create_request_args( + self, + input_path, + initial_args=None, + inference_id=None, + ): + """Create request args for ``invoke_endpoint_async``""" + args = dict(initial_args) if initial_args else {} + args["InputLocation"] = input_path + if "EndpointName" not in args: + args["EndpointName"] = self.predictor.endpoint_name + + if "Accept" not in args: + args["Accept"] = ", ".join(self.predictor.accept) + + if inference_id: + args["InferenceId"] = inference_id + + return args + + def _submit_async_request( + self, + input_path, + initial_args, + inference_id, + ): + """Create request and invoke async endpoint with the request""" + request_args = self._create_request_args(input_path, initial_args, inference_id) + + response = self.sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async( + **request_args + ) + + return response + + def _wait_for_output( + self, + output_path, + waiter_config, + ): + """Check the Amazon S3 output path for the output. + + Periodically check Amazon S3 output path for async inference result. 
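End to end, the two entry points look roughly like this (endpoint name, bucket, and payload are placeholders):

```python
from sagemaker.predictor import Predictor
from sagemaker.predictor_async import AsyncPredictor

async_predictor = AsyncPredictor(Predictor("my-async-endpoint"))

# Blocking: serialize and upload the data, invoke the endpoint, wait for the S3 output.
result = async_predictor.predict(data=payload)

# Non-blocking: returns an AsyncInferenceResponse handle immediately.
response = async_predictor.predict_async(
    input_path="s3://my-bucket/async-inputs/payload.csv"
)
```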
+ Timeout automatically after max attempts reached + """ + bucket, key = parse_s3_url(output_path) + s3_waiter = self.s3_client.get_waiter("object_exists") + try: + s3_waiter.wait(Bucket=bucket, Key=key, WaiterConfig=waiter_config._to_request_dict()) + except WaiterError: + raise PollingTimeoutError( + message="Inference could still be running", + output_path=output_path, + seconds=waiter_config.delay * waiter_config.max_attempts, + ) + + s3_object = self.s3_client.get_object(Bucket=bucket, Key=key) + result = self.predictor._handle_response(response=s3_object) + return result + + def update_endpoint( + self, + initial_instance_count=None, + instance_type=None, + accelerator_type=None, + model_name=None, + tags=None, + kms_key=None, + data_capture_config_dict=None, + wait=True, + ): + """Update the existing endpoint with the provided attributes. + + This creates a new EndpointConfig in the process. If ``initial_instance_count``, + ``instance_type``, ``accelerator_type``, or ``model_name`` is specified, then a new + ProductionVariant configuration is created; values from the existing configuration + are not preserved if any of those parameters are specified. + + Args: + initial_instance_count (int): The initial number of instances to run in the endpoint. + This is required if ``instance_type``, ``accelerator_type``, or ``model_name`` is + specified. Otherwise, the values from the existing endpoint configuration's + ProductionVariants are used. + instance_type (str): The EC2 instance type to deploy the endpoint to. + This is required if ``initial_instance_count`` or ``accelerator_type`` is specified. + Otherwise, the values from the existing endpoint configuration's + ``ProductionVariants`` are used. + accelerator_type (str): The type of Elastic Inference accelerator to attach to + the endpoint, e.g. "ml.eia1.medium". If not specified, and + ``initial_instance_count``, ``instance_type``, and ``model_name`` are also ``None``, + the values from the existing endpoint configuration's ``ProductionVariants`` are + used. Otherwise, no Elastic Inference accelerator is attached to the endpoint. + model_name (str): The name of the model to be associated with the endpoint. + This is required if ``initial_instance_count``, ``instance_type``, or + ``accelerator_type`` is specified and if there is more than one model associated + with the endpoint. Otherwise, the existing model for the endpoint is used. + tags (list[dict[str, str]]): The list of tags to add to the endpoint + config. If not specified, the tags of the existing endpoint configuration are used. + If any of the existing tags are reserved AWS ones (i.e. begin with "aws"), + they are not carried over to the new endpoint configuration. + kms_key (str): The KMS key that is used to encrypt the data on the storage volume + attached to the instance hosting the endpoint If not specified, + the KMS key of the existing endpoint configuration is used. + data_capture_config_dict (dict): The endpoint data capture configuration + for use with Amazon SageMaker Model Monitoring. If not specified, + the data capture configuration of the existing endpoint configuration is used. 
+ wait (bool): Wait for updating to finish + """ + + self.predictor.update_endpoint( + initial_instance_count=initial_instance_count, + instance_type=instance_type, + accelerator_type=accelerator_type, + model_name=model_name, + tags=tags, + kms_key=kms_key, + data_capture_config_dict=data_capture_config_dict, + wait=wait, + ) + + def delete_endpoint(self, delete_endpoint_config=True): + """Delete the Amazon SageMaker endpoint backing this async predictor. + + This also delete the endpoint configuration attached to it if + delete_endpoint_config is True. + + Args: + delete_endpoint_config (bool, optional): Flag to indicate whether to + delete endpoint configuration together with endpoint. Defaults + to True. If True, both endpoint and endpoint configuration will + be deleted. If False, only endpoint will be deleted. + """ + self.predictor.delete_endpoint(delete_endpoint_config) + + def delete_model(self): + """Deletes the Amazon SageMaker models backing this predictor.""" + self.predictor.delete_model() + + def enable_data_capture(self): + """Enables data capture by updating DataCaptureConfig. + + This function updates the DataCaptureConfig for the Predictor's associated Amazon SageMaker + Endpoint to enable data capture. For a more customized experience, refer to + update_data_capture_config, instead. + """ + self.predictor.enable_data_capture() + + def disable_data_capture(self): + """Disables data capture by updating DataCaptureConfig. + + This function updates the DataCaptureConfig for the Predictor's associated Amazon SageMaker + Endpoint to disable data capture. For a more customized experience, refer to + update_data_capture_config, instead. + """ + self.predictor.disable_data_capture() + + def update_data_capture_config(self, data_capture_config): + """Updates the DataCaptureConfig for the Predictor's associated Amazon SageMaker Endpoint. + + Update is done using the provided DataCaptureConfig. + + Args: + data_capture_config (sagemaker.model_monitor.DataCaptureConfig): The + DataCaptureConfig to update the predictor's endpoint to use. + """ + self.predictor.update_data_capture_config(data_capture_config) + + def list_monitors(self): + """Generates ModelMonitor objects (or DefaultModelMonitors). + + Objects are generated based on the schedule(s) associated with the endpoint + that this predictor refers to. + + Returns: + [sagemaker.model_monitor.model_monitoring.ModelMonitor]: A list of + ModelMonitor (or DefaultModelMonitor) objects. + + """ + return self.predictor.list_monitors() + + def endpoint_context(self): + """Retrieves the lineage context object representing the endpoint. + + Examples: + .. code-block:: python + + predictor = Predictor() + context = predictor.endpoint_context() + models = context.models() + + Returns: + ContextEndpoint: The context for the endpoint. 
+ """ + + return self.predictor.endpoint_context() diff --git a/src/sagemaker/serverless/__init__.py b/src/sagemaker/serverless/__init__.py index 8bf55c0dcd..4ecffb56d8 100644 --- a/src/sagemaker/serverless/__init__.py +++ b/src/sagemaker/serverless/__init__.py @@ -13,3 +13,6 @@ """Classes for performing machine learning on serverless compute.""" from sagemaker.serverless.model import LambdaModel # noqa: F401 from sagemaker.serverless.predictor import LambdaPredictor # noqa: F401 +from sagemaker.serverless.serverless_inference_config import ( # noqa: F401 + ServerlessInferenceConfig, +) diff --git a/src/sagemaker/serverless/serverless_inference_config.py b/src/sagemaker/serverless/serverless_inference_config.py new file mode 100644 index 0000000000..39950f4f84 --- /dev/null +++ b/src/sagemaker/serverless/serverless_inference_config.py @@ -0,0 +1,54 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""This module contains code related to the ServerlessInferenceConfig class. + +Codes are used for configuring async inference endpoint. Use it when deploying +the model to the endpoints. +""" +from __future__ import print_function, absolute_import + + +class ServerlessInferenceConfig(object): + """Configuration object passed in when deploying models to Amazon SageMaker Endpoints. + + This object specifies configuration related to serverless endpoint. Use this configuration + when trying to create serverless endpoint and make serverless inference + """ + + def __init__( + self, + memory_size_in_mb=2048, + max_concurrency=5, + ): + """Initialize a ServerlessInferenceConfig object for serverless inference configuration. + + Args: + memory_size_in_mb (int): Optional. The memory size of your serverless endpoint. + Valid values are in 1 GB increments: 1024 MB, 2048 MB, 3072 MB, 4096 MB, + 5120 MB, or 6144 MB. If no value is provided, Amazon SageMaker will choose + the default value for you. (Default: 2048) + max_concurrency (int): Optional. The maximum number of concurrent invocations + your serverless endpoint can process. If no value is provided, Amazon + SageMaker will choose the default value for you. (Default: 5) + """ + self.memory_size_in_mb = memory_size_in_mb + self.max_concurrency = max_concurrency + + def _to_request_dict(self): + """Generates a request dictionary using the parameters provided to the class.""" + request_dict = { + "MemorySizeInMB": self.memory_size_in_mb, + "MaxConcurrency": self.max_concurrency, + } + + return request_dict diff --git a/src/sagemaker/session.py b/src/sagemaker/session.py index 56f008be84..0ac5b6d6e7 100644 --- a/src/sagemaker/session.py +++ b/src/sagemaker/session.py @@ -3543,6 +3543,7 @@ def endpoint_from_production_variants( kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ): """Create an SageMaker ``Endpoint`` from a list of production variants. @@ -3557,7 +3558,9 @@ def endpoint_from_production_variants( (default: True). 
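The request shape produced by this class is exactly what ends up on the production variant (see the ``session.py`` changes below):

```python
from sagemaker.serverless import ServerlessInferenceConfig

config = ServerlessInferenceConfig(memory_size_in_mb=4096, max_concurrency=10)
assert config._to_request_dict() == {"MemorySizeInMB": 4096, "MaxConcurrency": 10}
```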
data_capture_config_dict (dict): Specifies configuration related to Endpoint data capture for use with Amazon SageMaker Model Monitoring. Default: None. - + async_inference_config_dict (dict) : specifies configuration related to async endpoint. + Use this configuration when trying to create async endpoint and make async inference + (default: None) Returns: str: The name of the created ``Endpoint``. """ @@ -3569,6 +3572,8 @@ def endpoint_from_production_variants( config_options["KmsKeyId"] = kms_key if data_capture_config_dict is not None: config_options["DataCaptureConfig"] = data_capture_config_dict + if async_inference_config_dict is not None: + config_options["AsyncInferenceConfig"] = async_inference_config_dict LOGGER.info("Creating endpoint-config with name %s", name) self.sagemaker_client.create_endpoint_config(**config_options) @@ -4382,11 +4387,12 @@ def pipeline_container_def(models, instance_type=None): def production_variant( model_name, - instance_type, - initial_instance_count=1, + instance_type=None, + initial_instance_count=None, variant_name="AllTraffic", initial_weight=1, accelerator_type=None, + serverless_inference_config=None, ): """Create a production variant description suitable for use in a ``ProductionVariant`` list. @@ -4405,14 +4411,15 @@ def production_variant( accelerator_type (str): Type of Elastic Inference accelerator for this production variant. For example, 'ml.eia1.medium'. For more information: https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html + serverless_inference_config (dict): Specifies configuration dict related to serverless + endpoint. The dict is converted from sagemaker.model_monitor.ServerlessInferenceConfig + object (default: None) Returns: dict[str, str]: An SageMaker ``ProductionVariant`` description """ production_variant_configuration = { "ModelName": model_name, - "InstanceType": instance_type, - "InitialInstanceCount": initial_instance_count, "VariantName": variant_name, "InitialVariantWeight": initial_weight, } @@ -4420,6 +4427,13 @@ def production_variant( if accelerator_type: production_variant_configuration["AcceleratorType"] = accelerator_type + if serverless_inference_config: + production_variant_configuration["ServerlessConfig"] = serverless_inference_config + else: + initial_instance_count = initial_instance_count or 1 + production_variant_configuration["InitialInstanceCount"] = initial_instance_count + production_variant_configuration["InstanceType"] = instance_type + return production_variant_configuration diff --git a/src/sagemaker/tensorflow/model.py b/src/sagemaker/tensorflow/model.py index 94c5eb37ad..0b8d2f7235 100644 --- a/src/sagemaker/tensorflow/model.py +++ b/src/sagemaker/tensorflow/model.py @@ -258,8 +258,8 @@ def register( def deploy( self, - initial_instance_count, - instance_type, + initial_instance_count=None, + instance_type=None, serializer=None, deserializer=None, accelerator_type=None, @@ -269,6 +269,8 @@ def deploy( wait=True, data_capture_config=None, update_endpoint=None, + async_inference_config=None, + serverless_inference_config=None, ): """Deploy a Tensorflow ``Model`` to a SageMaker ``Endpoint``.""" @@ -287,6 +289,8 @@ def deploy( kms_key=kms_key, wait=wait, data_capture_config=data_capture_config, + async_inference_config=async_inference_config, + serverless_inference_config=serverless_inference_config, update_endpoint=update_endpoint, ) diff --git a/src/sagemaker/tuner.py b/src/sagemaker/tuner.py index efa2617887..f661e26e04 100644 --- a/src/sagemaker/tuner.py +++ b/src/sagemaker/tuner.py @@ 
-38,6 +38,10 @@ IntegerParameter, ParameterRange, ) +from sagemaker.workflow.parameters import Parameter as PipelineParameter +from sagemaker.workflow.functions import JsonGet as PipelineJsonGet +from sagemaker.workflow.functions import Join as PipelineJoin + from sagemaker.session import Session from sagemaker.utils import base_from_name, base_name_from_image, name_from_base @@ -59,6 +63,18 @@ logger = logging.getLogger(__name__) +def is_pipeline_parameters(value): + """Determine if a value is a pipeline parameter or function representation + + Args: + value (float or int): The value to be verified. + + Returns: + bool: True if it is, False otherwise. + """ + return isinstance(value, (PipelineParameter, PipelineJsonGet, PipelineJoin)) + + class WarmStartTypes(Enum): """Warm Start Configuration type. @@ -359,7 +375,12 @@ def _prepare_static_hyperparameters( ): """Prepare static hyperparameters for one estimator before tuning.""" # Remove any hyperparameter that will be tuned - static_hyperparameters = {str(k): str(v) for (k, v) in estimator.hyperparameters().items()} + static_hyperparameters = { + str(k): str(v) + if not isinstance(v, (PipelineParameter, PipelineJsonGet, PipelineJoin)) + else v + for (k, v) in estimator.hyperparameters().items() + } for hyperparameter_name in hyperparameter_ranges.keys(): static_hyperparameters.pop(hyperparameter_name, None) diff --git a/src/sagemaker/utils.py b/src/sagemaker/utils.py index 5c617b0155..6a217deaea 100644 --- a/src/sagemaker/utils.py +++ b/src/sagemaker/utils.py @@ -79,7 +79,7 @@ def name_from_base(base, max_length=63, short=False): def unique_name_from_base(base, max_length=63): """Placeholder Docstring""" - unique = "%04x" % random.randrange(16 ** 4) # 4-digit hex + unique = "%04x" % random.randrange(16**4) # 4-digit hex ts = str(int(time.time())) available_length = max_length - 2 - len(ts) - len(unique) trimmed = base[:available_length] @@ -413,7 +413,12 @@ def repack_model( model_dir = _extract_model(model_uri, sagemaker_session, tmp) _create_or_update_code_dir( - model_dir, inference_script, source_directory, dependencies, sagemaker_session, tmp + model_dir, + inference_script, + source_directory, + dependencies, + sagemaker_session, + tmp, ) tmp_model_path = os.path.join(tmp, "temp-model.tar.gz") @@ -544,7 +549,11 @@ def sts_regional_endpoint(region): return "https://{}".format(endpoint_data["hostname"]) -def retries(max_retry_count, exception_message_prefix, seconds_to_sleep=DEFAULT_SLEEP_TIME_SECONDS): +def retries( + max_retry_count, + exception_message_prefix, + seconds_to_sleep=DEFAULT_SLEEP_TIME_SECONDS, +): """Retries until max retry count is reached. Args: diff --git a/src/sagemaker/workflow/airflow.py b/src/sagemaker/workflow/airflow.py index 115e09a9c9..739abc841a 100644 --- a/src/sagemaker/workflow/airflow.py +++ b/src/sagemaker/workflow/airflow.py @@ -195,6 +195,11 @@ def training_base_config(estimator, inputs=None, job_name=None, mini_batch_size= if s3_operations: train_config["S3Operations"] = s3_operations + if (estimator.checkpoint_local_path is not None) & (estimator.checkpoint_s3_uri is not None): + train_config["CheckpointConfig"] = { + "LocalPath": estimator.checkpoint_local_path, + "S3Uri": estimator.checkpoint_s3_uri, + } return train_config diff --git a/src/sagemaker/workflow/emr_step.py b/src/sagemaker/workflow/emr_step.py new file mode 100644 index 0000000000..8b244c78f2 --- /dev/null +++ b/src/sagemaker/workflow/emr_step.py @@ -0,0 +1,119 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
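Stepping back to the tuner and parameter changes above: pipeline entities now flow through tuning ranges without being stringified. A sketch (the parameter name and range are illustrative):

```python
from sagemaker.parameter import ContinuousParameter
from sagemaker.workflow.parameters import ParameterFloat

min_lr = ParameterFloat(name="MinLearningRate", default_value=1e-4)
lr_range = ContinuousParameter(min_value=min_lr, max_value=0.1)

# The pipeline parameter is passed through; the plain number is stringified.
tuning_range = lr_range.as_tuning_range("learning_rate")
assert tuning_range["MinValue"] is min_lr
assert tuning_range["MaxValue"] == "0.1"
```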
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""The step definitions for workflow."""
+from __future__ import absolute_import
+
+from typing import List
+
+from sagemaker.workflow.entities import (
+    RequestType,
+)
+from sagemaker.workflow.properties import (
+    Properties,
+)
+from sagemaker.workflow.steps import Step, StepTypeEnum, CacheConfig
+
+
+class EMRStepConfig:
+    """Config for a Hadoop Jar step."""
+
+    def __init__(
+        self, jar, args: List[str] = None, main_class: str = None, properties: List[dict] = None
+    ):
+        """Create a definition for input data used by an EMR cluster (job flow) step.
+
+        See AWS documentation on the ``StepConfig`` API for more details on the parameters.
+
+        Args:
+            args(List[str]):
+                A list of command line arguments passed to
+                the JAR file's main function when executed.
+            jar(str): A path to a JAR file run during the step.
+            main_class(str): The name of the main class in the specified Java file.
+            properties(List(dict)): A list of key-value pairs that are set when the step runs.
+        """
+        self.jar = jar
+        self.args = args
+        self.main_class = main_class
+        self.properties = properties
+
+    def to_request(self) -> RequestType:
+        """Convert EMRStepConfig object to request dict."""
+        config = {"HadoopJarStep": {"Jar": self.jar}}
+        if self.args is not None:
+            config["HadoopJarStep"]["Args"] = self.args
+        if self.main_class is not None:
+            config["HadoopJarStep"]["MainClass"] = self.main_class
+        if self.properties is not None:
+            config["HadoopJarStep"]["Properties"] = self.properties
+
+        return config
+
+
+class EMRStep(Step):
+    """EMR step for workflow."""
+
+    def __init__(
+        self,
+        name: str,
+        display_name: str,
+        description: str,
+        cluster_id: str,
+        step_config: EMRStepConfig,
+        depends_on: List[str] = None,
+        cache_config: CacheConfig = None,
+    ):
+        """Constructs an EMRStep.
+
+        Args:
+            name(str): The name of the EMR step.
+            display_name(str): The display name of the EMR step.
+            description(str): The description of the EMR step.
+            cluster_id(str): The ID of the running EMR cluster.
+            step_config(EMRStepConfig): One StepConfig to be executed by the job flow.
+            depends_on(List[str]):
+                A list of step names this `sagemaker.workflow.steps.EMRStep` depends on.
+            cache_config(CacheConfig): A `sagemaker.workflow.steps.CacheConfig` instance.
+
+        """
+        super(EMRStep, self).__init__(name, display_name, description, StepTypeEnum.EMR, depends_on)
+
+        emr_step_args = {"ClusterId": cluster_id, "StepConfig": step_config.to_request()}
+        self.args = emr_step_args
+        self.cache_config = cache_config
+
+        root_property = Properties(path=f"Steps.{name}", shape_name="Step", service_name="emr")
+        root_property.__dict__["ClusterId"] = cluster_id
+        self._properties = root_property
+
+    @property
+    def arguments(self) -> RequestType:
+        """The arguments dict that is used to call `AddJobFlowSteps`.
+
+        NOTE: The AddJobFlowSteps request is not quite the args list that workflow needs.
+        The Name attribute in AddJobFlowSteps cannot be passed; it will be set during runtime.
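A sketch of how these classes compose in a pipeline definition (bucket, JAR, and cluster ID are placeholders):

```python
from sagemaker.workflow.emr_step import EMRStep, EMRStepConfig

emr_config = EMRStepConfig(
    jar="s3://my-bucket/jobs/my-spark-job.jar",
    main_class="com.example.SparkApp",
    args=["--input", "s3://my-bucket/data/"],
)

emr_step = EMRStep(
    name="MyEMRStep",
    display_name="My EMR step",
    description="Runs a Hadoop JAR step on a running cluster",
    cluster_id="j-1ABCDEFGHIJKL",
    step_config=emr_config,
)
```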
diff --git a/src/sagemaker/workflow/emr_step.py b/src/sagemaker/workflow/emr_step.py
new file mode 100644
index 0000000000..8b244c78f2
--- /dev/null
+++ b/src/sagemaker/workflow/emr_step.py
@@ -0,0 +1,119 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""The step definitions for workflow."""
+from __future__ import absolute_import
+
+from typing import List
+
+from sagemaker.workflow.entities import (
+    RequestType,
+)
+from sagemaker.workflow.properties import (
+    Properties,
+)
+from sagemaker.workflow.steps import Step, StepTypeEnum, CacheConfig
+
+
+class EMRStepConfig:
+    """Config for a Hadoop Jar step."""
+
+    def __init__(
+        self, jar, args: List[str] = None, main_class: str = None, properties: List[dict] = None
+    ):
+        """Create a definition for input data used by an EMR cluster (job flow) step.
+
+        See AWS documentation on the ``StepConfig`` API for more details on the parameters.
+
+        Args:
+            jar (str): A path to a JAR file run during the step.
+            args (List[str]): A list of command line arguments passed to
+                the JAR file's main function when executed.
+            main_class (str): The name of the main class in the specified Java file.
+            properties (List[dict]): A list of key-value pairs that are set when the step runs.
+        """
+        self.jar = jar
+        self.args = args
+        self.main_class = main_class
+        self.properties = properties
+
+    def to_request(self) -> RequestType:
+        """Convert EMRStepConfig object to request dict."""
+        config = {"HadoopJarStep": {"Jar": self.jar}}
+        if self.args is not None:
+            config["HadoopJarStep"]["Args"] = self.args
+        if self.main_class is not None:
+            config["HadoopJarStep"]["MainClass"] = self.main_class
+        if self.properties is not None:
+            config["HadoopJarStep"]["Properties"] = self.properties
+
+        return config
+
+
+class EMRStep(Step):
+    """EMR step for workflow."""
+
+    def __init__(
+        self,
+        name: str,
+        display_name: str,
+        description: str,
+        cluster_id: str,
+        step_config: EMRStepConfig,
+        depends_on: List[str] = None,
+        cache_config: CacheConfig = None,
+    ):
+        """Constructs an EMRStep.
+
+        Args:
+            name (str): The name of the EMR step.
+            display_name (str): The display name of the EMR step.
+            description (str): The description of the EMR step.
+            cluster_id (str): The ID of the running EMR cluster.
+            step_config (EMRStepConfig): One StepConfig to be executed by the job flow.
+            depends_on (List[str]): A list of step names this
+                `sagemaker.workflow.emr_step.EMRStep` depends on.
+            cache_config (CacheConfig): A `sagemaker.workflow.steps.CacheConfig` instance.
+        """
+        super(EMRStep, self).__init__(name, display_name, description, StepTypeEnum.EMR, depends_on)
+
+        emr_step_args = {"ClusterId": cluster_id, "StepConfig": step_config.to_request()}
+        self.args = emr_step_args
+        self.cache_config = cache_config
+
+        root_property = Properties(path=f"Steps.{name}", shape_name="Step", service_name="emr")
+        root_property.__dict__["ClusterId"] = cluster_id
+        self._properties = root_property
+
+    @property
+    def arguments(self) -> RequestType:
+        """The arguments dict that is used to call `AddJobFlowSteps`.
+
+        NOTE: The AddJobFlowSteps request is not quite the args list that workflow needs.
+        The Name attribute in AddJobFlowSteps cannot be passed; it will be set during runtime.
+        In addition to that, we will also need to include EMR job inputs and output config.
+        """
+        return self.args
+
+    @property
+    def properties(self) -> RequestType:
+        """A Properties object representing the EMR DescribeStepResponse model."""
+        return self._properties
+
+    def to_request(self) -> RequestType:
+        """Updates the dictionary with cache configuration."""
+        request_dict = super().to_request()
+        if self.cache_config:
+            request_dict.update(self.cache_config.config)
+        return request_dict
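A minimal sketch of the new step in use; the cluster ID, JAR path, and class name are placeholders, and the resulting step would be passed to a `sagemaker.workflow.pipeline.Pipeline` like any other step:

```python
from sagemaker.workflow.emr_step import EMRStep, EMRStepConfig

emr_config = EMRStepConfig(
    jar="s3://my-bucket/jars/my-spark-job.jar",
    args=["--input", "s3://my-bucket/input/"],
    main_class="com.example.MySparkJob",
)

emr_step = EMRStep(
    name="EMRProcessing",
    display_name="EMR processing step",
    description="Runs a Hadoop JAR step on a running EMR cluster",
    cluster_id="j-1ABCDEFGHIJKL",
    step_config=emr_config,
)

# The cluster ID is exposed on the step's properties for downstream steps
print(emr_step.properties.ClusterId)
```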
""" self._path = path shape_names = [] if shape_names is None else shape_names self._shape_names = shape_names if shape_name is None else [shape_name] + shape_names + shapes = Properties._shapes_map.get(service_name, {}) + for name in self._shape_names: - shape = Properties._shapes.get(name, {}) + shape = shapes.get(name, {}) shape_type = shape.get("type") if shape_type in Properties._primitive_types: self.__str__ = name elif shape_type == "structure": members = shape["members"] for key, info in members.items(): - if Properties._shapes.get(info["shape"], {}).get("type") == "list": - self.__dict__[key] = PropertiesList(f"{path}.{key}", info["shape"]) - elif Properties._shapes.get(info["shape"], {}).get("type") == "map": - self.__dict__[key] = PropertiesMap(f"{path}.{key}", info["shape"]) + if shapes.get(info["shape"], {}).get("type") == "list": + self.__dict__[key] = PropertiesList( + f"{path}.{key}", info["shape"], service_name + ) + elif shapes.get(info["shape"], {}).get("type") == "map": + self.__dict__[key] = PropertiesMap( + f"{path}.{key}", info["shape"], service_name + ) else: - self.__dict__[key] = Properties(f"{path}.{key}", info["shape"]) + self.__dict__[key] = Properties( + f"{path}.{key}", info["shape"], service_name=service_name + ) @property def expr(self): @@ -81,16 +97,17 @@ def expr(self): class PropertiesList(Properties): """PropertiesList for use in workflow expressions.""" - def __init__(self, path: str, shape_name: str = None): + def __init__(self, path: str, shape_name: str = None, service_name: str = "sagemaker"): """Create a PropertiesList instance representing the given shape. Args: path (str): The parent path of the PropertiesList instance. - shape_name (str): The botocore sagemaker service model shape name. - root_shape_name (str): The botocore sagemaker service model shape name. + shape_name (str): The botocore service model shape name. + service_name (str): The botocore service name. """ super(PropertiesList, self).__init__(path, shape_name) self.shape_name = shape_name + self.service_name = service_name self._items: Dict[Union[int, str], Properties] = dict() def __getitem__(self, item: Union[int, str]): @@ -100,7 +117,7 @@ def __getitem__(self, item: Union[int, str]): item (Union[int, str]): The index of the item in sequence. """ if item not in self._items.keys(): - shape = Properties._shapes.get(self.shape_name) + shape = Properties._shapes_map.get(self.service_name, {}).get(self.shape_name) member = shape["member"]["shape"] if isinstance(item, str): property_item = Properties(f"{self._path}['{item}']", member) @@ -114,15 +131,17 @@ def __getitem__(self, item: Union[int, str]): class PropertiesMap(Properties): """PropertiesMap for use in workflow expressions.""" - def __init__(self, path: str, shape_name: str = None): + def __init__(self, path: str, shape_name: str = None, service_name: str = "sagemaker"): """Create a PropertiesMap instance representing the given shape. Args: path (str): The parent path of the PropertiesMap instance. shape_name (str): The botocore sagemaker service model shape name. + service_name (str): The botocore service name. """ super(PropertiesMap, self).__init__(path, shape_name) self.shape_name = shape_name + self.service_name = service_name self._items: Dict[Union[int, str], Properties] = dict() def __getitem__(self, item: Union[int, str]): @@ -132,7 +151,7 @@ def __getitem__(self, item: Union[int, str]): item (Union[int, str]): The index of the item in sequence. 
""" if item not in self._items.keys(): - shape = Properties._shapes.get(self.shape_name) + shape = Properties._shapes_map.get(self.service_name, {}).get(self.shape_name) member = shape["value"]["shape"] if isinstance(item, str): property_item = Properties(f"{self._path}['{item}']", member) diff --git a/src/sagemaker/workflow/steps.py b/src/sagemaker/workflow/steps.py index dd81553a02..329bd1d950 100644 --- a/src/sagemaker/workflow/steps.py +++ b/src/sagemaker/workflow/steps.py @@ -30,6 +30,7 @@ TransformInput, ) from sagemaker.model import Model +from sagemaker.pipeline import PipelineModel from sagemaker.processing import ( ProcessingInput, ProcessingJob, @@ -59,6 +60,7 @@ class StepTypeEnum(Enum, metaclass=DefaultEnumMeta): LAMBDA = "Lambda" QUALITY_CHECK = "QualityCheck" CLARIFY_CHECK = "ClarifyCheck" + EMR = "EMR" @attr.s @@ -319,7 +321,7 @@ class CreateModelStep(ConfigurableRetryStep): def __init__( self, name: str, - model: Model, + model: Union[Model, PipelineModel], inputs: CreateModelInput, depends_on: Union[List[str], List[Step]] = None, retry_policies: List[RetryPolicy] = None, @@ -333,7 +335,8 @@ def __init__( Args: name (str): The name of the CreateModel step. - model (Model): A `sagemaker.model.Model` instance. + model (Model or PipelineModel): A `sagemaker.model.Model` + or `sagemaker.pipeline.PipelineModel` instance. inputs (CreateModelInput): A `sagemaker.inputs.CreateModelInput` instance. Defaults to `None`. depends_on (List[str] or List[Step]): A list of step names or step instances @@ -358,16 +361,25 @@ def arguments(self) -> RequestType: ModelName cannot be included in the arguments. """ - request_dict = self.model.sagemaker_session._create_model_request( - name="", - role=self.model.role, - container_defs=self.model.prepare_container_def( - instance_type=self.inputs.instance_type, - accelerator_type=self.inputs.accelerator_type, - ), - vpc_config=self.model.vpc_config, - enable_network_isolation=self.model.enable_network_isolation(), - ) + if isinstance(self.model, PipelineModel): + request_dict = self.model.sagemaker_session._create_model_request( + name="", + role=self.model.role, + container_defs=self.model.pipeline_container_def(self.inputs.instance_type), + vpc_config=self.model.vpc_config, + enable_network_isolation=self.model.enable_network_isolation, + ) + else: + request_dict = self.model.sagemaker_session._create_model_request( + name="", + role=self.model.role, + container_defs=self.model.prepare_container_def( + instance_type=self.inputs.instance_type, + accelerator_type=self.inputs.accelerator_type, + ), + vpc_config=self.model.vpc_config, + enable_network_isolation=self.model.enable_network_isolation(), + ) request_dict.pop("ModelName") return request_dict @@ -475,6 +487,7 @@ def __init__( cache_config: CacheConfig = None, depends_on: Union[List[str], List[Step]] = None, retry_policies: List[RetryPolicy] = None, + kms_key=None, ): """Construct a ProcessingStep, given a `Processor` instance. @@ -500,6 +513,8 @@ def __init__( depends_on (List[str] or List[Step]): A list of step names or step instance this `sagemaker.workflow.steps.ProcessingStep` depends on retry_policies (List[RetryPolicy]): A list of retry policy + kms_key (str): The ARN of the KMS key that is used to encrypt the + user code file. Defaults to `None`. 
""" super(ProcessingStep, self).__init__( name, StepTypeEnum.PROCESSING, display_name, description, depends_on, retry_policies @@ -511,6 +526,7 @@ def __init__( self.code = code self.property_files = property_files self.job_name = None + self.kms_key = kms_key # Examine why run method in sagemaker.processing.Processor mutates the processor instance # by setting the instance's arguments attribute. Refactor Processor.run, if possible. @@ -545,8 +561,8 @@ def arguments(self) -> RequestType: inputs=self.inputs, outputs=self.outputs, code=self.code, + kms_key=self.kms_key, ) - process_args = ProcessingJob._get_process_args( self.processor, normalized_inputs, normalized_outputs, experiment_config=dict() ) diff --git a/tests/conftest.py b/tests/conftest.py index 6bdcf0ee24..1886258d55 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -363,7 +363,7 @@ def cpu_instance_type(sagemaker_session, request): @pytest.fixture(scope="module") def gpu_instance_type(request): - return "ml.p2.xlarge" + return "ml.p3.2xlarge" @pytest.fixture(scope="session") @@ -408,7 +408,7 @@ def pytest_generate_tests(metafunc): region in tests.integ.HOSTING_NO_P2_REGIONS or region in tests.integ.TRAINING_NO_P2_REGIONS ): - params.append("ml.p2.xlarge") + params.append("ml.p3.2xlarge") metafunc.parametrize("instance_type", params, scope="session") _generate_all_framework_version_fixtures(metafunc) diff --git a/tests/data/async_inference_input/async-inference-pca-input.csv b/tests/data/async_inference_input/async-inference-pca-input.csv new file mode 100644 index 0000000000..76fb62a89e --- /dev/null +++ b/tests/data/async_inference_input/async-inference-pca-input.csv @@ -0,0 +1,5 @@ +0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+
00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.171875000000000000e-02,7.031250000000000000e-02,7.031250000000000000e-02,7.031250000000000000e-02,4.921875000000000000e-01,5.312500000000000000e-01,6.835937500000000000e-01,1.015625000000000000e-01,6.484375000000000000e-01,9.960937500000000000e-01,9.648437500000000000e-01,4.960937500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.171875000000000000e-01,1.406250000000000000e-01,3.671875000000000000e-01,6.015625000000000000e-01,6.640625000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,8.789062500000000000e-01,6.718750000000000000e-01,9.882812500000000000e-01,9.453125000000000000e-01,7.617187500000000000e-01,2.500000000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.914062500000000000e-01,9.296875000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.804687500000000000e-01,3.632812500000000000e-01,3.203125000000000000e-01,3.203125000000000000e-01,2.
187500000000000000e-01,1.523437500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,7.031250000000000000e-02,8.554687500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,7.734375000000000000e-01,7.109375000000000000e-01,9.648437500000000000e-01,9.414062500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.125000000000000000e-01,6.093750000000000000e-01,4.179687500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,8.007812500000000000e-01,4.296875000000000000e-02,0.000000000000000000e+00,1.679687500000000000e-01,6.015625000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,5.468750000000000000e-02,3.906250000000000000e-03,6.015625000000000000e-01,9.882812500000000000e-01,3.515625000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,5.429687500000000000e-01,9.882812500000000000e-01,7.421875000000000000e-01,7.812500000000000000e-03,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,4.296875000000000000e-02,7.421875000000000000e-01,9.882812500000000000e-01,2.734375000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.00000
0000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.367187500000000000e-01,9.414062500000000000e-01,8.789062500000000000e-01,6.250000000000000000e-01,4.218750000000000000e-01,3.906250000000000000e-03,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.164062500000000000e-01,9.375000000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,4.648437500000000000e-01,9.765625000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.757812500000000000e-01,7.265625000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,5.859375000000000000e-01,1.054687500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,6.250000000000000000e-02,3.632812500000000000e-01,9.843750000000000000e-01,9.882812500000000000e-01,7.304687500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.726562500000000000e-01,9.882812500000000000e-01,9.726562500000000000e-01,2.500000000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.0000000000
00000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.796875000000000000e-01,5.078125000000000000e-01,7.148437500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,8.085937500000000000e-01,7.812500000000000000e-03,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.523437500000000000e-01,5.781250000000000000e-01,8.945312500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.765625000000000000e-01,7.109375000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.375000000000000000e-02,4.453125000000000000e-01,8.632812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,7.851562500000000000e-01,3.046875000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,8.984375000000000000e-02,2.578125000000000000e-01,8.320312500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,7.734375000000000000e-01,3.164062500000000000e-01,7.812500000000000000e-03,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,7.031250000000000000e-02,6.679687500000000000e-01,8.554687500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,7.617187500000000000e-01,3.125000000000000000e-01,3.515625000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000
000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.148437500000000000e-01,6.718750000000000000e-01,8.828125000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.531250000000000000e-01,5.195312500000000000e-01,4.296875000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,5.312500000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,8.281250000000000000e-01,5.273437500000000000e-01,5.156250000000000000e-01,6.250000000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00 
+0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.992187500000000000e-01,6.210937500000000000e-01,9.882812500000000000e-01,6.210937500000000000e-01,1.953125000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.00
0000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.875000000000000000e-01,9.296875000000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.257812500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.109375000000000000e-01,8.867187500000000000e-01,9.882812500000000000e-01,9.843750000000000000e-01,9.335937500000000000e-01,9.101562500000000000e-01,9.843750000000000000e-01,2.226562500000000000e-01,2.343750000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.906250000000000000e-02,2.343750000000000000e-01,8.750000000000000000e-01,9.843750000000000000e-01,9.882812500000000000e-01,9.843750000000000000e-01,7.890625000000000000e-01,3.281250000000000000e-01,9.843750000000000000e-01,9.882812500000000000e-01,4.765625000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,6.367187500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.882812500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,3.750000000000000000e-01,7.382812500000000000e-01,9.882812500000000000e-01,6.523437500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.992187500000000000e-01,9.296875000000000000e-01,9.882812500000000000e-01,9.882812500000000000e-01,7.421875000000000000e-01,4.453125000000000000e-01,9.882812500000000000e-01,8.906250000000000000e-01,1.835937500000000000e-01,3.085937500000000000e-01,9.960937500000000000e-01,6.562500000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.0000000
00000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.875000000000000000e-01,9.296875000000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,6.992187500000000000e-01,4.687500000000000000e-02,2.929687500000000000e-01,4.726562500000000000e-01,8.203125000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,9.882812500000000000e-01,9.492187500000000000e-01,1.953125000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.484375000000000000e-01,6.445312500000000000e-01,9.882812500000000000e-01,9.101562500000000000e-01,8.125000000000000000e-01,3.281250000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.882812500000000000e-01,9.843750000000000000e-01,6.445312500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.734375000000000000e-02,6.953125000000000000e-01,9.843750000000000000e-01,9.375000000000000000e-01,2.773437500000000000e-01,7.421875000000000000e-02,1.093750000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.882812500000000000e-01,9.843750000000000000e-01,7.617187500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.226562500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,2.460937500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.882812500000000000e-01,9.843750000000000000e-01,7.617187500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,7.734375000000000000e-01,9.882812500000000000e-01,7.421875000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.960937500000000000e-01,9.882812500000000000e-01,7.656250000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.968750000000
000000e-01,9.609375000000000000e-01,9.843750000000000000e-01,4.375000000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.882812500000000000e-01,9.843750000000000000e-01,5.781250000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.320312500000000000e-01,9.843750000000000000e-01,8.984375000000000000e-01,9.765625000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.734375000000000000e-02,5.273437500000000000e-01,9.882812500000000000e-01,7.265625000000000000e-01,4.687500000000000000e-02,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.320312500000000000e-01,9.843750000000000000e-01,8.710937500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,2.734375000000000000e-02,5.117187500000000000e-01,9.843750000000000000e-01,8.789062500000000000e-01,2.773437500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.320312500000000000e-01,9.843750000000000000e-01,5.664062500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.875000000000000000e-01,6.445312500000000000e-01,9.843750000000000000e-01,6.757812500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.359375000000000000e-01,9.882812500000000000e-01,8.789062500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,4.453125000000000000e-01,9.296875000000000000e-01,9.882812500000000000e-01,6.328125000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.320312500000000000e-01,9.843750000000000000e-01,9.72656250000000000
0e-01,5.703125000000000000e-01,1.875000000000000000e-01,1.132812500000000000e-01,3.320312500000000000e-01,6.953125000000000000e-01,8.789062500000000000e-01,9.882812500000000000e-01,8.710937500000000000e-01,6.523437500000000000e-01,2.187500000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,3.320312500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,8.945312500000000000e-01,8.398437500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,7.656250000000000000e-01,5.078125000000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,1.093750000000000000e-01,7.773437500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.882812500000000000e-01,9.843750000000000000e-01,9.843750000000000000e-01,9.101562500000000000e-01,5.664062500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,9.765625000000000000e-02,5.000000000000000000e-01,9.843750000000000000e-01,9.882812500000000000e-01,9.843750000000000000e-01,5.507812500000000000e-01,1.445312500000000000e-01,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00
,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00 
+0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00,0.00
+[... remainder of a new test-data CSV elided: each row is a long vector of normalized pixel intensities in [0, 1] (flattened grayscale image data); the raw numbers add nothing to review ...]
diff --git a/tests/data/multimodel/container/Dockerfile b/tests/data/multimodel/container/Dockerfile
index 8290a6641a..4792a429c1 100644
--- a/tests/data/multimodel/container/Dockerfile
+++ b/tests/data/multimodel/container/Dockerfile
@@ -15,7 +15,7 @@ RUN apt-get update && \
     curl \
     vim \
     && rm -rf /var/lib/apt/lists/* \
-    && curl -O https://bootstrap.pypa.io/get-pip.py \
+    && curl -O https://bootstrap.pypa.io/pip/3.6/get-pip.py \
     && python3 get-pip.py

 RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1
diff --git a/tests/data/workflow/emr-script.sh b/tests/data/workflow/emr-script.sh
new file mode 100644
index 0000000000..aeee24ec95
--- /dev/null
+++ b/tests/data/workflow/emr-script.sh
@@ -0,0 +1,2 @@
+echo "This is emr test script..."
+sleep 15
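The two-line script above exists as test data for the EMR step support in pipelines. For orientation, a rough sketch of how such a script would be attached to a pipeline step via EMR's stock script-runner; the cluster ID, the script's S3 upload path, and the step names are hypothetical, and EMRStepConfig/EMRStep are assumed from the SDK's pipeline EMR support:

    # Hedged sketch: wire tests/data/workflow/emr-script.sh into a pipeline EMR step.
    # Assumes the script was uploaded to S3 (path below is hypothetical) and that a
    # long-running EMR cluster already exists (cluster ID is a placeholder).
    from sagemaker.workflow.emr_step import EMRStep, EMRStepConfig

    emr_config = EMRStepConfig(
        # EMR's script-runner JAR executes an arbitrary shell script as a step.
        jar="s3://us-west-2.elasticmapreduce/libs/script-runner/script-runner.jar",
        args=["s3://my-bucket/workflow/emr-script.sh"],
    )

    emr_step = EMRStep(
        name="EMRTestStep",
        cluster_id="j-1ABCDEFGHIJKL",
        display_name="emr-test-step",
        description="Runs a trivial shell script on an existing EMR cluster",
        step_config=emr_config,
    )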
diff --git a/tests/integ/sagemaker/lineage/conftest.py b/tests/integ/sagemaker/lineage/conftest.py
index e4966ab67c..672af41de9 100644
--- a/tests/integ/sagemaker/lineage/conftest.py
+++ b/tests/integ/sagemaker/lineage/conftest.py
@@ -32,6 +32,14 @@ from smexperiments import trial_component, trial, experiment
 from random import randint
 from botocore.exceptions import ClientError
+from sagemaker.lineage.query import (
+    LineageQuery,
+    LineageFilter,
+    LineageSourceEnum,
+    LineageEntityEnum,
+    LineageQueryDirectionEnum,
+)
+from sagemaker.lineage.lineage_trial_component import LineageTrialComponent

 from tests.integ.sagemaker.lineage.helpers import name, names

@@ -39,6 +47,7 @@
 SLEEP_TIME_TWO_SECONDS = 2
 STATIC_PIPELINE_NAME = "SdkIntegTestStaticPipeline17"
 STATIC_ENDPOINT_NAME = "SdkIntegTestStaticEndpoint17"
+STATIC_MODEL_PACKAGE_GROUP_NAME = "SdkIntegTestStaticPipeline17ModelPackageGroup"


 @pytest.fixture
@@ -207,6 +216,24 @@ def trial_associated_artifact(artifact_obj, trial_obj, trial_component_obj, sage
         sagemaker_session=sagemaker_session,
     )
     trial_obj.add_trial_component(trial_component_obj)
+    time.sleep(4)
+    yield artifact_obj
+    trial_obj.remove_trial_component(trial_component_obj)
+    assntn.delete()
+
+
+@pytest.fixture
+def upstream_trial_associated_artifact(
+    artifact_obj, trial_obj, trial_component_obj, sagemaker_session
+):
+    assntn = association.Association.create(
+        source_arn=trial_component_obj.trial_component_arn,
+        destination_arn=artifact_obj.artifact_arn,
+        association_type="ContributedTo",
+        sagemaker_session=sagemaker_session,
+    )
+    trial_obj.add_trial_component(trial_component_obj)
+    time.sleep(3)
     yield artifact_obj
     trial_obj.remove_trial_component(trial_component_obj)
     assntn.delete()
@@ -514,6 +541,103 @@ def _get_static_pipeline_execution_arn(sagemaker_session):
     return pipeline_execution_arn


+@pytest.fixture
+def static_approval_action(
+    sagemaker_session, static_endpoint_context, static_pipeline_execution_arn
+):
+    query_filter = LineageFilter(
+        entities=[LineageEntityEnum.ACTION], sources=[LineageSourceEnum.APPROVAL]
+    )
+    query_result = LineageQuery(sagemaker_session).query(
+        start_arns=[static_endpoint_context.context_arn],
+        query_filter=query_filter,
+        direction=LineageQueryDirectionEnum.ASCENDANTS,
+        include_edges=False,
+    )
+    action_name = query_result.vertices[0].arn.split("/")[1]
+    yield action.ModelPackageApprovalAction.load(
+        action_name=action_name, sagemaker_session=sagemaker_session
+    )
+
+
+@pytest.fixture
+def static_model_deployment_action(sagemaker_session, static_endpoint_context):
+    query_filter = LineageFilter(
+        entities=[LineageEntityEnum.ACTION], sources=[LineageSourceEnum.MODEL_DEPLOYMENT]
+    )
+    query_result = LineageQuery(sagemaker_session).query(
+        start_arns=[static_endpoint_context.context_arn],
+        query_filter=query_filter,
+        direction=LineageQueryDirectionEnum.ASCENDANTS,
+        include_edges=False,
+    )
+    model_approval_actions = []
+    for vertex in query_result.vertices:
+        model_approval_actions.append(vertex.to_lineage_object())
+    yield model_approval_actions[0]
+
+
+@pytest.fixture
+def static_processing_job_trial_component(
+    sagemaker_session, static_endpoint_context
+) -> LineageTrialComponent:
+    query_filter = LineageFilter(
+        entities=[LineageEntityEnum.TRIAL_COMPONENT], sources=[LineageSourceEnum.PROCESSING_JOB]
+    )
+
+    query_result = LineageQuery(sagemaker_session).query(
+        start_arns=[static_endpoint_context.context_arn],
+        query_filter=query_filter,
+        direction=LineageQueryDirectionEnum.ASCENDANTS,
+        include_edges=False,
+    )
+    processing_jobs = []
+    for vertex in query_result.vertices:
+        processing_jobs.append(vertex.to_lineage_object())
+
+    return processing_jobs[0]
+
+
+@pytest.fixture
+def static_training_job_trial_component(
+    sagemaker_session, static_endpoint_context
+) -> LineageTrialComponent:
+    query_filter = LineageFilter(
+        entities=[LineageEntityEnum.TRIAL_COMPONENT], sources=[LineageSourceEnum.TRAINING_JOB]
+    )
+
+    query_result = LineageQuery(sagemaker_session).query(
+        start_arns=[static_endpoint_context.context_arn],
+        query_filter=query_filter,
+        direction=LineageQueryDirectionEnum.ASCENDANTS,
+        include_edges=False,
+    )
+    training_jobs = []
+    for vertex in query_result.vertices:
+        training_jobs.append(vertex.to_lineage_object())
+
+    return training_jobs[0]
+
+
+@pytest.fixture
+def static_transform_job_trial_component(
+    static_processing_job_trial_component, sagemaker_session, static_endpoint_context
+) -> LineageTrialComponent:
+    query_filter = LineageFilter(
+        entities=[LineageEntityEnum.TRIAL_COMPONENT], sources=[LineageSourceEnum.TRANSFORM_JOB]
+    )
+    query_result = LineageQuery(sagemaker_session).query(
+        start_arns=[static_processing_job_trial_component.trial_component_arn],
+        query_filter=query_filter,
+        direction=LineageQueryDirectionEnum.DESCENDANTS,
+        include_edges=False,
+    )
+    transform_jobs = []
+    for vertex in query_result.vertices:
+        transform_jobs.append(vertex.to_lineage_object())
+    yield transform_jobs[0]
+
+
 @pytest.fixture
 def static_endpoint_context(sagemaker_session, static_pipeline_execution_arn):
     endpoint_arn = get_endpoint_arn_from_static_pipeline(sagemaker_session)
@@ -543,6 +667,29 @@ def static_endpoint_context(sagemaker_session, static_pipeline_execution_arn):
     )


+@pytest.fixture
+def static_model_package_group_context(sagemaker_session, static_pipeline_execution_arn):
+
+    model_package_group_arn = get_model_package_group_arn_from_static_pipeline(sagemaker_session)
+
+    contexts = sagemaker_session.sagemaker_client.list_contexts(SourceUri=model_package_group_arn)[
+        "ContextSummaries"
+    ]
+    if len(contexts) != 1:
+        raise (
+            Exception(
+                f"Got an unexpected number of Contexts for \
+                model package group {STATIC_MODEL_PACKAGE_GROUP_NAME} from pipeline \
+                execution {static_pipeline_execution_arn}. \
+                Expected 1 but got {len(contexts)}"
+            )
+        )
+
+    yield context.ModelPackageGroup.load(
+        contexts[0]["ContextName"], sagemaker_session=sagemaker_session
+    )
+
+
 @pytest.fixture
 def static_model_artifact(sagemaker_session, static_pipeline_execution_arn):
     model_package_arn = get_model_package_arn_from_static_pipeline(
@@ -590,6 +737,23 @@ def static_dataset_artifact(static_model_artifact, sagemaker_session):
     )


+@pytest.fixture
+def static_image_artifact(static_model_artifact, sagemaker_session):
+    query_filter = LineageFilter(
+        entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.IMAGE]
+    )
+    query_result = LineageQuery(sagemaker_session).query(
+        start_arns=[static_model_artifact.artifact_arn],
+        query_filter=query_filter,
+        direction=LineageQueryDirectionEnum.ASCENDANTS,
+        include_edges=False,
+    )
+    image_artifact = []
+    for vertex in query_result.vertices:
+        image_artifact.append(vertex.to_lineage_object())
+    return image_artifact[0]
+
+
 def get_endpoint_arn_from_static_pipeline(sagemaker_session):
     try:
         endpoint_arn = sagemaker_session.sagemaker_client.describe_endpoint(
@@ -604,6 +768,15 @@ def get_endpoint_arn_from_static_pipeline(sagemaker_session):
         raise e


+def get_model_package_group_arn_from_static_pipeline(sagemaker_session):
+    static_model_package_group_arn = (
+        sagemaker_session.sagemaker_client.describe_model_package_group(
+            ModelPackageGroupName=STATIC_MODEL_PACKAGE_GROUP_NAME
+        )["ModelPackageGroupArn"]
+    )
+    return static_model_package_group_arn
+
+
 def get_model_package_arn_from_static_pipeline(pipeline_execution_arn, sagemaker_session):
     # get the model package ARN from the pipeline
     pipeline_execution_steps = sagemaker_session.sagemaker_client.list_pipeline_execution_steps(
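Every new fixture above repeats the same query shape. Pulled out as a standalone sketch (the start ARN is a placeholder; the classes are exactly those imported in conftest.py above):

    # Minimal sketch of the lineage-query pattern the fixtures share: walk the
    # lineage graph upstream (ASCENDANTS) from an endpoint context and keep only
    # Action vertices whose source type is Approval.
    from sagemaker.session import Session
    from sagemaker.lineage.query import (
        LineageQuery,
        LineageFilter,
        LineageSourceEnum,
        LineageEntityEnum,
        LineageQueryDirectionEnum,
    )

    session = Session()
    query_filter = LineageFilter(
        entities=[LineageEntityEnum.ACTION], sources=[LineageSourceEnum.APPROVAL]
    )
    result = LineageQuery(session).query(
        # Placeholder ARN; the fixtures resolve this from the static test endpoint.
        start_arns=["arn:aws:sagemaker:us-west-2:123456789012:context/my-endpoint-context"],
        query_filter=query_filter,
        direction=LineageQueryDirectionEnum.ASCENDANTS,
        include_edges=False,
    )
    # Raw vertices can be materialized into their concrete lineage classes.
    actions = [vertex.to_lineage_object() for vertex in result.vertices]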
\ + Expected 1 but got {len(contexts)}" + ) + ) + + yield context.ModelPackageGroup.load( + contexts[0]["ContextName"], sagemaker_session=sagemaker_session + ) + + @pytest.fixture def static_model_artifact(sagemaker_session, static_pipeline_execution_arn): model_package_arn = get_model_package_arn_from_static_pipeline( @@ -590,6 +737,23 @@ def static_dataset_artifact(static_model_artifact, sagemaker_session): ) +@pytest.fixture +def static_image_artifact(static_model_artifact, sagemaker_session): + query_filter = LineageFilter( + entities=[LineageEntityEnum.ARTIFACT], sources=[LineageSourceEnum.IMAGE] + ) + query_result = LineageQuery(sagemaker_session).query( + start_arns=[static_model_artifact.artifact_arn], + query_filter=query_filter, + direction=LineageQueryDirectionEnum.ASCENDANTS, + include_edges=False, + ) + image_artifact = [] + for vertex in query_result.vertices: + image_artifact.append(vertex.to_lineage_object()) + return image_artifact[0] + + def get_endpoint_arn_from_static_pipeline(sagemaker_session): try: endpoint_arn = sagemaker_session.sagemaker_client.describe_endpoint( @@ -604,6 +768,15 @@ def get_endpoint_arn_from_static_pipeline(sagemaker_session): raise e +def get_model_package_group_arn_from_static_pipeline(sagemaker_session): + static_model_package_group_arn = ( + sagemaker_session.sagemaker_client.describe_model_package_group( + ModelPackageGroupName=STATIC_MODEL_PACKAGE_GROUP_NAME + )["ModelPackageGroupArn"] + ) + return static_model_package_group_arn + + def get_model_package_arn_from_static_pipeline(pipeline_execution_arn, sagemaker_session): # get the model package ARN from the pipeline pipeline_execution_steps = sagemaker_session.sagemaker_client.list_pipeline_execution_steps( diff --git a/tests/integ/sagemaker/lineage/helpers.py b/tests/integ/sagemaker/lineage/helpers.py index bdeabf5b48..fb71d1d88c 100644 --- a/tests/integ/sagemaker/lineage/helpers.py +++ b/tests/integ/sagemaker/lineage/helpers.py @@ -42,7 +42,7 @@ def retry(callable, num_attempts=8): if i == num_attempts - 1: raise ex print("Retrying", ex) - time.sleep(2 ** i) + time.sleep(2**i) assert False, "logic error in retry" diff --git a/tests/integ/sagemaker/lineage/test_action.py b/tests/integ/sagemaker/lineage/test_action.py index a0531450b5..8b462279ca 100644 --- a/tests/integ/sagemaker/lineage/test_action.py +++ b/tests/integ/sagemaker/lineage/test_action.py @@ -20,6 +20,7 @@ import pytest from sagemaker.lineage import action +from sagemaker.lineage.query import LineageQueryDirectionEnum def test_create_delete(action_obj): @@ -117,3 +118,50 @@ def test_tags(action_obj, sagemaker_session): # length of actual tags will be greater than 1 assert len(actual_tags) > 0 assert [actual_tags[-1]] == tags + + +def test_upstream_artifacts(static_model_deployment_action): + artifacts_from_query = static_model_deployment_action.artifacts( + direction=LineageQueryDirectionEnum.ASCENDANTS + ) + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert "artifact" in artifact.artifact_arn + + +def test_downstream_artifacts(static_approval_action): + artifacts_from_query = static_approval_action.artifacts( + direction=LineageQueryDirectionEnum.DESCENDANTS + ) + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert "artifact" in artifact.artifact_arn + + +def test_datasets(static_approval_action, static_dataset_artifact, sagemaker_session): + + sagemaker_session.sagemaker_client.add_association( + SourceArn=static_dataset_artifact.artifact_arn, + 
DestinationArn=static_approval_action.action_arn, + AssociationType="ContributedTo", + ) + time.sleep(3) + artifacts_from_query = static_approval_action.datasets() + + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert "artifact" in artifact.artifact_arn + assert artifact.artifact_type == "DataSet" + + sagemaker_session.sagemaker_client.delete_association( + SourceArn=static_dataset_artifact.artifact_arn, + DestinationArn=static_approval_action.action_arn, + ) + + +def test_endpoints(static_approval_action): + endpoint_contexts_from_query = static_approval_action.endpoints() + assert len(endpoint_contexts_from_query) > 0 + for endpoint in endpoint_contexts_from_query: + assert endpoint.context_type == "Endpoint" + assert "endpoint" in endpoint.context_arn diff --git a/tests/integ/sagemaker/lineage/test_artifact.py b/tests/integ/sagemaker/lineage/test_artifact.py index 4a0c6398b2..7ecbd0ac15 100644 --- a/tests/integ/sagemaker/lineage/test_artifact.py +++ b/tests/integ/sagemaker/lineage/test_artifact.py @@ -102,6 +102,13 @@ def test_list_by_type(artifact_objs, sagemaker_session): assert artifact_names_listed[0] == expected_name +def test_get_artifact(static_dataset_artifact): + s3_uri = static_dataset_artifact.source.source_uri + expected_artifact = static_dataset_artifact.s3_uri_artifacts(s3_uri=s3_uri) + for ar in expected_artifact["ArtifactSummaries"]: + assert ar.get("Source")["SourceUri"] == s3_uri + + def test_downstream_trials(trial_associated_artifact, trial_obj, sagemaker_session): # allow trial components to index, 30 seconds max def validate(): @@ -120,6 +127,18 @@ def validate(): retry(validate, num_attempts=3) +def test_downstream_trials_v2(trial_associated_artifact, trial_obj, sagemaker_session): + trials = trial_associated_artifact.downstream_trials_v2() + assert len(trials) == 1 + assert trial_obj.trial_name in trials + + +def test_upstream_trials(upstream_trial_associated_artifact, trial_obj, sagemaker_session): + trials = upstream_trial_associated_artifact.upstream_trials() + assert len(trials) == 1 + assert trial_obj.trial_name in trials + + @pytest.mark.timeout(30) def test_tag(artifact_obj, sagemaker_session): tag = {"Key": "foo", "Value": "bar"} diff --git a/tests/integ/sagemaker/lineage/test_context.py b/tests/integ/sagemaker/lineage/test_context.py index 5b36cee746..bdc4cb34e3 100644 --- a/tests/integ/sagemaker/lineage/test_context.py +++ b/tests/integ/sagemaker/lineage/test_context.py @@ -20,6 +20,7 @@ import pytest from sagemaker.lineage import context +from sagemaker.lineage.query import LineageQueryDirectionEnum def test_create_delete(context_obj): @@ -32,6 +33,16 @@ def test_create_delete_with_association(context_obj_with_association): assert context_obj_with_association.context_arn +def test_action(static_endpoint_context, sagemaker_session): + actions_from_query = static_endpoint_context.actions( + direction=LineageQueryDirectionEnum.ASCENDANTS + ) + + assert len(actions_from_query) > 0 + for action in actions_from_query: + assert "action" in action.action_arn + + def test_save(context_obj, sagemaker_session): context_obj.description = "updated description" context_obj.properties = {"k3": "v3"} diff --git a/tests/integ/sagemaker/lineage/test_dataset_artifact.py b/tests/integ/sagemaker/lineage/test_dataset_artifact.py index be03a85e86..ee81b7e137 100644 --- a/tests/integ/sagemaker/lineage/test_dataset_artifact.py +++ b/tests/integ/sagemaker/lineage/test_dataset_artifact.py @@ -35,3 +35,19 @@ def test_endpoint_contexts( assert 
len(contexts_from_query) > 0 for context in contexts_from_query: assert context.context_type == "Endpoint" + + +def test_get_upstream_datasets(static_dataset_artifact, sagemaker_session): + artifacts_from_query = static_dataset_artifact.upstream_datasets() + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert artifact.artifact_type == "DataSet" + assert "artifact" in artifact.artifact_arn + + +def test_get_down_datasets(static_dataset_artifact, sagemaker_session): + artifacts_from_query = static_dataset_artifact.downstream_datasets() + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert artifact.artifact_type == "DataSet" + assert "artifact" in artifact.artifact_arn diff --git a/tests/integ/sagemaker/lineage/test_endpoint_context.py b/tests/integ/sagemaker/lineage/test_endpoint_context.py index 78a33e8ef9..2a797bd5cb 100644 --- a/tests/integ/sagemaker/lineage/test_endpoint_context.py +++ b/tests/integ/sagemaker/lineage/test_endpoint_context.py @@ -15,6 +15,7 @@ import time SLEEP_TIME_ONE_SECONDS = 1 +SLEEP_TIME_THREE_SECONDS = 3 def test_model(endpoint_context_associate_with_model, model_obj, endpoint_action_obj): @@ -59,3 +60,46 @@ def test_pipeline_execution_arn(static_endpoint_context, static_pipeline_executi pipeline_execution_arn = static_endpoint_context.pipeline_execution_arn() assert pipeline_execution_arn == static_pipeline_execution_arn + + +def test_transform_jobs( + sagemaker_session, static_transform_job_trial_component, static_endpoint_context +): + sagemaker_session.sagemaker_client.add_association( + SourceArn=static_transform_job_trial_component.trial_component_arn, + DestinationArn=static_endpoint_context.context_arn, + AssociationType="ContributedTo", + ) + time.sleep(SLEEP_TIME_THREE_SECONDS) + transform_jobs_from_query = static_endpoint_context.transform_jobs() + + assert len(transform_jobs_from_query) > 0 + for transform_job in transform_jobs_from_query: + assert "transform-job" in transform_job.trial_component_arn + assert "TransformJob" in transform_job.source.get("SourceType") + + sagemaker_session.sagemaker_client.delete_association( + SourceArn=static_transform_job_trial_component.trial_component_arn, + DestinationArn=static_endpoint_context.context_arn, + ) + + +def test_processing_jobs( + sagemaker_session, static_transform_job_trial_component, static_endpoint_context +): + processing_jobs_from_query = static_endpoint_context.processing_jobs() + assert len(processing_jobs_from_query) > 0 + for processing_job in processing_jobs_from_query: + assert "processing-job" in processing_job.trial_component_arn + assert "ProcessingJob" in processing_job.source.get("SourceType") + + +def test_trial_components( + sagemaker_session, static_transform_job_trial_component, static_endpoint_context +): + trial_components_from_query = static_endpoint_context.trial_components() + + assert len(trial_components_from_query) > 0 + for trial_component in trial_components_from_query: + assert "job" in trial_component.trial_component_arn + assert "Job" in trial_component.source.get("SourceType") diff --git a/tests/integ/sagemaker/lineage/test_image_artifact.py b/tests/integ/sagemaker/lineage/test_image_artifact.py new file mode 100644 index 0000000000..bd0f76445d --- /dev/null +++ b/tests/integ/sagemaker/lineage/test_image_artifact.py @@ -0,0 +1,26 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). 
You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""This module contains code to test SageMaker ``ImageArtifact``""" +from __future__ import absolute_import + +from sagemaker.lineage.query import LineageQueryDirectionEnum + + +def test_dataset(static_image_artifact, sagemaker_session): + artifacts_from_query = static_image_artifact.datasets( + direction=LineageQueryDirectionEnum.DESCENDANTS + ) + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert artifact.artifact_type == "DataSet" + assert "artifact" in artifact.artifact_arn diff --git a/tests/integ/sagemaker/lineage/test_lineage_trial_component.py b/tests/integ/sagemaker/lineage/test_lineage_trial_component.py new file mode 100644 index 0000000000..d8a8a5d9c8 --- /dev/null +++ b/tests/integ/sagemaker/lineage/test_lineage_trial_component.py @@ -0,0 +1,33 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""This module contains code to test SageMaker ``Trial Component``""" +from __future__ import absolute_import + + +def test_dataset_artifacts(static_training_job_trial_component): + artifacts_from_query = static_training_job_trial_component.dataset_artifacts() + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert artifact.artifact_type == "DataSet" + + +def test_models(static_processing_job_trial_component): + artifacts_from_query = static_processing_job_trial_component.models() + assert len(artifacts_from_query) > 0 + for artifact in artifacts_from_query: + assert artifact.artifact_type == "Model" + + +def test_pipeline_execution_arn(static_training_job_trial_component, static_pipeline_execution_arn): + pipeline_execution_arn = static_training_job_trial_component.pipeline_execution_arn() + assert pipeline_execution_arn == static_pipeline_execution_arn diff --git a/tests/integ/sagemaker/lineage/test_model_package_group_context.py b/tests/integ/sagemaker/lineage/test_model_package_group_context.py new file mode 100644 index 0000000000..8f6cd85e77 --- /dev/null +++ b/tests/integ/sagemaker/lineage/test_model_package_group_context.py @@ -0,0 +1,20 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. +"""This module contains code to test SageMaker ``ModelPackageGroup``""" +from __future__ import absolute_import + + +def test_pipeline_execution_arn(static_model_package_group_context, static_pipeline_execution_arn): + pipeline_execution_arn = static_model_package_group_context.pipeline_execution_arn() + + assert pipeline_execution_arn == static_pipeline_execution_arn diff --git a/tests/integ/test_async_inference.py b/tests/integ/test_async_inference.py new file mode 100644 index 0000000000..e4a16da54f --- /dev/null +++ b/tests/integ/test_async_inference.py @@ -0,0 +1,110 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from __future__ import absolute_import + +import pytest +import os +import time + +import sagemaker.amazon.pca +from sagemaker.utils import unique_name_from_base +from sagemaker.async_inference import AsyncInferenceConfig, AsyncInferenceResponse +from sagemaker.predictor_async import AsyncPredictor +from tests.integ import datasets, TRAINING_DEFAULT_TIMEOUT_MINUTES +from tests.integ.timeout import timeout, timeout_and_delete_endpoint_by_name + +INPUT_LOCAL_PATH = "tests/data/async_inference_input/async-inference-pca-input.csv" + + +@pytest.fixture +def training_set(): + return datasets.one_p_mnist() + + +def test_async_walkthrough(sagemaker_session, cpu_instance_type, training_set): + job_name = unique_name_from_base("pca") + + with timeout(minutes=TRAINING_DEFAULT_TIMEOUT_MINUTES): + pca = sagemaker.amazon.pca.PCA( + role="SageMakerRole", + instance_count=1, + instance_type=cpu_instance_type, + num_components=48, + sagemaker_session=sagemaker_session, + ) + + pca.algorithm_mode = "randomized" + pca.subtract_mean = True + pca.extra_components = 5 + pca.fit(pca.record_set(training_set[0][:100]), job_name=job_name) + + with timeout_and_delete_endpoint_by_name(job_name, sagemaker_session): + predictor_async = pca.deploy( + endpoint_name=job_name, + initial_instance_count=1, + instance_type=cpu_instance_type, + async_inference_config=AsyncInferenceConfig(), + ) + assert isinstance(predictor_async, AsyncPredictor) + + data = training_set[0][:5] + result_no_wait_with_data = predictor_async.predict_async(data=data) + assert isinstance(result_no_wait_with_data, AsyncInferenceResponse) + assert result_no_wait_with_data.output_path.startswith( + "s3://" + sagemaker_session.default_bucket() + ) + time.sleep(5) + result_no_wait_with_data = result_no_wait_with_data.get_result() + assert len(result_no_wait_with_data) == 5 + for record in result_no_wait_with_data: + assert record.label["projection"] is not None + + result_wait_with_data = predictor_async.predict(data=data) + assert len(result_wait_with_data) == 5 + for idx, record in enumerate(result_wait_with_data): + assert record.label["projection"] is not None + assert record.label["projection"] == result_no_wait_with_data[idx].label["projection"] + + s3_key_prefix = os.path.join( + "integ-test-test-async-inference", + 
job_name, + ) + + input_s3_path = os.path.join( + "s3://", + sagemaker_session.default_bucket(), + s3_key_prefix, + "async-inference-pca-input.csv", + ) + + sagemaker_session.upload_data( + path=INPUT_LOCAL_PATH, + bucket=sagemaker_session.default_bucket(), + key_prefix=s3_key_prefix, + extra_args={"ContentType": "text/csv"}, + ) + + result_not_wait = predictor_async.predict_async(input_path=input_s3_path) + assert isinstance(result_not_wait, AsyncInferenceResponse) + assert result_not_wait.output_path.startswith("s3://" + sagemaker_session.default_bucket()) + time.sleep(5) + result_not_wait = result_not_wait.get_result() + assert len(result_not_wait) == 5 + for record in result_not_wait: + assert record.label["projection"] is not None + + result_wait = predictor_async.predict(input_path=input_s3_path) + assert len(result_wait) == 5 + for idx, record in enumerate(result_wait): + assert record.label["projection"] is not None + assert record.label["projection"] == result_not_wait[idx].label["projection"] diff --git a/tests/integ/test_horovod.py b/tests/integ/test_horovod.py index a5dfa79c8c..756d511b32 100644 --- a/tests/integ/test_horovod.py +++ b/tests/integ/test_horovod.py @@ -59,7 +59,7 @@ def test_hvd_gpu( sagemaker_session, tensorflow_training_latest_version, tensorflow_training_latest_py_version, - "ml.p2.xlarge", + "ml.p3.2xlarge", tmpdir, ) diff --git a/tests/integ/test_serverless_inference.py b/tests/integ/test_serverless_inference.py new file mode 100644 index 0000000000..40b1ace147 --- /dev/null +++ b/tests/integ/test_serverless_inference.py @@ -0,0 +1,57 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
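The async walkthrough above exercises both calling conventions: `predict_async` returns immediately with an `AsyncInferenceResponse`, while `predict` blocks until the output lands in S3. Distilled from the test, the core pattern is small. A minimal sketch, assuming the async endpoint already exists; the endpoint name and S3 paths are placeholders:

```python
from sagemaker.predictor import Predictor
from sagemaker.predictor_async import AsyncPredictor
from sagemaker.async_inference.waiter_config import WaiterConfig

# Wrap an existing real-time Predictor; "my-async-endpoint" is a placeholder.
predictor = AsyncPredictor(Predictor("my-async-endpoint"))

# Non-blocking: the request is queued and an AsyncInferenceResponse comes back
# at once, pointing at the S3 location the result will eventually be written
# to. The input object must already exist in S3.
response = predictor.predict_async(input_path="s3://my-bucket/input.csv")
print(response.output_path)

# Block until the output object appears: poll every 15 seconds, 60 tries max.
result = response.get_result(WaiterConfig(delay=15, max_attempts=60))
```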
+from __future__ import absolute_import + +import pytest + +import sagemaker.amazon.pca +from sagemaker.utils import unique_name_from_base +from sagemaker.serverless import ServerlessInferenceConfig +from tests.integ import datasets, TRAINING_DEFAULT_TIMEOUT_MINUTES +from tests.integ.timeout import timeout, timeout_and_delete_endpoint_by_name + + +@pytest.fixture +def training_set(): + return datasets.one_p_mnist() + + +def test_serverless_walkthrough(sagemaker_session, cpu_instance_type, training_set): + job_name = unique_name_from_base("pca") + + with timeout(minutes=TRAINING_DEFAULT_TIMEOUT_MINUTES): + pca = sagemaker.amazon.pca.PCA( + role="SageMakerRole", + instance_count=1, + instance_type=cpu_instance_type, + num_components=48, + sagemaker_session=sagemaker_session, + enable_network_isolation=True, + ) + + pca.algorithm_mode = "randomized" + pca.subtract_mean = True + pca.extra_components = 5 + pca.fit(pca.record_set(training_set[0][:100]), job_name=job_name) + + with timeout_and_delete_endpoint_by_name(job_name, sagemaker_session): + + predictor_serverless = pca.deploy( + endpoint_name=job_name, serverless_inference_config=ServerlessInferenceConfig() + ) + + result = predictor_serverless.predict(training_set[0][:5]) + + assert len(result) == 5 + for record in result: + assert record.label["projection"] is not None diff --git a/tests/integ/test_workflow.py b/tests/integ/test_workflow.py index 58b681fd0e..d2c142ee38 100644 --- a/tests/integ/test_workflow.py +++ b/tests/integ/test_workflow.py @@ -66,14 +66,25 @@ ConditionIn, ConditionLessThanOrEqualTo, ) -from sagemaker.workflow.condition_step import ConditionStep, JsonGet -from sagemaker.workflow.callback_step import CallbackStep, CallbackOutput, CallbackOutputTypeEnum -from sagemaker.workflow.lambda_step import LambdaStep, LambdaOutput, LambdaOutputTypeEnum -from sagemaker.workflow.properties import PropertyFile +from sagemaker.workflow.condition_step import ConditionStep +from sagemaker.workflow.callback_step import ( + CallbackStep, + CallbackOutput, + CallbackOutputTypeEnum, +) +from sagemaker.workflow.lambda_step import ( + LambdaStep, + LambdaOutput, + LambdaOutputTypeEnum, +) +from sagemaker.workflow.emr_step import EMRStep, EMRStepConfig from sagemaker.wrangler.processing import DataWranglerProcessor -from sagemaker.dataset_definition.inputs import DatasetDefinition, AthenaDatasetDefinition +from sagemaker.dataset_definition.inputs import ( + DatasetDefinition, + AthenaDatasetDefinition, +) from sagemaker.workflow.execution_variables import ExecutionVariables -from sagemaker.workflow.functions import Join +from sagemaker.workflow.functions import Join, JsonGet from sagemaker.wrangler.ingestion import generate_data_ingestion_flow_from_s3_input from sagemaker.workflow.parameters import ( ParameterInteger, @@ -87,11 +98,16 @@ TuningStep, TransformStep, TransformInput, + PropertyFile, ) from sagemaker.workflow.step_collections import RegisterModel from sagemaker.workflow.pipeline import Pipeline from sagemaker.lambda_helper import Lambda -from sagemaker.feature_store.feature_group import FeatureGroup, FeatureDefinition, FeatureTypeEnum +from sagemaker.feature_store.feature_group import ( + FeatureGroup, + FeatureDefinition, + FeatureTypeEnum, +) from tests.integ import DATA_DIR from tests.integ.kms_utils import get_or_create_kms_key from tests.integ.retry import retries @@ -137,7 +153,7 @@ def feature_store_session(sagemaker_session): @pytest.fixture def pipeline_name(): - return f"my-pipeline-{int(time.time() * 10**7)}" + return 
f"my-pipeline-{int(time.time() * 10 ** 7)}" @pytest.fixture @@ -150,7 +166,7 @@ def athena_dataset_definition(sagemaker_session): catalog="AwsDataCatalog", database="default", work_group="workgroup", - query_string='SELECT * FROM "default"."s3_test_table_$STAGE_$REGIONUNDERSCORED";', + query_string=('SELECT * FROM "default"."s3_test_table_$STAGE_$REGIONUNDERSCORED";'), output_s3_uri=f"s3://{sagemaker_session.default_bucket()}/add", output_format="JSON", output_compression="GZIP", @@ -261,7 +277,10 @@ def build_jar(): ) else: subprocess.run( - ["javac", os.path.join(jar_file_path, java_file_path, "HelloJavaSparkApp.java")] + [ + "javac", + os.path.join(jar_file_path, java_file_path, "HelloJavaSparkApp.java"), + ] ) subprocess.run( @@ -382,10 +401,20 @@ def test_three_step_definition( assert set(tuple(param.items()) for param in definition["Parameters"]) == set( [ tuple( - {"Name": "InstanceType", "Type": "String", "DefaultValue": "ml.m5.xlarge"}.items() + { + "Name": "InstanceType", + "Type": "String", + "DefaultValue": "ml.m5.xlarge", + }.items() ), tuple({"Name": "InstanceCount", "Type": "Integer", "DefaultValue": 1}.items()), - tuple({"Name": "OutputPrefix", "Type": "String", "DefaultValue": "output"}.items()), + tuple( + { + "Name": "OutputPrefix", + "Type": "String", + "DefaultValue": "output", + }.items() + ), ] ) @@ -442,7 +471,7 @@ def test_three_step_definition( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) finally: @@ -505,14 +534,14 @@ def test_one_step_sklearn_processing_pipeline( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) pipeline.parameters = [ParameterInteger(name="InstanceCount", default_value=1)] execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) @@ -596,7 +625,7 @@ def test_one_step_framework_processing_pipeline( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -604,13 +633,13 @@ def test_one_step_framework_processing_pipeline( response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) @@ -698,7 +727,7 @@ def test_one_step_pyspark_processing_pipeline( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -706,13 +735,13 @@ def test_one_step_pyspark_processing_pipeline( 
response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) @@ -739,7 +768,13 @@ def test_one_step_pyspark_processing_pipeline( def test_one_step_sparkjar_processing_pipeline( - sagemaker_session, role, cpu_instance_type, pipeline_name, region_name, configuration, build_jar + sagemaker_session, + role, + cpu_instance_type, + pipeline_name, + region_name, + configuration, + build_jar, ): instance_count = ParameterInteger(name="InstanceCount", default_value=2) cache_config = CacheConfig(enable_caching=True, expire_after="T30m") @@ -757,7 +792,9 @@ def test_one_step_sparkjar_processing_pipeline( body = data.read() input_data_uri = f"s3://{bucket}/spark/input/data.jsonl" S3Uploader.upload_string_as_file_body( - body=body, desired_s3_uri=input_data_uri, sagemaker_session=sagemaker_session + body=body, + desired_s3_uri=input_data_uri, + sagemaker_session=sagemaker_session, ) output_data_uri = f"s3://{bucket}/spark/output/sales/{datetime.now().isoformat()}" @@ -795,7 +832,7 @@ def test_one_step_sparkjar_processing_pipeline( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -803,13 +840,13 @@ def test_one_step_sparkjar_processing_pipeline( response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) @@ -857,7 +894,7 @@ def test_one_step_callback_pipeline(sagemaker_session, role, pipeline_name, regi response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -865,7 +902,7 @@ def test_one_step_callback_pipeline(sagemaker_session, role, pipeline_name, regi response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) finally: @@ -876,7 +913,12 @@ def test_one_step_callback_pipeline(sagemaker_session, role, pipeline_name, regi def test_steps_with_map_params_pipeline( - sagemaker_session, role, script_dir, pipeline_name, region_name, athena_dataset_definition + sagemaker_session, + role, + script_dir, + pipeline_name, + region_name, + athena_dataset_definition, ): instance_count = ParameterInteger(name="InstanceCount", default_value=2) framework_version = "0.20.0" @@ -1006,7 +1048,7 @@ def test_steps_with_map_params_pipeline( response = pipeline.create(role) create_arn = response["PipelineArn"] 
assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -1048,7 +1090,7 @@ def test_two_step_callback_pipeline_with_output_reference( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) finally: @@ -1065,7 +1107,7 @@ def test_one_step_lambda_pipeline(sagemaker_session, role, pipeline_name, region step_lambda = LambdaStep( name="lambda-step", lambda_func=Lambda( - function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda", + function_arn=("arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda"), session=sagemaker_session, ), inputs={"arg1": "foo"}, @@ -1083,7 +1125,7 @@ def test_one_step_lambda_pipeline(sagemaker_session, role, pipeline_name, region response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -1091,7 +1133,7 @@ def test_one_step_lambda_pipeline(sagemaker_session, role, pipeline_name, region response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) finally: @@ -1110,7 +1152,7 @@ def test_two_step_lambda_pipeline_with_output_reference( step_lambda1 = LambdaStep( name="lambda-step1", lambda_func=Lambda( - function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda", + function_arn=("arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda"), session=sagemaker_session, ), inputs={"arg1": "foo"}, @@ -1120,7 +1162,7 @@ def test_two_step_lambda_pipeline_with_output_reference( step_lambda2 = LambdaStep( name="lambda-step2", lambda_func=Lambda( - function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda", + function_arn=("arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda"), session=sagemaker_session, ), inputs={"arg1": outputParam1}, @@ -1138,7 +1180,52 @@ def test_two_step_lambda_pipeline_with_output_reference( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, + ) + finally: + try: + pipeline.delete() + except Exception: + pass + + +def test_two_steps_emr_pipeline(sagemaker_session, role, pipeline_name, region_name): + instance_count = ParameterInteger(name="InstanceCount", default_value=2) + + emr_step_config = EMRStepConfig( + jar="s3://us-west-2.elasticmapreduce/libs/script-runner/script-runner.jar", + args=["dummy_emr_script_path"], + ) + + step_emr_1 = EMRStep( + name="emr-step-1", + cluster_id="j-1YONHTCP3YZKC", + display_name="emr_step_1", + description="MyEMRStepDescription", + step_config=emr_step_config, + ) + + step_emr_2 = EMRStep( + name="emr-step-2", + cluster_id=step_emr_1.properties.ClusterId, + display_name="emr_step_2", + description="MyEMRStepDescription", + step_config=emr_step_config, + ) + + pipeline = Pipeline( + name=pipeline_name, + parameters=[instance_count], + 
steps=[step_emr_1, step_emr_2], + sagemaker_session=sagemaker_session, + ) + + try: + response = pipeline.create(role) + create_arn = response["PipelineArn"] + assert re.match( + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) finally: @@ -1222,7 +1309,12 @@ def test_conditional_pytorch_training_model_registration( pipeline = Pipeline( name=pipeline_name, - parameters=[in_condition_input, good_enough_input, instance_count, instance_type], + parameters=[ + in_condition_input, + good_enough_input, + instance_count, + instance_type, + ], steps=[step_cond], sagemaker_session=sagemaker_session, ) @@ -1231,18 +1323,19 @@ def test_conditional_pytorch_training_model_registration( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) execution = pipeline.start(parameters={"GoodEnoughInput": 0}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: @@ -1350,12 +1443,13 @@ def test_tuning_single_algo( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: @@ -1371,6 +1465,8 @@ def test_tuning_multi_algos( cpu_instance_type, pipeline_name, region_name, + script_dir, + athena_dataset_definition, ): base_dir = os.path.join(DATA_DIR, "pytorch_mnist") entry_point = os.path.join(base_dir, "mnist.py") @@ -1382,6 +1478,42 @@ def test_tuning_multi_algos( instance_count = ParameterInteger(name="InstanceCount", default_value=1) instance_type = ParameterString(name="InstanceType", default_value="ml.m5.xlarge") + input_data = f"s3://sagemaker-sample-data-{region_name}/processing/census/census-income.csv" + + sklearn_processor = SKLearnProcessor( + framework_version="0.20.0", + instance_type=instance_type, + instance_count=instance_count, + base_job_name="test-sklearn", + sagemaker_session=sagemaker_session, + role=role, + ) + + property_file = PropertyFile( + name="DataAttributes", output_name="attributes", path="attributes.json" + ) + + step_process = ProcessingStep( + name="my-process", + display_name="ProcessingStep", + description="description for Processing step", + processor=sklearn_processor, + inputs=[ + ProcessingInput(source=input_data, destination="/opt/ml/processing/input"), + ProcessingInput(dataset_definition=athena_dataset_definition), + ], + outputs=[ + ProcessingOutput(output_name="train_data", source="/opt/ml/processing/train"), + ProcessingOutput(output_name="attributes", source="/opt/ml/processing/attributes.json"), + ], + property_files=[property_file], + code=os.path.join(script_dir, "preprocessing.py"), + ) + 
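The `PropertyFile` attached to the processing step above is what lets the pipeline read values out of a processing output at execution time: the lines that follow build a `JsonGet` on top of it and feed the result into the estimator's hyperparameters. The wiring in isolation, as a sketch (names mirror the test; the `PropertyFile` import path follows this PR's change):

```python
from sagemaker.workflow.functions import JsonGet
from sagemaker.workflow.steps import PropertyFile

# The processing script must write attributes.json into the output channel
# named "attributes"; SageMaker uploads it alongside the step's outputs.
attributes = PropertyFile(
    name="DataAttributes", output_name="attributes", path="attributes.json"
)

# Resolved only at pipeline execution time, after "my-process" has run.
train_size = JsonGet(
    step_name="my-process", property_file=attributes, json_path="train_size"
)
# train_size can now stand in for a literal wherever the step definition
# accepts parameters, e.g. hyperparameters={"train_size": train_size}.
```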
+ static_hp_1 = ParameterString(name="InstanceType", default_value="ml.m5.xlarge") + json_get_hp = JsonGet( + step_name=step_process.name, property_file=property_file, json_path="train_size" + ) pytorch_estimator = PyTorch( entry_point=entry_point, role=role, @@ -1392,10 +1524,11 @@ def test_tuning_multi_algos( sagemaker_session=sagemaker_session, enable_sagemaker_metrics=True, max_retry_attempts=3, + hyperparameters={"static-hp": static_hp_1, "train_size": json_get_hp}, ) min_batch_size = ParameterString(name="MinBatchSize", default_value="64") - max_batch_size = ParameterString(name="MaxBatchSize", default_value="128") + max_batch_size = json_get_hp tuner = HyperparameterTuner.create( estimator_dict={ @@ -1415,6 +1548,7 @@ def test_tuning_multi_algos( "estimator-2": [{"Name": "test:acc", "Regex": "Overall test accuracy: (.*?);"}], }, ) + inputs = { "estimator-1": TrainingInput(s3_data=input_path), "estimator-2": TrainingInput(s3_data=input_path), @@ -1429,7 +1563,7 @@ def test_tuning_multi_algos( pipeline = Pipeline( name=pipeline_name, parameters=[instance_count, instance_type, min_batch_size, max_batch_size], - steps=[step_tune], + steps=[step_process, step_tune], sagemaker_session=sagemaker_session, ) @@ -1437,12 +1571,13 @@ def test_tuning_multi_algos( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: @@ -1498,18 +1633,19 @@ def test_mxnet_model_registration( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) execution = pipeline.start() assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: @@ -1570,10 +1706,14 @@ def test_sklearn_xgboost_sip_model_registration( destination=train_data_path_param, ), ProcessingOutput( - output_name="val_data", source="/opt/ml/processing/val", destination=val_data_path_param + output_name="val_data", + source="/opt/ml/processing/val", + destination=val_data_path_param, ), ProcessingOutput( - output_name="model", source="/opt/ml/processing/model", destination=model_path_param + output_name="model", + source="/opt/ml/processing/model", + destination=model_path_param, ), ] @@ -1690,18 +1830,19 @@ def test_sklearn_xgboost_sip_model_registration( response = pipeline.upsert(role_arn=role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - 
fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) execution = pipeline.start() assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: @@ -1713,7 +1854,9 @@ def test_sklearn_xgboost_sip_model_registration( @pytest.mark.skipif( tests.integ.test_region() not in tests.integ.DRIFT_CHECK_BASELINES_SUPPORTED_REGIONS, - reason=f"DriftCheckBaselines changes are not fully deployed in {tests.integ.test_region()}.", + reason=( + "DriftCheckBaselines changes are not fully deployed in" f" {tests.integ.test_region()}." + ), ) def test_model_registration_with_drift_check_baselines( sagemaker_session, @@ -1746,7 +1889,9 @@ def test_model_registration_with_drift_check_baselines( utils.unique_name_from_base("metrics"), ) metrics_uri = S3Uploader.upload_string_as_file_body( - body=metrics_data, desired_s3_uri=metrics_base_uri, sagemaker_session=sagemaker_session + body=metrics_data, + desired_s3_uri=metrics_base_uri, + sagemaker_session=sagemaker_session, ) metrics_uri_param = ParameterString(name="metrics_uri", default_value=metrics_uri) @@ -1866,7 +2011,9 @@ def test_model_registration_with_drift_check_baselines( assert len(execution_steps) == 1 failure_reason = execution_steps[0].get("FailureReason", "") if failure_reason != "": - logging.error(f"Pipeline execution failed with error: {failure_reason}. Retrying..") + logging.error( + f"Pipeline execution failed with error: {failure_reason}." " Retrying.." + ) continue assert execution_steps[0]["StepStatus"] == "Succeeded" assert execution_steps[0]["StepName"] == "MyRegisterModelStep" @@ -1985,18 +2132,19 @@ def test_model_registration_with_model_repack( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) execution = pipeline.start(parameters={"GoodEnoughInput": 0}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: @@ -2022,7 +2170,7 @@ def test_training_job_with_debugger_and_profiler( Rule.sagemaker(rule_configs.loss_not_decreasing()), ] debugger_hook_config = DebuggerHookConfig( - s3_output_path=f"s3://{sagemaker_session.default_bucket()}/{uuid.uuid4()}/tensors" + s3_output_path=(f"s3://{sagemaker_session.default_bucket()}/{uuid.uuid4()}/tensors") ) base_dir = os.path.join(DATA_DIR, "pytorch_mnist") @@ -2175,7 +2323,7 @@ def test_two_processing_job_depends_on( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) @@ -2183,13 +2331,13 @@ def test_two_processing_job_depends_on( response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - 
fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) @@ -2332,13 +2480,17 @@ def test_one_step_ingestion_pipeline( input_name = "features.csv" input_file_path = os.path.join(DATA_DIR, "workflow", "features.csv") input_data_uri = os.path.join( - "s3://", sagemaker_session.default_bucket(), "py-sdk-ingestion-test-input/features.csv" + "s3://", + sagemaker_session.default_bucket(), + "py-sdk-ingestion-test-input/features.csv", ) with open(input_file_path, "r") as data: body = data.read() S3Uploader.upload_string_as_file_body( - body=body, desired_s3_uri=input_data_uri, sagemaker_session=sagemaker_session + body=body, + desired_s3_uri=input_data_uri, + sagemaker_session=sagemaker_session, ) inputs = [ @@ -2650,7 +2802,9 @@ def test_end_to_end_pipeline_successful_execution( sagemaker_session=sagemaker_session, ) step_transform = TransformStep( - name="AbaloneTransform", transformer=transformer, inputs=TransformInput(data=batch_data) + name="AbaloneTransform", + transformer=transformer, + inputs=TransformInput(data=batch_data), ) # define register model step @@ -2784,7 +2938,7 @@ def test_large_pipeline(sagemaker_session, role, pipeline_name, region_name): response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) response = pipeline.describe() @@ -2794,7 +2948,7 @@ def test_large_pipeline(sagemaker_session, role, pipeline_name, region_name): response = pipeline.update(role) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) finally: @@ -2831,7 +2985,7 @@ def test_create_and_update_with_parallelism_config( response = pipeline.create(role, parallelism_config={"MaxParallelExecutionSteps": 50}) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn, ) response = pipeline.describe() @@ -2841,7 +2995,7 @@ def test_create_and_update_with_parallelism_config( response = pipeline.update(role, parallelism_config={"MaxParallelExecutionSteps": 55}) update_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", update_arn, ) diff --git a/tests/integ/test_workflow_retry.py b/tests/integ/test_workflow_retry.py index a1fd996b1f..04ac3054da 100644 --- a/tests/integ/test_workflow_retry.py +++ b/tests/integ/test_workflow_retry.py @@ -22,7 +22,10 @@ from sagemaker.processing import ProcessingInput from sagemaker.session import get_execution_role from sagemaker.sklearn.processing import SKLearnProcessor -from sagemaker.dataset_definition.inputs import DatasetDefinition, AthenaDatasetDefinition +from sagemaker.dataset_definition.inputs import ( + DatasetDefinition, + AthenaDatasetDefinition, +) from sagemaker.workflow.parameters import ( 
ParameterInteger, ParameterString, @@ -134,7 +137,8 @@ def test_pipeline_execution_processing_step_with_retry( expire_after_mins=5, ), SageMakerJobStepRetryPolicy( - exception_types=[SageMakerJobExceptionTypeEnum.CAPACITY_ERROR], max_attempts=10 + exception_types=[SageMakerJobExceptionTypeEnum.CAPACITY_ERROR], + max_attempts=10, ), ], ) @@ -252,18 +256,19 @@ def test_model_registration_with_model_repack( response = pipeline.create(role) create_arn = response["PipelineArn"] assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", create_arn + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}", + create_arn, ) execution = pipeline.start(parameters={}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) execution = pipeline.start(parameters={"GoodEnoughInput": 0}) assert re.match( - fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", + rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/", execution.arn, ) finally: diff --git a/tests/unit/sagemaker/async_inference/__init__.py b/tests/unit/sagemaker/async_inference/__init__.py new file mode 100644 index 0000000000..a6987bc6a6 --- /dev/null +++ b/tests/unit/sagemaker/async_inference/__init__.py @@ -0,0 +1,13 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from __future__ import absolute_import diff --git a/tests/unit/sagemaker/async_inference/test_async_inference_config.py b/tests/unit/sagemaker/async_inference/test_async_inference_config.py new file mode 100644 index 0000000000..d941939fa5 --- /dev/null +++ b/tests/unit/sagemaker/async_inference/test_async_inference_config.py @@ -0,0 +1,86 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
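The unit tests that follow pin down how `AsyncInferenceConfig` serializes into the endpoint-config request. In application code the same object is handed to `deploy`; note that the integration test earlier deploys with a bare `AsyncInferenceConfig()` and asserts the generated `output_path` lands under the session's default bucket, so every field is optional. A sketch, with placeholder bucket, KMS key id, and SNS topic ARNs:

```python
from sagemaker.async_inference import AsyncInferenceConfig

config = AsyncInferenceConfig(
    output_path="s3://my-bucket/async-results",  # omit to get a generated path
    max_concurrent_invocations_per_instance=2,
    kms_key_id="my-kms-key-id",
    notification_config={
        "SuccessTopic": "arn:aws:sns:us-west-2:123456789012:success-topic",
        "ErrorTopic": "arn:aws:sns:us-west-2:123456789012:error-topic",
    },
)

# Passing the config flips the endpoint into asynchronous mode, and deploy()
# then returns an AsyncPredictor instead of a plain Predictor:
#     model.deploy(initial_instance_count=1, instance_type="ml.m5.xlarge",
#                  async_inference_config=config)
```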
+from __future__ import absolute_import
+
+from sagemaker.async_inference import AsyncInferenceConfig
+
+S3_OUTPUT_PATH = "s3://some-output-path"
+DEFAULT_KMS_KEY_ID = None
+DEFAULT_MAX_CONCURRENT_INVOCATIONS = None
+DEFAULT_NOTIFICATION_CONFIG = None
+DEFAULT_ASYNC_INFERENCE_DICT = {
+    "OutputConfig": {
+        "S3OutputPath": S3_OUTPUT_PATH,
+    },
+}
+
+OPTIONAL_KMS_KEY_ID = "some-kms-key-id"
+OPTIONAL_MAX_CONCURRENT_INVOCATIONS = 2
+OPTIONAL_NOTIFICATION_CONFIG = {
+    "SuccessTopic": "some-success-topic",
+    "ErrorTopic": "some-error-topic",
+}
+ASYNC_INFERENCE_DICT_WITH_OPTIONAL = {
+    "OutputConfig": {
+        "S3OutputPath": S3_OUTPUT_PATH,
+        "KmsKeyId": OPTIONAL_KMS_KEY_ID,
+        "NotificationConfig": OPTIONAL_NOTIFICATION_CONFIG,
+    },
+    "ClientConfig": {"MaxConcurrentInvocationsPerInstance": OPTIONAL_MAX_CONCURRENT_INVOCATIONS},
+}
+
+
+def test_init_without_optional():
+    async_inference_config = AsyncInferenceConfig(output_path=S3_OUTPUT_PATH)
+
+    assert async_inference_config.output_path == S3_OUTPUT_PATH
+    assert async_inference_config.kms_key_id == DEFAULT_KMS_KEY_ID
+    assert (
+        async_inference_config.max_concurrent_invocations_per_instance
+        == DEFAULT_MAX_CONCURRENT_INVOCATIONS
+    )
+    assert async_inference_config.notification_config == DEFAULT_NOTIFICATION_CONFIG
+
+
+def test_init_with_optional():
+    async_inference_config = AsyncInferenceConfig(
+        output_path=S3_OUTPUT_PATH,
+        max_concurrent_invocations_per_instance=OPTIONAL_MAX_CONCURRENT_INVOCATIONS,
+        kms_key_id=OPTIONAL_KMS_KEY_ID,
+        notification_config=OPTIONAL_NOTIFICATION_CONFIG,
+    )
+
+    assert async_inference_config.output_path == S3_OUTPUT_PATH
+    assert async_inference_config.kms_key_id == OPTIONAL_KMS_KEY_ID
+    assert (
+        async_inference_config.max_concurrent_invocations_per_instance
+        == OPTIONAL_MAX_CONCURRENT_INVOCATIONS
+    )
+    assert async_inference_config.notification_config == OPTIONAL_NOTIFICATION_CONFIG
+
+
+def test_to_request_dict():
+    async_inference_config = AsyncInferenceConfig(output_path=S3_OUTPUT_PATH)
+    assert async_inference_config._to_request_dict() == DEFAULT_ASYNC_INFERENCE_DICT
+
+    async_inference_config_with_optional = AsyncInferenceConfig(
+        output_path=S3_OUTPUT_PATH,
+        max_concurrent_invocations_per_instance=OPTIONAL_MAX_CONCURRENT_INVOCATIONS,
+        kms_key_id=OPTIONAL_KMS_KEY_ID,
+        notification_config=OPTIONAL_NOTIFICATION_CONFIG,
+    )
+
+    assert (
+        async_inference_config_with_optional._to_request_dict()
+        == ASYNC_INFERENCE_DICT_WITH_OPTIONAL
+    )
diff --git a/tests/unit/sagemaker/async_inference/test_async_inference_response.py b/tests/unit/sagemaker/async_inference/test_async_inference_response.py
new file mode 100644
index 0000000000..8a55dd46fa
--- /dev/null
+++ b/tests/unit/sagemaker/async_inference/test_async_inference_response.py
@@ -0,0 +1,98 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
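The response tests below simulate three successive S3 states through a single mock by giving `get_object` a `side_effect` list: an unrelated `ClientError` first (surfaced as `UnexpectedClientError`), then `NoSuchKey` (surfaced as `ObjectNotExistedError`, since a missing output object usually just means the inference is still running), then a real body. The same trick works for testing any poll-until-ready client; a minimal self-contained sketch:

```python
from unittest.mock import Mock
from botocore.exceptions import ClientError

not_ready = ClientError({"Error": {"Code": "NoSuchKey"}}, "GetObject")
body = Mock()
body.read.return_value = b"result"

s3 = Mock()
# Each call consumes the next item: the first raises, the second returns.
s3.get_object = Mock(side_effect=[not_ready, {"Body": body}])

try:
    s3.get_object(Bucket="b", Key="k")  # first poll: output not written yet
except ClientError:
    pass
assert s3.get_object(Bucket="b", Key="k")["Body"].read() == b"result"
```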
+from __future__ import absolute_import + +import pytest +from mock import Mock +from botocore.exceptions import ClientError +from sagemaker.predictor import Predictor +from sagemaker.predictor_async import AsyncPredictor +from sagemaker.async_inference import AsyncInferenceResponse +from sagemaker.exceptions import ObjectNotExistedError, UnexpectedClientError + +DEFAULT_OUTPUT_PATH = "s3://some-output-path/object-name" +ENDPOINT_NAME = "some-endpoint-name" +RETURN_VALUE = 0 + + +def empty_s3_client(): + s3_client = Mock(name="s3-client") + + client_other_error = ClientError( + error_response={"Error": {"Code": "SomeOtherError", "Message": "some-error-message"}}, + operation_name="client-other-error", + ) + + client_error = ClientError( + error_response={"Error": {"Code": "NoSuchKey"}}, + operation_name="async-inference-response-test", + ) + + response_body = Mock("body") + response_body.read = Mock("read", return_value=RETURN_VALUE) + response_body.close = Mock("close", return_value=None) + + s3_client.get_object = Mock( + name="get_object", + side_effect=[client_other_error, client_error, {"Body": response_body}], + ) + + return s3_client + + +def empty_deserializer(): + deserializer = Mock(name="deserializer") + deserializer.deserialize = Mock(name="deserialize", return_value=RETURN_VALUE) + return deserializer + + +def test_init_(): + predictor_async = AsyncPredictor(Predictor(ENDPOINT_NAME)) + async_inference_response = AsyncInferenceResponse( + output_path=DEFAULT_OUTPUT_PATH, + predictor_async=predictor_async, + ) + assert async_inference_response.output_path == DEFAULT_OUTPUT_PATH + + +def test_get_result(): + predictor_async = AsyncPredictor(Predictor(ENDPOINT_NAME)) + predictor_async.s3_client = empty_s3_client() + async_inference_response = AsyncInferenceResponse( + output_path=DEFAULT_OUTPUT_PATH, + predictor_async=predictor_async, + ) + + with pytest.raises(UnexpectedClientError): + async_inference_response.get_result() + + with pytest.raises(ObjectNotExistedError, match="Inference could still be running"): + async_inference_response.get_result() + + result = async_inference_response.get_result() + assert async_inference_response._result == result + assert result == RETURN_VALUE + + +def test_wrong_waiter_config_object(): + predictor_async = AsyncPredictor(Predictor(ENDPOINT_NAME)) + async_inference_response = AsyncInferenceResponse( + output_path=DEFAULT_OUTPUT_PATH, + predictor_async=predictor_async, + ) + + with pytest.raises( + ValueError, + match="waiter_config should be a WaiterConfig object", + ): + async_inference_response.get_result(waiter_config={}) diff --git a/tests/unit/sagemaker/async_inference/test_waiter_config.py b/tests/unit/sagemaker/async_inference/test_waiter_config.py new file mode 100644 index 0000000000..c15a67a25b --- /dev/null +++ b/tests/unit/sagemaker/async_inference/test_waiter_config.py @@ -0,0 +1,46 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
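`WaiterConfig`, tested next, is a thin holder for the two knobs botocore-style waiters understand, and its request dict uses the same `Delay`/`MaxAttempts` keys. The defaults (15 seconds between polls, 60 attempts) give roughly a 15-minute ceiling; a short sketch of tightening that for a fast model:

```python
from sagemaker.async_inference.waiter_config import WaiterConfig

# Poll every 5 seconds and give up after 12 tries, about one minute total.
quick = WaiterConfig(max_attempts=12, delay=5)
assert quick._to_request_dict() == {"Delay": 5, "MaxAttempts": 12}

# Typical use: async_inference_response.get_result(waiter_config=quick)
```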
+from __future__ import absolute_import
+
+from sagemaker.async_inference.waiter_config import WaiterConfig
+
+DEFAULT_DELAY = 15
+DEFAULT_MAX_ATTEMPTS = 60
+DEFAULT_WAITER_DICT = {
+    "Delay": DEFAULT_DELAY,
+    "MaxAttempts": DEFAULT_MAX_ATTEMPTS,
+}
+
+DELAY = 10
+MAX_ATTEMPTS = 10
+
+
+def test_init():
+    waiter_config = WaiterConfig()
+
+    assert waiter_config.delay == DEFAULT_DELAY
+    assert waiter_config.max_attempts == DEFAULT_MAX_ATTEMPTS
+
+    waiter_config_self_defined = WaiterConfig(
+        max_attempts=MAX_ATTEMPTS,
+        delay=DELAY,
+    )
+
+    assert waiter_config_self_defined.delay == DELAY
+    assert waiter_config_self_defined.max_attempts == MAX_ATTEMPTS
+
+
+def test_to_dict():
+    waiter_config = WaiterConfig()
+
+    assert waiter_config._to_request_dict() == DEFAULT_WAITER_DICT
diff --git a/tests/unit/sagemaker/feature_store/test_feature_store.py b/tests/unit/sagemaker/feature_store/test_feature_store.py
index 0192287c35..ef6a36980b 100644
--- a/tests/unit/sagemaker/feature_store/test_feature_store.py
+++ b/tests/unit/sagemaker/feature_store/test_feature_store.py
@@ -175,7 +175,11 @@ def test_load_feature_definition(sagemaker_session_mock):
     names = [fd.feature_name for fd in feature_definitions]
     types = [fd.feature_type for fd in feature_definitions]
     assert names == ["float", "int", "string"]
-    assert types == [FeatureTypeEnum.FRACTIONAL, FeatureTypeEnum.INTEGRAL, FeatureTypeEnum.STRING]
+    assert types == [
+        FeatureTypeEnum.FRACTIONAL,
+        FeatureTypeEnum.INTEGRAL,
+        FeatureTypeEnum.STRING,
+    ]
 
 
 def test_load_feature_definition_unsupported_types(sagemaker_session_mock):
@@ -304,16 +308,13 @@ def test_as_hive_ddl(create_table_ddl, feature_group_dummy_definitions, sagemake
     feature_group = FeatureGroup(name="MyGroup", sagemaker_session=sagemaker_session_mock)
     feature_group.feature_definitions = feature_group_dummy_definitions
-    assert (
-        create_table_ddl.format(
-            database="MyDatabase",
-            table_name="MyTable",
-            account="1234",
-            region="us-west-2",
-            feature_group_name="MyGroup",
-        )
-        == feature_group.as_hive_ddl(database="MyDatabase", table_name="MyTable")
-    )
+    assert create_table_ddl.format(
+        database="MyDatabase",
+        table_name="MyTable",
+        account="1234",
+        region="us-west-2",
+        feature_group_name="MyGroup",
+    ) == feature_group.as_hive_ddl(database="MyDatabase", table_name="MyTable")
 
 
 @patch(
diff --git a/tests/unit/sagemaker/jumpstart/test_utils.py b/tests/unit/sagemaker/jumpstart/test_utils.py
index 1877ede054..fe494eb459 100644
--- a/tests/unit/sagemaker/jumpstart/test_utils.py
+++ b/tests/unit/sagemaker/jumpstart/test_utils.py
@@ -169,38 +169,29 @@ def test_add_jumpstart_tags_inference():
     tags = [{"Key": "some", "Value": "tag"}]
     inference_model_uri = "dfsdfsd"
     inference_script_uri = "dfsdfs"
-    assert (
-        utils.add_jumpstart_tags(
-            tags=tags,
-            inference_model_uri=inference_model_uri,
-            inference_script_uri=inference_script_uri,
-        )
-        == [{"Key": "some", "Value": "tag"}]
-    )
+    assert utils.add_jumpstart_tags(
+        tags=tags,
+        inference_model_uri=inference_model_uri,
+        inference_script_uri=inference_script_uri,
+    ) == [{"Key": "some", "Value": "tag"}]
 
     tags = None
     inference_model_uri = random_jumpstart_s3_uri("random_key")
     inference_script_uri = "dfsdfs"
-    assert (
-        utils.add_jumpstart_tags(
-            tags=tags,
-            inference_model_uri=inference_model_uri,
-            inference_script_uri=inference_script_uri,
-        )
-        == [{"Key": JumpStartTag.INFERENCE_MODEL_URI.value, "Value": inference_model_uri}]
-    )
+    assert utils.add_jumpstart_tags(
+        tags=tags,
+        inference_model_uri=inference_model_uri,
+
inference_script_uri=inference_script_uri, + ) == [{"Key": JumpStartTag.INFERENCE_MODEL_URI.value, "Value": inference_model_uri}] tags = [] inference_model_uri = random_jumpstart_s3_uri("random_key") inference_script_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - inference_model_uri=inference_model_uri, - inference_script_uri=inference_script_uri, - ) - == [{"Key": JumpStartTag.INFERENCE_MODEL_URI.value, "Value": inference_model_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + inference_model_uri=inference_model_uri, + inference_script_uri=inference_script_uri, + ) == [{"Key": JumpStartTag.INFERENCE_MODEL_URI.value, "Value": inference_model_uri}] tags = [{"Key": "some", "Value": "tag"}] inference_model_uri = random_jumpstart_s3_uri("random_key") @@ -217,26 +208,20 @@ def test_add_jumpstart_tags_inference(): tags = None inference_script_uri = random_jumpstart_s3_uri("random_key") inference_model_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - inference_model_uri=inference_model_uri, - inference_script_uri=inference_script_uri, - ) - == [{"Key": JumpStartTag.INFERENCE_SCRIPT_URI.value, "Value": inference_script_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + inference_model_uri=inference_model_uri, + inference_script_uri=inference_script_uri, + ) == [{"Key": JumpStartTag.INFERENCE_SCRIPT_URI.value, "Value": inference_script_uri}] tags = [] inference_script_uri = random_jumpstart_s3_uri("random_key") inference_model_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - inference_model_uri=inference_model_uri, - inference_script_uri=inference_script_uri, - ) - == [{"Key": JumpStartTag.INFERENCE_SCRIPT_URI.value, "Value": inference_script_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + inference_model_uri=inference_model_uri, + inference_script_uri=inference_script_uri, + ) == [{"Key": JumpStartTag.INFERENCE_SCRIPT_URI.value, "Value": inference_script_uri}] tags = [{"Key": "some", "Value": "tag"}] inference_script_uri = random_jumpstart_s3_uri("random_key") @@ -364,38 +349,29 @@ def test_add_jumpstart_tags_training(): tags = [{"Key": "some", "Value": "tag"}] training_model_uri = "dfsdfsd" training_script_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - training_model_uri=training_model_uri, - training_script_uri=training_script_uri, - ) - == [{"Key": "some", "Value": "tag"}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + training_model_uri=training_model_uri, + training_script_uri=training_script_uri, + ) == [{"Key": "some", "Value": "tag"}] tags = None training_model_uri = random_jumpstart_s3_uri("random_key") training_script_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - training_model_uri=training_model_uri, - training_script_uri=training_script_uri, - ) - == [{"Key": JumpStartTag.TRAINING_MODEL_URI.value, "Value": training_model_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + training_model_uri=training_model_uri, + training_script_uri=training_script_uri, + ) == [{"Key": JumpStartTag.TRAINING_MODEL_URI.value, "Value": training_model_uri}] tags = [] training_model_uri = random_jumpstart_s3_uri("random_key") training_script_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - training_model_uri=training_model_uri, - training_script_uri=training_script_uri, - ) - == [{"Key": JumpStartTag.TRAINING_MODEL_URI.value, "Value": training_model_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + 
training_model_uri=training_model_uri, + training_script_uri=training_script_uri, + ) == [{"Key": JumpStartTag.TRAINING_MODEL_URI.value, "Value": training_model_uri}] tags = [{"Key": "some", "Value": "tag"}] training_model_uri = random_jumpstart_s3_uri("random_key") @@ -412,26 +388,20 @@ def test_add_jumpstart_tags_training(): tags = None training_script_uri = random_jumpstart_s3_uri("random_key") training_model_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - training_model_uri=training_model_uri, - training_script_uri=training_script_uri, - ) - == [{"Key": JumpStartTag.TRAINING_SCRIPT_URI.value, "Value": training_script_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + training_model_uri=training_model_uri, + training_script_uri=training_script_uri, + ) == [{"Key": JumpStartTag.TRAINING_SCRIPT_URI.value, "Value": training_script_uri}] tags = [] training_script_uri = random_jumpstart_s3_uri("random_key") training_model_uri = "dfsdfs" - assert ( - utils.add_jumpstart_tags( - tags=tags, - training_model_uri=training_model_uri, - training_script_uri=training_script_uri, - ) - == [{"Key": JumpStartTag.TRAINING_SCRIPT_URI.value, "Value": training_script_uri}] - ) + assert utils.add_jumpstart_tags( + tags=tags, + training_model_uri=training_model_uri, + training_script_uri=training_script_uri, + ) == [{"Key": JumpStartTag.TRAINING_SCRIPT_URI.value, "Value": training_script_uri}] tags = [{"Key": "some", "Value": "tag"}] training_script_uri = random_jumpstart_s3_uri("random_key") diff --git a/tests/unit/sagemaker/lineage/test_action.py b/tests/unit/sagemaker/lineage/test_action.py index 79e59b679b..120d643063 100644 --- a/tests/unit/sagemaker/lineage/test_action.py +++ b/tests/unit/sagemaker/lineage/test_action.py @@ -16,6 +16,7 @@ import unittest.mock from sagemaker.lineage import action, _api_types +from sagemaker.lineage._api_types import ActionSource def test_create(sagemaker_session): @@ -333,3 +334,23 @@ def test_create_delete_with_association(sagemaker_session): delete_with_association_expected_calls == sagemaker_session.sagemaker_client.delete_association.mock_calls ) + + +def test_model_package(sagemaker_session): + obj = action.ModelPackageApprovalAction( + sagemaker_session, + action_name="abcd-aws-model-package", + source=ActionSource( + source_uri="arn:aws:sagemaker:us-west-2:123456789012:model-package/pipeline88modelpackage/1", + source_type="ARN", + ), + status="updated-status", + properties={"k1": "v1"}, + properties_to_remove=["k2"], + ) + sagemaker_session.sagemaker_client.describe_model_package.return_value = {} + obj.model_package() + + sagemaker_session.sagemaker_client.describe_model_package.assert_called_with( + ModelPackageName="pipeline88modelpackage", + ) diff --git a/tests/unit/sagemaker/lineage/test_artifact.py b/tests/unit/sagemaker/lineage/test_artifact.py index 72228ec964..218532c1b7 100644 --- a/tests/unit/sagemaker/lineage/test_artifact.py +++ b/tests/unit/sagemaker/lineage/test_artifact.py @@ -377,3 +377,143 @@ def test_downstream_trials(sagemaker_session): ), ] assert expected_calls == sagemaker_session.sagemaker_client.list_associations.mock_calls + + +def test_downstream_trials_v2(sagemaker_session): + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": "B" + str(i), "Type": "DataSet", "LineageType": "Artifact"} for i in range(10) + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.search.return_value = { 
+ "Results": [ + { + "TrialComponent": { + "TrialComponentName": "tc-1", + "TrialComponentArn": "arn::tc-1", + "DisplayName": "TC1", + "Parents": [{"TrialName": "test-trial-name"}], + } + } + ] + } + + obj = artifact.Artifact( + sagemaker_session=sagemaker_session, + artifact_arn="test-arn", + artifact_name="foo", + properties={"k1": "v1", "k2": "v2"}, + properties_to_remove=["r1"], + ) + + result = obj.downstream_trials_v2() + + expected_trials = ["test-trial-name"] + + assert expected_trials == result + + expected_calls = [ + unittest.mock.call( + Direction="Descendants", + Filters={"LineageTypes": ["TrialComponent"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=["test-arn"], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + + +def test_upstream_trials(sagemaker_session): + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": "B" + str(i), "Type": "DataSet", "LineageType": "Artifact"} for i in range(10) + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.search.return_value = { + "Results": [ + { + "TrialComponent": { + "TrialComponentName": "tc-1", + "TrialComponentArn": "arn::tc-1", + "DisplayName": "TC1", + "Parents": [{"TrialName": "test-trial-name"}], + } + } + ] + } + + obj = artifact.Artifact( + sagemaker_session=sagemaker_session, + artifact_arn="test-arn", + artifact_name="foo", + properties={"k1": "v1", "k2": "v2"}, + properties_to_remove=["r1"], + ) + + result = obj.upstream_trials() + + expected_trials = ["test-trial-name"] + + assert expected_trials == result + + expected_calls = [ + unittest.mock.call( + Direction="Ascendants", + Filters={"LineageTypes": ["TrialComponent"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=["test-arn"], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + + +def test_s3_uri_artifacts(sagemaker_session): + obj = artifact.Artifact( + sagemaker_session=sagemaker_session, + artifact_arn="test-arn", + artifact_name="foo", + source_uri="s3://abced", + properties={"k1": "v1", "k2": "v2"}, + properties_to_remove=["r1"], + ) + sagemaker_session.sagemaker_client.list_artifacts.side_effect = [ + { + "ArtifactSummaries": [ + { + "ArtifactArn": "A", + "ArtifactName": "B", + "Source": { + "SourceUri": "D", + "source_types": [{"SourceIdType": "source_id_type", "Value": "value1"}], + }, + "ArtifactType": "test-type", + } + ], + "NextToken": "100", + }, + ] + result = obj.s3_uri_artifacts(s3_uri="s3://abced") + + expected_calls = [ + unittest.mock.call(SourceUri="s3://abced"), + ] + expected_result = { + "ArtifactSummaries": [ + { + "ArtifactArn": "A", + "ArtifactName": "B", + "Source": { + "SourceUri": "D", + "source_types": [{"SourceIdType": "source_id_type", "Value": "value1"}], + }, + "ArtifactType": "test-type", + } + ], + "NextToken": "100", + } + assert expected_calls == sagemaker_session.sagemaker_client.list_artifacts.mock_calls + assert result == expected_result diff --git a/tests/unit/sagemaker/lineage/test_context.py b/tests/unit/sagemaker/lineage/test_context.py index 5cf48dea67..d87120dde2 100644 --- a/tests/unit/sagemaker/lineage/test_context.py +++ b/tests/unit/sagemaker/lineage/test_context.py @@ -17,6 +17,9 @@ import pytest from sagemaker.lineage import context, _api_types +from sagemaker.lineage.action import Action +from sagemaker.lineage.lineage_trial_component import LineageTrialComponent +from 
sagemaker.lineage.query import LineageQueryDirectionEnum @pytest.fixture @@ -328,3 +331,182 @@ def test_create_delete_with_association(sagemaker_session): delete_with_association_expected_calls == sagemaker_session.sagemaker_client.delete_association.mock_calls ) + + +def test_actions(sagemaker_session): + context_arn = "arn:aws:sagemaker:us-west-2:123456789012:context/lineage-unit-3b05f017-0d87-4c37" + action_arn = "arn:aws:sagemaker:us-west-2:123456789012:action/lineage-unit-3b05f017-0d87-4c37" + obj = context.EndpointContext(sagemaker_session, context_name="foo", context_arn=context_arn) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": action_arn, "Type": "Approval", "LineageType": "Action"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + + sagemaker_session.sagemaker_client.describe_action.return_value = { + "ActionName": "MyAction", + "ActionArn": action_arn, + } + + action_list = obj.actions(direction=LineageQueryDirectionEnum.DESCENDANTS) + + expected_calls = [ + unittest.mock.call( + Direction="Descendants", + Filters={"LineageTypes": ["Action"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[context_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + + expected_action_list = [ + Action( + action_arn=action_arn, + action_name="MyAction", + ) + ] + + assert expected_action_list[0].action_arn == action_list[0].action_arn + assert expected_action_list[0].action_name == action_list[0].action_name + + +def test_processing_jobs(sagemaker_session): + context_arn = "arn:aws:sagemaker:us-west-2:123456789012:context/lineage-unit-3b05f017-0d87-4c37" + processing_job_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + obj = context.EndpointContext(sagemaker_session, context_name="foo", context_arn=context_arn) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": processing_job_arn, "Type": "ProcessingJob", "LineageType": "TrialComponent"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_trial_component.return_value = { + "TrialComponentName": "MyProcessingJob", + "TrialComponentArn": processing_job_arn, + } + + trial_component_list = obj.processing_jobs(direction=LineageQueryDirectionEnum.ASCENDANTS) + expected_calls = [ + unittest.mock.call( + Direction="Ascendants", + Filters={"Types": ["ProcessingJob"], "LineageTypes": ["TrialComponent"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[context_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_trial_component_list = [ + LineageTrialComponent( + trial_component_name="MyProcessingJob", + trial_component_arn=processing_job_arn, + ) + ] + + assert ( + expected_trial_component_list[0].trial_component_arn + == trial_component_list[0].trial_component_arn + ) + assert ( + expected_trial_component_list[0].trial_component_name + == trial_component_list[0].trial_component_name + ) + + +def test_transform_jobs(sagemaker_session): + context_arn = "arn:aws:sagemaker:us-west-2:123456789012:context/lineage-unit-3b05f017-0d87-4c37" + transform_job_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + obj = context.EndpointContext(sagemaker_session, context_name="foo", 
context_arn=context_arn) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": transform_job_arn, "Type": "TransformJob", "LineageType": "TrialComponent"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_trial_component.return_value = { + "TrialComponentName": "MyTransformJob", + "TrialComponentArn": transform_job_arn, + } + + trial_component_list = obj.transform_jobs(direction=LineageQueryDirectionEnum.ASCENDANTS) + expected_calls = [ + unittest.mock.call( + Direction="Ascendants", + Filters={"Types": ["TransformJob"], "LineageTypes": ["TrialComponent"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[context_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_trial_component_list = [ + LineageTrialComponent( + trial_component_name="MyTransformJob", + trial_component_arn=transform_job_arn, + ) + ] + + assert ( + expected_trial_component_list[0].trial_component_arn + == trial_component_list[0].trial_component_arn + ) + assert ( + expected_trial_component_list[0].trial_component_name + == trial_component_list[0].trial_component_name + ) + + +def test_trial_components(sagemaker_session): + context_arn = "arn:aws:sagemaker:us-west-2:123456789012:context/lineage-unit-3b05f017-0d87-4c37" + trial_component_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + obj = context.EndpointContext(sagemaker_session, context_name="foo", context_arn=context_arn) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": trial_component_arn, "Type": "TransformJob", "LineageType": "TrialComponent"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_trial_component.return_value = { + "TrialComponentName": "MyTransformJob", + "TrialComponentArn": trial_component_arn, + } + + trial_component_list = obj.trial_components(direction=LineageQueryDirectionEnum.ASCENDANTS) + expected_calls = [ + unittest.mock.call( + Direction="Ascendants", + Filters={"LineageTypes": ["TrialComponent"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[context_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_trial_component_list = [ + LineageTrialComponent( + trial_component_name="MyTransformJob", + trial_component_arn=trial_component_arn, + ) + ] + + assert ( + expected_trial_component_list[0].trial_component_arn + == trial_component_list[0].trial_component_arn + ) + assert ( + expected_trial_component_list[0].trial_component_name + == trial_component_list[0].trial_component_name + ) diff --git a/tests/unit/sagemaker/lineage/test_dataset_artifact.py b/tests/unit/sagemaker/lineage/test_dataset_artifact.py index 6db5a215f6..074efb488c 100644 --- a/tests/unit/sagemaker/lineage/test_dataset_artifact.py +++ b/tests/unit/sagemaker/lineage/test_dataset_artifact.py @@ -83,3 +83,89 @@ def test_trained_models(sagemaker_session): ) ] assert expected_model_list == model_list + + +def test_upstream_datasets(sagemaker_session): + artifact_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:artifact/lineage-unit-3b05f017-0d87-4c37" + ) + artifact_dataset_arn = "arn:aws:sagemaker:us-west-2:123456789012:artifact/datasets" + artifact_dataset_name = "myDataset" + + obj = artifact.DatasetArtifact( + 
sagemaker_session, artifact_name="foo", artifact_arn=artifact_arn + ) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": artifact_dataset_arn, "Type": "DataSet", "LineageType": "Artifact"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_artifact.return_value = { + "ArtifactName": artifact_dataset_name, + "ArtifactArn": artifact_dataset_arn, + } + + dataset_list = obj.upstream_datasets() + expected_calls = [ + unittest.mock.call( + Direction="Ascendants", + Filters={"Types": ["DataSet"], "LineageTypes": ["Artifact"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[artifact_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_dataset_list = [ + artifact.DatasetArtifact( + artifact_name=artifact_dataset_name, + artifact_arn=artifact_dataset_arn, + ) + ] + assert expected_dataset_list[0].artifact_arn == dataset_list[0].artifact_arn + assert expected_dataset_list[0].artifact_name == dataset_list[0].artifact_name + + +def test_downstream_datasets(sagemaker_session): + artifact_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:artifact/lineage-unit-3b05f017-0d87-4c37" + ) + artifact_dataset_arn = "arn:aws:sagemaker:us-west-2:123456789012:artifact/datasets" + artifact_dataset_name = "myDataset" + + obj = artifact.DatasetArtifact( + sagemaker_session, artifact_name="foo", artifact_arn=artifact_arn + ) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": artifact_dataset_arn, "Type": "DataSet", "LineageType": "Artifact"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_artifact.return_value = { + "ArtifactName": artifact_dataset_name, + "ArtifactArn": artifact_dataset_arn, + } + + dataset_list = obj.downstream_datasets() + expected_calls = [ + unittest.mock.call( + Direction="Descendants", + Filters={"Types": ["DataSet"], "LineageTypes": ["Artifact"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[artifact_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_dataset_list = [ + artifact.DatasetArtifact( + artifact_name=artifact_dataset_name, + artifact_arn=artifact_dataset_arn, + ) + ] + assert expected_dataset_list[0].artifact_arn == dataset_list[0].artifact_arn + assert expected_dataset_list[0].artifact_name == dataset_list[0].artifact_name diff --git a/tests/unit/sagemaker/lineage/test_image_artifact.py b/tests/unit/sagemaker/lineage/test_image_artifact.py new file mode 100644 index 0000000000..485d942db3 --- /dev/null +++ b/tests/unit/sagemaker/lineage/test_image_artifact.py @@ -0,0 +1,65 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
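Both dataset-artifact helpers above issue the same ``QueryLineage`` request shape (``Types=["DataSet"]``, ``LineageTypes=["Artifact"]``) and differ only in ``Direction``. Against a real session, the calls look roughly like this; the ARN is a placeholder and the sketch assumes AWS credentials with lineage permissions:

.. code:: python

    from sagemaker import Session
    from sagemaker.lineage.artifact import DatasetArtifact

    session = Session()
    dataset = DatasetArtifact(
        sagemaker_session=session,
        artifact_arn="arn:aws:sagemaker:us-west-2:111122223333:artifact/example",  # placeholder
    )

    # Ascendants walk: datasets this artifact was derived from.
    upstream = dataset.upstream_datasets()
    # Descendants walk: datasets derived from this artifact.
    downstream = dataset.downstream_datasets()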
+from __future__ import absolute_import + +import unittest.mock + +import pytest +from sagemaker.lineage import artifact +from sagemaker.lineage.query import LineageQueryDirectionEnum + + +@pytest.fixture +def sagemaker_session(): + return unittest.mock.Mock() + + +def test_datasets(sagemaker_session): + artifact_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:artifact/lineage-unit-3b05f017-0d87-4c37" + ) + artifact_dataset_arn = "arn:aws:sagemaker:us-west-2:123456789012:artifact/datasets" + artifact_dataset_name = "myDataset" + + obj = artifact.ImageArtifact(sagemaker_session, artifact_name="foo", artifact_arn=artifact_arn) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": artifact_dataset_arn, "Type": "DataSet", "LineageType": "Artifact"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_artifact.return_value = { + "ArtifactName": artifact_dataset_name, + "ArtifactArn": artifact_dataset_arn, + } + + dataset_list = obj.datasets(direction=LineageQueryDirectionEnum.DESCENDANTS) + expected_calls = [ + unittest.mock.call( + Direction="Descendants", + Filters={"Types": ["DataSet"], "LineageTypes": ["Artifact"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[artifact_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_dataset_list = [ + artifact.DatasetArtifact( + artifact_name=artifact_dataset_name, + artifact_arn=artifact_dataset_arn, + ) + ] + assert expected_dataset_list[0].artifact_arn == dataset_list[0].artifact_arn + assert expected_dataset_list[0].artifact_name == dataset_list[0].artifact_name diff --git a/tests/unit/sagemaker/lineage/test_lineage_trial_component.py b/tests/unit/sagemaker/lineage/test_lineage_trial_component.py new file mode 100644 index 0000000000..9b466832a1 --- /dev/null +++ b/tests/unit/sagemaker/lineage/test_lineage_trial_component.py @@ -0,0 +1,153 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
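The same stubbing pattern recurs in every lineage test in this change: a bare ``Mock`` plays the session, ``query_lineage`` returns the graph to walk, and a ``describe_*`` call supplies the detail for each vertex. A condensed template of that arrangement (the ARN and name are arbitrary test values):

.. code:: python

    import unittest.mock

    session = unittest.mock.Mock()

    # First stub: the graph that the helper under test will traverse.
    session.sagemaker_client.query_lineage.return_value = {
        "Vertices": [{"Arn": "arn:example", "Type": "DataSet", "LineageType": "Artifact"}],
        "Edges": [],
    }
    # Second stub: the lookup performed when each Vertex is materialized
    # into a lineage object.
    session.sagemaker_client.describe_artifact.return_value = {
        "ArtifactName": "myDataset",
        "ArtifactArn": "arn:example",
    }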
+from __future__ import absolute_import + +import unittest.mock + +import pytest +from sagemaker.lineage import artifact, lineage_trial_component + + +@pytest.fixture +def sagemaker_session(): + return unittest.mock.Mock() + + +def test_dataset_artifacts(sagemaker_session): + trial_component_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + artifact_dataset_arn = "arn:aws:sagemaker:us-west-2:123456789012:artifact/datasets" + artifact_dataset_name = "myDataset" + + obj = lineage_trial_component.LineageTrialComponent( + sagemaker_session, trial_component_name="foo", trial_component_arn=trial_component_arn + ) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": artifact_dataset_arn, "Type": "DataSet", "LineageType": "Artifact"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + sagemaker_session.sagemaker_client.describe_artifact.return_value = { + "ArtifactName": artifact_dataset_name, + "ArtifactArn": artifact_dataset_arn, + } + + dataset_list = obj.dataset_artifacts() + expected_calls = [ + unittest.mock.call( + Direction="Ascendants", + Filters={"Types": ["DataSet"], "LineageTypes": ["Artifact"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[trial_component_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_dataset_list = [ + artifact.DatasetArtifact( + artifact_name=artifact_dataset_name, + artifact_arn=artifact_dataset_arn, + ) + ] + assert expected_dataset_list[0].artifact_arn == dataset_list[0].artifact_arn + assert expected_dataset_list[0].artifact_name == dataset_list[0].artifact_name + + +def test_models(sagemaker_session): + trial_component_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + model_arn = "arn:aws:sagemaker:us-west-2:123456789012:context/models" + model_name = "myDataset" + + obj = lineage_trial_component.LineageTrialComponent( + sagemaker_session, trial_component_name="foo", trial_component_arn=trial_component_arn + ) + + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": model_arn, "Type": "Model", "LineageType": "Artifact"}, + ], + "Edges": [{"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}], + } + + sagemaker_session.sagemaker_client.describe_artifact.return_value = { + "ArtifactName": model_name, + "ArtifactArn": model_arn, + } + + model_list = obj.models() + expected_calls = [ + unittest.mock.call( + Direction="Descendants", + Filters={"Types": ["Model"], "LineageTypes": ["Artifact"]}, + IncludeEdges=False, + MaxDepth=10, + StartArns=[trial_component_arn], + ), + ] + assert expected_calls == sagemaker_session.sagemaker_client.query_lineage.mock_calls + expected_model_list = [ + artifact.DatasetArtifact( + artifact_name=model_name, + artifact_arn=model_arn, + ) + ] + assert expected_model_list[0].artifact_arn == model_list[0].artifact_arn + assert expected_model_list[0].artifact_name == model_list[0].artifact_name + + +def test_pipeline_execution_arn(sagemaker_session): + trial_component_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + obj = lineage_trial_component.LineageTrialComponent( + sagemaker_session, trial_component_name="foo", trial_component_arn=trial_component_arn + ) + + sagemaker_session.sagemaker_client.list_tags.return_value = { + "Tags": [ + 
{"Key": "sagemaker:pipeline-execution-arn", "Value": "tag1"}, + ], + } + expected_calls = [ + unittest.mock.call(ResourceArn=trial_component_arn), + ] + pipeline_execution_arn_result = obj.pipeline_execution_arn() + assert pipeline_execution_arn_result == "tag1" + assert expected_calls == sagemaker_session.sagemaker_client.list_tags.mock_calls + + +def test_no_pipeline_execution_arn(sagemaker_session): + trial_component_arn = ( + "arn:aws:sagemaker:us-west-2:123456789012:trial_component/lineage-unit-3b05f017-0d87-4c37" + ) + obj = lineage_trial_component.LineageTrialComponent( + sagemaker_session, trial_component_name="foo", trial_component_arn=trial_component_arn + ) + + sagemaker_session.sagemaker_client.list_tags.return_value = { + "Tags": [ + {"Key": "abcd", "Value": "efg"}, + ], + } + expected_calls = [ + unittest.mock.call(ResourceArn=trial_component_arn), + ] + pipeline_execution_arn_result = obj.pipeline_execution_arn() + expected_result = None + assert pipeline_execution_arn_result == expected_result + assert expected_calls == sagemaker_session.sagemaker_client.list_tags.mock_calls diff --git a/tests/unit/sagemaker/lineage/test_model_package_group_context.py b/tests/unit/sagemaker/lineage/test_model_package_group_context.py new file mode 100644 index 0000000000..8c14773df7 --- /dev/null +++ b/tests/unit/sagemaker/lineage/test_model_package_group_context.py @@ -0,0 +1,36 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""This module contains code to test SageMaker ``ModelPackageGroup``""" +from __future__ import absolute_import + +import unittest.mock +import pytest +from sagemaker.lineage import context + + +@pytest.fixture +def sagemaker_session(): + return unittest.mock.Mock() + + +def test_pipeline_execution_arn(sagemaker_session): + obj = context.ModelPackageGroup( + sagemaker_session, + context_name="foo", + description="test-description", + properties={"PipelineExecutionArn": "abcd", "k2": "v2"}, + properties_to_remove=["E"], + ) + actual_result = obj.pipeline_execution_arn() + expected_result = "abcd" + assert expected_result == actual_result diff --git a/tests/unit/sagemaker/lineage/test_query.py b/tests/unit/sagemaker/lineage/test_query.py index 595e7e1d0f..ae76fd199c 100644 --- a/tests/unit/sagemaker/lineage/test_query.py +++ b/tests/unit/sagemaker/lineage/test_query.py @@ -11,9 +11,11 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
from __future__ import absolute_import +import unittest.mock from sagemaker.lineage.artifact import DatasetArtifact, ModelArtifact, Artifact from sagemaker.lineage.context import EndpointContext, Context from sagemaker.lineage.action import Action +from sagemaker.lineage.lineage_trial_component import LineageTrialComponent from sagemaker.lineage.query import LineageEntityEnum, LineageSourceEnum, Vertex, LineageQuery import pytest @@ -32,6 +34,38 @@ def test_lineage_query(sagemaker_session): start_arns=["arn:aws:sagemaker:us-west-2:0123456789012:context/mycontext"] ) + assert len(response.edges) == 1 + assert response.edges[0].source_arn == "arn1" + assert response.edges[0].destination_arn == "arn2" + assert response.edges[0].association_type == "Produced" + assert len(response.vertices) == 2 + + assert response.vertices[0].arn == "arn1" + assert response.vertices[0].lineage_source == "Endpoint" + assert response.vertices[0].lineage_entity == "Artifact" + assert response.vertices[1].arn == "arn2" + assert response.vertices[1].lineage_source == "Model" + assert response.vertices[1].lineage_entity == "Context" + + +def test_lineage_query_duplication(sagemaker_session): + lineage_query = LineageQuery(sagemaker_session) + sagemaker_session.sagemaker_client.query_lineage.return_value = { + "Vertices": [ + {"Arn": "arn1", "Type": "Endpoint", "LineageType": "Artifact"}, + {"Arn": "arn1", "Type": "Endpoint", "LineageType": "Artifact"}, + {"Arn": "arn2", "Type": "Model", "LineageType": "Context"}, + ], + "Edges": [ + {"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}, + {"SourceArn": "arn1", "DestinationArn": "arn2", "AssociationType": "Produced"}, + ], + } + + response = lineage_query.query( + start_arns=["arn:aws:sagemaker:us-west-2:0123456789012:context/mycontext"] + ) + assert len(response.edges) == 1 assert response.edges[0].source_arn == "arn1" assert response.edges[0].destination_arn == "arn2" @@ -254,6 +288,49 @@ def test_vertex_to_object_context(sagemaker_session): assert isinstance(context, Context) +def test_vertex_to_object_trial_component(sagemaker_session): + + tc_arn = "arn:aws:sagemaker:us-west-2:963951943925:trial-component/abaloneprocess-ixyt08z3ru-aws-processing-job" + vertex = Vertex( + arn=tc_arn, + lineage_entity=LineageEntityEnum.TRIAL_COMPONENT.value, + lineage_source=LineageSourceEnum.TRANSFORM_JOB.value, + sagemaker_session=sagemaker_session, + ) + + sagemaker_session.sagemaker_client.describe_trial_component.return_value = { + "TrialComponentName": "MyTrialComponent", + "TrialComponentArn": tc_arn, + "Source": { + "SourceUri": "arn:aws:sagemaker:us-west-2:0123456789012:model/my_trial_component", + "SourceType": "ARN", + "SourceId": "Thu Dec 17 17:16:24 UTC 2020", + }, + "TrialComponentType": "ModelDeployment", + "Properties": { + "PipelineExecutionArn": "arn:aws:sagemaker:us-west-2:0123456789012:\ + pipeline/mypipeline/execution/0irnteql64d0", + "PipelineStepName": "MyStep", + "Status": "Completed", + }, + "CreationTime": 1608225384.0, + "CreatedBy": {}, + "LastModifiedTime": 1608225384.0, + "LastModifiedBy": {}, + } + + trial_component = vertex.to_lineage_object() + + expected_calls = [ + unittest.mock.call(TrialComponentName="abaloneprocess-ixyt08z3ru-aws-processing-job"), + ] + assert expected_calls == sagemaker_session.sagemaker_client.describe_trial_component.mock_calls + + assert trial_component.trial_component_arn == tc_arn + assert trial_component.trial_component_name == "MyTrialComponent" + assert isinstance(trial_component, 
LineageTrialComponent) + + def test_vertex_to_object_model_artifact(sagemaker_session): vertex = Vertex( arn="arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f", @@ -285,6 +362,37 @@ def test_vertex_to_object_model_artifact(sagemaker_session): assert isinstance(artifact, ModelArtifact) +def test_vertex_to_object_artifact(sagemaker_session): + vertex = Vertex( + arn="arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f", + lineage_entity=LineageEntityEnum.ARTIFACT.value, + lineage_source=LineageSourceEnum.MODEL.value, + sagemaker_session=sagemaker_session, + ) + + sagemaker_session.sagemaker_client.describe_artifact.return_value = { + "ArtifactArn": "arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f", + "Source": { + "SourceUri": "arn:aws:sagemaker:us-west-2:0123456789012:model/mymodel", + "SourceTypes": [], + }, + "ArtifactType": None, + "Properties": {}, + "CreationTime": 1608224704.149, + "CreatedBy": {}, + "LastModifiedTime": 1608224704.149, + "LastModifiedBy": {}, + } + + artifact = vertex.to_lineage_object() + + assert ( + artifact.artifact_arn + == "arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f" + ) + assert isinstance(artifact, Artifact) + + def test_vertex_to_dataset_artifact(sagemaker_session): vertex = Vertex( arn="arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f", @@ -347,7 +455,7 @@ def test_vertex_to_model_artifact(sagemaker_session): assert isinstance(artifact, ModelArtifact) -def test_vertex_to_object_artifact(sagemaker_session): +def test_vertex_to_object_image_artifact(sagemaker_session): vertex = Vertex( arn="arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f", lineage_entity=LineageEntityEnum.ARTIFACT.value, @@ -409,7 +517,7 @@ def test_vertex_to_object_action(sagemaker_session): def test_vertex_to_object_unconvertable(sagemaker_session): vertex = Vertex( arn="arn:aws:sagemaker:us-west-2:0123456789012:artifact/e66eef7f19c05e75284089183491bd4f", - lineage_entity=LineageEntityEnum.TRIAL_COMPONENT.value, + lineage_entity=LineageEntityEnum.TRIAL.value, lineage_source=LineageSourceEnum.TENSORBOARD.value, sagemaker_session=sagemaker_session, ) diff --git a/tests/unit/sagemaker/model/test_deploy.py b/tests/unit/sagemaker/model/test_deploy.py index 284956aa75..33366578de 100644 --- a/tests/unit/sagemaker/model/test_deploy.py +++ b/tests/unit/sagemaker/model/test_deploy.py @@ -19,6 +19,8 @@ import sagemaker from sagemaker.model import Model +from sagemaker.async_inference import AsyncInferenceConfig +from sagemaker.serverless import ServerlessInferenceConfig MODEL_DATA = "s3://bucket/model.tar.gz" MODEL_IMAGE = "mi" @@ -62,7 +64,11 @@ def test_deploy(name_from_base, prepare_container_def, production_variant, sagem prepare_container_def.assert_called_with(INSTANCE_TYPE, accelerator_type=None) production_variant.assert_called_with( - MODEL_NAME, INSTANCE_TYPE, INSTANCE_COUNT, accelerator_type=None + MODEL_NAME, + INSTANCE_TYPE, + INSTANCE_COUNT, + accelerator_type=None, + serverless_inference_config=None, ) sagemaker_session.create_model.assert_called_with( @@ -76,6 +82,7 @@ def test_deploy(name_from_base, prepare_container_def, production_variant, sagem kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) @@ -101,7 +108,11 @@ def test_deploy_accelerator_type( create_sagemaker_model.assert_called_with(INSTANCE_TYPE, ACCELERATOR_TYPE, None) 
production_variant.assert_called_with( - MODEL_NAME, INSTANCE_TYPE, INSTANCE_COUNT, accelerator_type=ACCELERATOR_TYPE + MODEL_NAME, + INSTANCE_TYPE, + INSTANCE_COUNT, + accelerator_type=ACCELERATOR_TYPE, + serverless_inference_config=None, ) sagemaker_session.endpoint_from_production_variants.assert_called_with( @@ -111,6 +122,7 @@ def test_deploy_accelerator_type( kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) @@ -134,6 +146,7 @@ def test_deploy_endpoint_name(sagemaker_session): kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) @@ -207,6 +220,7 @@ def test_deploy_tags(create_sagemaker_model, production_variant, name_from_base, kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) @@ -228,6 +242,7 @@ def test_deploy_kms_key(production_variant, name_from_base, sagemaker_session): kms_key=key, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) @@ -248,6 +263,7 @@ def test_deploy_async(production_variant, name_from_base, sagemaker_session): kms_key=None, wait=False, data_capture_config_dict=None, + async_inference_config_dict=None, ) @@ -276,9 +292,108 @@ def test_deploy_data_capture_config(production_variant, name_from_base, sagemake kms_key=None, wait=True, data_capture_config_dict=data_capture_config_dict, + async_inference_config_dict=None, + ) + + +@patch("sagemaker.model.Model._create_sagemaker_model", Mock()) +@patch("sagemaker.utils.name_from_base", return_value=ENDPOINT_NAME) +@patch("sagemaker.production_variant", return_value=BASE_PRODUCTION_VARIANT) +def test_deploy_async_inference(production_variant, name_from_base, sagemaker_session): + model = Model( + MODEL_IMAGE, MODEL_DATA, role=ROLE, name=MODEL_NAME, sagemaker_session=sagemaker_session + ) + + async_inference_config = AsyncInferenceConfig(output_path="s3://some-path") + async_inference_config_dict = { + "OutputConfig": { + "S3OutputPath": "s3://some-path", + }, + } + + model.deploy( + instance_type=INSTANCE_TYPE, + initial_instance_count=INSTANCE_COUNT, + async_inference_config=async_inference_config, + ) + + sagemaker_session.endpoint_from_production_variants.assert_called_with( + name=ENDPOINT_NAME, + production_variants=[BASE_PRODUCTION_VARIANT], + tags=None, + kms_key=None, + wait=True, + data_capture_config_dict=None, + async_inference_config_dict=async_inference_config_dict, ) +@patch("sagemaker.utils.name_from_base", return_value=ENDPOINT_NAME) +@patch("sagemaker.model.Model._create_sagemaker_model") +@patch("sagemaker.production_variant") +def test_deploy_serverless_inference(production_variant, create_sagemaker_model, sagemaker_session): + model = Model( + MODEL_IMAGE, MODEL_DATA, role=ROLE, name=MODEL_NAME, sagemaker_session=sagemaker_session + ) + + production_variant_result = copy.deepcopy(BASE_PRODUCTION_VARIANT) + production_variant.return_value = production_variant_result + + serverless_inference_config = ServerlessInferenceConfig() + serverless_inference_config_dict = { + "MemorySizeInMB": 2048, + "MaxConcurrency": 5, + } + + model.deploy( + serverless_inference_config=serverless_inference_config, + ) + + create_sagemaker_model.assert_called_with(None, None, None) + production_variant.assert_called_with( + MODEL_NAME, + None, + None, + accelerator_type=None, + serverless_inference_config=serverless_inference_config_dict, + ) + + sagemaker_session.endpoint_from_production_variants.assert_called_with( + name=ENDPOINT_NAME, + 
production_variants=[production_variant_result],
+        tags=None,
+        kms_key=None,
+        wait=True,
+        data_capture_config_dict=None,
+        async_inference_config_dict=None,
+    )
+
+
+def test_deploy_wrong_inference_type(sagemaker_session):
+    model = Model(MODEL_IMAGE, MODEL_DATA, role=ROLE)
+
+    bad_args = (
+        {"instance_type": INSTANCE_TYPE},
+        {"initial_instance_count": INSTANCE_COUNT},
+        {"instance_type": None, "initial_instance_count": None},
+    )
+    for args in bad_args:
+        with pytest.raises(
+            ValueError,
+            match="Must specify instance type and instance count unless using serverless inference",
+        ):
+            model.deploy(**args)
+
+
+def test_deploy_wrong_serverless_config(sagemaker_session):
+    model = Model(MODEL_IMAGE, MODEL_DATA, role=ROLE)
+    with pytest.raises(
+        ValueError,
+        match="serverless_inference_config needs to be a ServerlessInferenceConfig object",
+    ):
+        model.deploy(serverless_inference_config={})
+
+
 @patch("sagemaker.session.Session")
 @patch("sagemaker.local.LocalSession")
 def test_deploy_creates_correct_session(local_session, session):
@@ -302,6 +417,19 @@ def test_deploy_no_role(sagemaker_session):
     model.deploy(instance_type=INSTANCE_TYPE, initial_instance_count=INSTANCE_COUNT)
 
 
+def test_deploy_wrong_async_inference_config(sagemaker_session):
+    model = Model(MODEL_IMAGE, MODEL_DATA, sagemaker_session=sagemaker_session, role=ROLE)
+
+    with pytest.raises(
+        ValueError, match="async_inference_config needs to be a AsyncInferenceConfig object"
+    ):
+        model.deploy(
+            instance_type=INSTANCE_TYPE,
+            initial_instance_count=INSTANCE_COUNT,
+            async_inference_config={},
+        )
+
+
 @patch("sagemaker.model.Model._create_sagemaker_model", Mock())
 @patch("sagemaker.predictor.Predictor._get_endpoint_config_name", Mock())
 @patch("sagemaker.predictor.Predictor._get_model_names", Mock())
@@ -326,3 +454,16 @@ def test_deploy_predictor_cls(production_variant, sagemaker_session):
     assert isinstance(predictor, sagemaker.predictor.Predictor)
     assert predictor.endpoint_name == endpoint_name
     assert predictor.sagemaker_session == sagemaker_session
+
+    endpoint_name_async = "foo-async"
+    predictor_async = model.deploy(
+        instance_type=INSTANCE_TYPE,
+        initial_instance_count=INSTANCE_COUNT,
+        endpoint_name=endpoint_name_async,
+        async_inference_config=AsyncInferenceConfig(),
+    )
+
+    assert isinstance(predictor_async, sagemaker.predictor_async.AsyncPredictor)
+    assert predictor_async.name == model.name
+    assert predictor_async.endpoint_name == endpoint_name_async
+    assert predictor_async.sagemaker_session == sagemaker_session
diff --git a/tests/unit/sagemaker/serverless/test_serverless_inference_config.py b/tests/unit/sagemaker/serverless/test_serverless_inference_config.py
new file mode 100644
index 0000000000..fab80748a4
--- /dev/null
+++ b/tests/unit/sagemaker/serverless/test_serverless_inference_config.py
@@ -0,0 +1,36 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
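The serverless deploy tests above, together with the defaults asserted in the new config test that follows, show that serverless deployment drops the instance arguments entirely. A usage sketch with placeholder model values:

.. code:: python

    from sagemaker.model import Model
    from sagemaker.serverless import ServerlessInferenceConfig

    model = Model(
        image_uri="<inference-image-uri>",          # placeholder
        model_data="s3://my-bucket/model.tar.gz",   # placeholder
        role="<execution-role-arn>",                # placeholder
    )

    # Defaults are 2048 MB memory / max concurrency 5; override as needed.
    serverless_config = ServerlessInferenceConfig(memory_size_in_mb=4096, max_concurrency=10)

    # No instance_type or initial_instance_count: supplying only one of them
    # raises the ValueError pinned down by test_deploy_wrong_inference_type.
    predictor = model.deploy(serverless_inference_config=serverless_config)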
+from __future__ import absolute_import + +from sagemaker.serverless import ServerlessInferenceConfig + +DEFAULT_MEMORY_SIZE_IN_MB = 2048 +DEFAULT_MAX_CONCURRENCY = 5 + +DEFAULT_REQUEST_DICT = { + "MemorySizeInMB": DEFAULT_MEMORY_SIZE_IN_MB, + "MaxConcurrency": DEFAULT_MAX_CONCURRENCY, +} + + +def test_init(): + serverless_inference_config = ServerlessInferenceConfig() + + assert serverless_inference_config.memory_size_in_mb == DEFAULT_MEMORY_SIZE_IN_MB + assert serverless_inference_config.max_concurrency == DEFAULT_MAX_CONCURRENCY + + +def test_to_request_dict(): + serverless_inference_config_dict = ServerlessInferenceConfig()._to_request_dict() + + assert serverless_inference_config_dict == DEFAULT_REQUEST_DICT diff --git a/tests/unit/sagemaker/workflow/test_emr_step.py b/tests/unit/sagemaker/workflow/test_emr_step.py new file mode 100644 index 0000000000..e0dd81ebb5 --- /dev/null +++ b/tests/unit/sagemaker/workflow/test_emr_step.py @@ -0,0 +1,175 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from __future__ import absolute_import + +import json + +import pytest + +from mock import Mock + +from sagemaker.workflow.emr_step import EMRStep, EMRStepConfig +from sagemaker.workflow.steps import CacheConfig +from sagemaker.workflow.pipeline import Pipeline +from sagemaker.workflow.parameters import ParameterString + + +@pytest.fixture() +def sagemaker_session(): + boto_mock = Mock(name="boto_session", region_name="us-west-2") + session_mock = Mock( + name="sagemaker_session", + boto_session=boto_mock, + boto_region_name="us-west-2", + config=None, + local_mode=False, + ) + return session_mock + + +def test_emr_step_with_one_step_config(sagemaker_session): + emr_step_config = EMRStepConfig( + jar="s3:/script-runner/script-runner.jar", + args=["--arg_0", "arg_0_value"], + main_class="com.my.main", + properties=[{"Key": "Foo", "Value": "Foo_value"}, {"Key": "Bar", "Value": "Bar_value"}], + ) + + emr_step = EMRStep( + name="MyEMRStep", + display_name="MyEMRStep", + description="MyEMRStepDescription", + cluster_id="MyClusterID", + step_config=emr_step_config, + depends_on=["TestStep"], + cache_config=CacheConfig(enable_caching=True, expire_after="PT1H"), + ) + emr_step.add_depends_on(["SecondTestStep"]) + assert emr_step.to_request() == { + "Name": "MyEMRStep", + "Type": "EMR", + "Arguments": { + "ClusterId": "MyClusterID", + "StepConfig": { + "HadoopJarStep": { + "Args": ["--arg_0", "arg_0_value"], + "Jar": "s3:/script-runner/script-runner.jar", + "MainClass": "com.my.main", + "Properties": [ + {"Key": "Foo", "Value": "Foo_value"}, + {"Key": "Bar", "Value": "Bar_value"}, + ], + } + }, + }, + "DependsOn": ["TestStep", "SecondTestStep"], + "DisplayName": "MyEMRStep", + "Description": "MyEMRStepDescription", + "CacheConfig": {"Enabled": True, "ExpireAfter": "PT1H"}, + } + + assert emr_step.properties.ClusterId == "MyClusterID" + assert emr_step.properties.ActionOnFailure.expr == {"Get": "Steps.MyEMRStep.ActionOnFailure"} + assert emr_step.properties.Config.Args.expr == {"Get": 
"Steps.MyEMRStep.Config.Args"} + assert emr_step.properties.Config.Jar.expr == {"Get": "Steps.MyEMRStep.Config.Jar"} + assert emr_step.properties.Config.MainClass.expr == {"Get": "Steps.MyEMRStep.Config.MainClass"} + assert emr_step.properties.Id.expr == {"Get": "Steps.MyEMRStep.Id"} + assert emr_step.properties.Name.expr == {"Get": "Steps.MyEMRStep.Name"} + assert emr_step.properties.Status.State.expr == {"Get": "Steps.MyEMRStep.Status.State"} + assert emr_step.properties.Status.FailureDetails.Reason.expr == { + "Get": "Steps.MyEMRStep.Status.FailureDetails.Reason" + } + + +def test_pipeline_interpolates_emr_outputs(sagemaker_session): + parameter = ParameterString("MyStr") + + emr_step_config_1 = EMRStepConfig( + jar="s3:/script-runner/script-runner_1.jar", + args=["--arg_0", "arg_0_value"], + main_class="com.my.main", + properties=[{"Key": "Foo", "Value": "Foo_value"}, {"Key": "Bar", "Value": "Bar_value"}], + ) + + step_emr_1 = EMRStep( + name="emr_step_1", + cluster_id="MyClusterID", + display_name="emr_step_1", + description="MyEMRStepDescription", + depends_on=["TestStep"], + step_config=emr_step_config_1, + ) + + emr_step_config_2 = EMRStepConfig(jar="s3:/script-runner/script-runner_2.jar") + + step_emr_2 = EMRStep( + name="emr_step_2", + cluster_id="MyClusterID", + display_name="emr_step_2", + description="MyEMRStepDescription", + depends_on=["TestStep"], + step_config=emr_step_config_2, + ) + + pipeline = Pipeline( + name="MyPipeline", + parameters=[parameter], + steps=[step_emr_1, step_emr_2], + sagemaker_session=sagemaker_session, + ) + + assert json.loads(pipeline.definition()) == { + "Version": "2020-12-01", + "Metadata": {}, + "Parameters": [{"Name": "MyStr", "Type": "String"}], + "PipelineExperimentConfig": { + "ExperimentName": {"Get": "Execution.PipelineName"}, + "TrialName": {"Get": "Execution.PipelineExecutionId"}, + }, + "Steps": [ + { + "Name": "emr_step_1", + "Type": "EMR", + "Arguments": { + "ClusterId": "MyClusterID", + "StepConfig": { + "HadoopJarStep": { + "Args": ["--arg_0", "arg_0_value"], + "Jar": "s3:/script-runner/script-runner_1.jar", + "MainClass": "com.my.main", + "Properties": [ + {"Key": "Foo", "Value": "Foo_value"}, + {"Key": "Bar", "Value": "Bar_value"}, + ], + } + }, + }, + "DependsOn": ["TestStep"], + "Description": "MyEMRStepDescription", + "DisplayName": "emr_step_1", + }, + { + "Name": "emr_step_2", + "Type": "EMR", + "Arguments": { + "ClusterId": "MyClusterID", + "StepConfig": { + "HadoopJarStep": {"Jar": "s3:/script-runner/script-runner_2.jar"} + }, + }, + "Description": "MyEMRStepDescription", + "DisplayName": "emr_step_2", + "DependsOn": ["TestStep"], + }, + ], + } diff --git a/tests/unit/sagemaker/workflow/test_properties.py b/tests/unit/sagemaker/workflow/test_properties.py index accaf46533..405de5c0b2 100644 --- a/tests/unit/sagemaker/workflow/test_properties.py +++ b/tests/unit/sagemaker/workflow/test_properties.py @@ -70,6 +70,19 @@ def test_properties_tuning_job(): } +def test_properties_emr_step(): + prop = Properties("Steps.MyStep", "Step", service_name="emr") + some_prop_names = ["Id", "Name", "Config", "ActionOnFailure", "Status"] + for name in some_prop_names: + assert name in prop.__dict__.keys() + + assert prop.Id.expr == {"Get": "Steps.MyStep.Id"} + assert prop.Name.expr == {"Get": "Steps.MyStep.Name"} + assert prop.ActionOnFailure.expr == {"Get": "Steps.MyStep.ActionOnFailure"} + assert prop.Config.Jar.expr == {"Get": "Steps.MyStep.Config.Jar"} + assert prop.Status.State.expr == {"Get": "Steps.MyStep.Status.State"} + + def 
test_properties_describe_model_package_output(): prop = Properties("Steps.MyStep", "DescribeModelPackageOutput") some_prop_names = ["ModelPackageName", "ModelPackageGroupName", "ModelPackageArn"] diff --git a/tests/unit/sagemaker/workflow/test_steps.py b/tests/unit/sagemaker/workflow/test_steps.py index 3c2adc7bd9..72e37b80f8 100644 --- a/tests/unit/sagemaker/workflow/test_steps.py +++ b/tests/unit/sagemaker/workflow/test_steps.py @@ -62,6 +62,10 @@ CreateModelStep, CacheConfig, ) +from sagemaker.pipeline import PipelineModel +from sagemaker.sparkml import SparkMLModel +from sagemaker.predictor import Predictor +from sagemaker.model import FrameworkModel from tests.unit import DATA_DIR DUMMY_SCRIPT_PATH = os.path.join(DATA_DIR, "dummy_script.py") @@ -89,6 +93,21 @@ def properties(self): return self._properties +class DummyFrameworkModel(FrameworkModel): + def __init__(self, sagemaker_session, **kwargs): + super(DummyFrameworkModel, self).__init__( + "s3://bucket/model_1.tar.gz", + "mi-1", + ROLE, + os.path.join(DATA_DIR, "dummy_script.py"), + sagemaker_session=sagemaker_session, + **kwargs, + ) + + def create_predictor(self, endpoint_name): + return Predictor(endpoint_name, self.sagemaker_session) + + @pytest.fixture def boto_session(): role_mock = Mock() @@ -598,6 +617,7 @@ def test_processing_step_normalizes_args_with_local_code(mock_normalize_args, sc inputs=step.inputs, outputs=step.outputs, code=step.code, + kms_key=None, ) @@ -624,6 +644,7 @@ def test_processing_step_normalizes_args_with_s3_code(mock_normalize_args, scrip outputs=outputs, job_arguments=["arg1", "arg2"], cache_config=cache_config, + kms_key="arn:aws:kms:us-west-2:012345678901:key/s3-kms-key", ) mock_normalize_args.return_value = [step.inputs, step.outputs] step.to_request() @@ -633,6 +654,7 @@ def test_processing_step_normalizes_args_with_s3_code(mock_normalize_args, scrip inputs=step.inputs, outputs=step.outputs, code=step.code, + kms_key=step.kms_key, ) @@ -667,6 +689,7 @@ def test_processing_step_normalizes_args_with_no_code(mock_normalize_args, scrip inputs=step.inputs, outputs=step.outputs, code=None, + kms_key=None, ) @@ -704,6 +727,63 @@ def test_create_model_step(sagemaker_session): assert step.properties.ModelName.expr == {"Get": "Steps.MyCreateModelStep.ModelName"} +@patch("tarfile.open") +@patch("time.strftime", return_value="2017-10-10-14-14-15") +def test_create_model_step_with_model_pipeline(tfo, time, sagemaker_session): + framework_model = DummyFrameworkModel(sagemaker_session) + sparkml_model = SparkMLModel( + model_data="s3://bucket/model_2.tar.gz", + role=ROLE, + sagemaker_session=sagemaker_session, + env={"SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT": "text/csv"}, + ) + model = PipelineModel( + models=[framework_model, sparkml_model], role=ROLE, sagemaker_session=sagemaker_session + ) + inputs = CreateModelInput( + instance_type="c4.4xlarge", + accelerator_type="ml.eia1.medium", + ) + step = CreateModelStep( + name="MyCreateModelStep", + depends_on=["TestStep"], + display_name="MyCreateModelStep", + description="TestDescription", + model=model, + inputs=inputs, + ) + step.add_depends_on(["SecondTestStep"]) + + assert step.to_request() == { + "Name": "MyCreateModelStep", + "Type": "Model", + "Description": "TestDescription", + "DisplayName": "MyCreateModelStep", + "DependsOn": ["TestStep", "SecondTestStep"], + "Arguments": { + "Containers": [ + { + "Environment": { + "SAGEMAKER_PROGRAM": "dummy_script.py", + "SAGEMAKER_SUBMIT_DIRECTORY": "s3://my-bucket/mi-1-2017-10-10-14-14-15/sourcedir.tar.gz", + 
"SAGEMAKER_CONTAINER_LOG_LEVEL": "20", + "SAGEMAKER_REGION": "us-west-2", + }, + "Image": "mi-1", + "ModelDataUrl": "s3://bucket/model_1.tar.gz", + }, + { + "Environment": {"SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT": "text/csv"}, + "Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-sparkml-serving:2.4", + "ModelDataUrl": "s3://bucket/model_2.tar.gz", + }, + ], + "ExecutionRoleArn": "DummyRole", + }, + } + assert step.properties.ModelName.expr == {"Get": "Steps.MyCreateModelStep.ModelName"} + + def test_transform_step(sagemaker_session): transformer = Transformer( model_name=MODEL_NAME, diff --git a/tests/unit/test_airflow.py b/tests/unit/test_airflow.py index 9104210b0c..fa4b4d2e55 100644 --- a/tests/unit/test_airflow.py +++ b/tests/unit/test_airflow.py @@ -262,7 +262,7 @@ def test_framework_training_config_all_args(retrieve_image_uri, sagemaker_sessio py_version="py3", framework_version="1.15.2", role="{{ role }}", - instance_count="{{ instance_count }}", + instance_count=1, instance_type="ml.c4.2xlarge", volume_size="{{ volume_size }}", volume_kms_key="{{ volume_kms_key }}", @@ -276,6 +276,8 @@ def test_framework_training_config_all_args(retrieve_image_uri, sagemaker_sessio security_group_ids=["{{ security_group_ids }}"], metric_definitions=[{"Name": "{{ name }}", "Regex": "{{ regex }}"}], sagemaker_session=sagemaker_session, + checkpoint_local_path="{{ checkpoint_local_path }}", + checkpoint_s3_uri="{{ checkpoint_s3_uri }}", ) data = "{{ training_data }}" @@ -294,7 +296,7 @@ def test_framework_training_config_all_args(retrieve_image_uri, sagemaker_sessio "TrainingJobName": "{{ base_job_name }}-%s" % TIME_STAMP, "StoppingCondition": {"MaxRuntimeInSeconds": "{{ max_run }}"}, "ResourceConfig": { - "InstanceCount": "{{ instance_count }}", + "InstanceCount": 1, "InstanceType": "ml.c4.2xlarge", "VolumeSizeInGB": "{{ volume_size }}", "VolumeKmsKeyId": "{{ volume_kms_key }}", @@ -338,6 +340,10 @@ def test_framework_training_config_all_args(retrieve_image_uri, sagemaker_sessio } ] }, + "CheckpointConfig": { + "LocalPath": "{{ checkpoint_local_path }}", + "S3Uri": "{{ checkpoint_s3_uri }}", + }, } assert config == expected_config diff --git a/tests/unit/test_clarify.py b/tests/unit/test_clarify.py index 48a096a69f..910268a7dc 100644 --- a/tests/unit/test_clarify.py +++ b/tests/unit/test_clarify.py @@ -82,7 +82,20 @@ def test_invalid_data_config(): ) -def test_data_bias_config(): +def test_s3_data_distribution_type_ignorance(): + data_config = DataConfig( + s3_data_input_path="s3://input/train.csv", + s3_output_path="s3://output/analysis_test_result", + label="Label", + headers=["Label", "F1", "F2", "F3", "F4"], + dataset_type="text/csv", + joinsource="F4", + s3_data_distribution_type="ShardedByS3Key", + ) + assert data_config.s3_data_distribution_type == "FullyReplicated" + + +def test_bias_config(): label_values = [1] facet_name = "F1" facet_threshold = 0.3 @@ -103,52 +116,122 @@ def test_data_bias_config(): assert expected_config == data_bias_config.get_config() -def test_data_bias_config_multi_facet(): - label_values = [1] - facet_name = ["Facet1", "Facet2"] - facet_threshold = [[0], [1, 2]] - group_name = "A151" - - data_bias_config = BiasConfig( - label_values_or_threshold=label_values, - facet_name=facet_name, - facet_values_or_threshold=facet_threshold, - group_name=group_name, - ) +def test_invalid_bias_config(): + # Empty facet list, + with pytest.raises(AssertionError, match="Please provide at least one facet"): + BiasConfig( + label_values_or_threshold=[1], + facet_name=[], + 
) - expected_config = { - "label_values_or_threshold": label_values, - "facet": [ - {"name_or_index": facet_name[0], "value_or_threshold": facet_threshold[0]}, - {"name_or_index": facet_name[1], "value_or_threshold": facet_threshold[1]}, - ], - "group_variable": group_name, - } - assert expected_config == data_bias_config.get_config() + # Two facets but only one value + with pytest.raises( + ValueError, match="The number of facet names doesn't match the number of facet values" + ): + BiasConfig( + label_values_or_threshold=[1], + facet_name=["Feature1", "Feature2"], + facet_values_or_threshold=[[1]], + ) -def test_data_bias_config_multi_facet_not_all_with_value(): +@pytest.mark.parametrize( + "facet_name,facet_values_or_threshold,expected_result", + [ + # One facet, assume that it is binary and value 1 indicates the sensitive group + [ + "Feature1", + [1], + { + "facet": [{"name_or_index": "Feature1", "value_or_threshold": [1]}], + }, + ], + # The same facet as above, facet value is not specified. (Clarify will compute bias metrics + # for each binary value). + [ + "Feature1", + None, + { + "facet": [{"name_or_index": "Feature1"}], + }, + ], + # Assume that the 2nd column (index 1, zero-based) of the dataset as facet, it has + # four categories and two of them indicate the sensitive group. + [ + 1, + ["category1, category2"], + { + "facet": [{"name_or_index": 1, "value_or_threshold": ["category1, category2"]}], + }, + ], + # The same facet as above, facet values are not specified. (Clarify will iterate + # the categories and compute bias metrics for each category). + [ + 1, + None, + { + "facet": [{"name_or_index": 1}], + }, + ], + # Assume that the facet is numeric value in range [0.0, 1.0]. Given facet threshold 0.5, + # interval (0.5, 1.0] indicates the sensitive group. 
+ [ + "Feature3", + [0.5], + { + "facet": [{"name_or_index": "Feature3", "value_or_threshold": [0.5]}], + }, + ], + # Multiple facets + [ + ["Feature1", 1, "Feature3"], + [[1], ["category1, category2"], [0.5]], + { + "facet": [ + {"name_or_index": "Feature1", "value_or_threshold": [1]}, + {"name_or_index": 1, "value_or_threshold": ["category1, category2"]}, + {"name_or_index": "Feature3", "value_or_threshold": [0.5]}, + ], + }, + ], + # Multiple facets, no value or threshold + [ + ["Feature1", 1, "Feature3"], + None, + { + "facet": [ + {"name_or_index": "Feature1"}, + {"name_or_index": 1}, + {"name_or_index": "Feature3"}, + ], + }, + ], + # Multiple facets, specify values or threshold for some of them + [ + ["Feature1", 1, "Feature3"], + [[1], None, [0.5]], + { + "facet": [ + {"name_or_index": "Feature1", "value_or_threshold": [1]}, + {"name_or_index": 1}, + {"name_or_index": "Feature3", "value_or_threshold": [0.5]}, + ], + }, + ], + ], +) +def test_facet_of_bias_config(facet_name, facet_values_or_threshold, expected_result): label_values = [1] - facet_name = ["Facet1", "Facet2"] - facet_threshold = [[0], None] - group_name = "A151" - - data_bias_config = BiasConfig( + bias_config = BiasConfig( label_values_or_threshold=label_values, facet_name=facet_name, - facet_values_or_threshold=facet_threshold, - group_name=group_name, + facet_values_or_threshold=facet_values_or_threshold, ) - expected_config = { "label_values_or_threshold": label_values, - "facet": [ - {"name_or_index": facet_name[0], "value_or_threshold": facet_threshold[0]}, - {"name_or_index": facet_name[1]}, - ], - "group_variable": group_name, + **expected_result, } - assert expected_config == data_bias_config.get_config() + assert bias_config.get_config() == expected_config def test_model_config(): diff --git a/tests/unit/test_estimator.py b/tests/unit/test_estimator.py index 37bdc4d8ed..656d773914 100644 --- a/tests/unit/test_estimator.py +++ b/tests/unit/test_estimator.py @@ -39,6 +39,7 @@ ProfilerRule, Rule, ) +from sagemaker.async_inference import AsyncInferenceConfig from sagemaker.estimator import Estimator, EstimatorBase, Framework, _TrainingJob from sagemaker.fw_utils import PROFILER_UNSUPPORTED_REGIONS from sagemaker.inputs import ShuffleConfig @@ -48,6 +49,7 @@ from sagemaker.pytorch.estimator import PyTorch from sagemaker.sklearn.estimator import SKLearn from sagemaker.tensorflow.estimator import TensorFlow +from sagemaker.predictor_async import AsyncPredictor from sagemaker.transformer import Transformer from sagemaker.xgboost.estimator import XGBoost @@ -2484,6 +2486,7 @@ def test_fit_deploy_tags_in_estimator(name_from_base, sagemaker_session): kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) sagemaker_session.create_model.assert_called_with( @@ -2529,6 +2532,7 @@ def test_fit_deploy_tags(name_from_base, sagemaker_session): kms_key=None, wait=True, data_capture_config_dict=None, + async_inference_config_dict=None, ) sagemaker_session.create_model.assert_called_with( @@ -2811,6 +2815,63 @@ def test_generic_to_deploy(time, sagemaker_session): assert predictor.sagemaker_session == sagemaker_session +def test_generic_to_deploy_async(sagemaker_session): + e = Estimator( + IMAGE_URI, + ROLE, + INSTANCE_COUNT, + INSTANCE_TYPE, + output_path=OUTPUT_PATH, + sagemaker_session=sagemaker_session, + ) + + e.fit() + s3_output_path = "s3://some-s3-path" + + predictor_async = e.deploy( + INSTANCE_COUNT, + INSTANCE_TYPE, + 
async_inference_config=AsyncInferenceConfig(output_path=s3_output_path),
+    )
+
+    sagemaker_session.create_model.assert_called_once()
+    _, kwargs = sagemaker_session.create_model.call_args
+    assert isinstance(predictor_async, AsyncPredictor)
+    assert predictor_async.endpoint_name.startswith(IMAGE_URI)
+    assert predictor_async.sagemaker_session == sagemaker_session
+
+
+def test_generic_to_deploy_bad_arguments_combination(sagemaker_session):
+    e = Estimator(
+        IMAGE_URI,
+        ROLE,
+        INSTANCE_COUNT,
+        INSTANCE_TYPE,
+        output_path=OUTPUT_PATH,
+        sagemaker_session=sagemaker_session,
+    )
+
+    e.fit()
+
+    bad_args = (
+        {"instance_type": INSTANCE_TYPE},
+        {"initial_instance_count": INSTANCE_COUNT},
+        {"instance_type": None, "initial_instance_count": None},
+    )
+    for args in bad_args:
+        with pytest.raises(
+            ValueError,
+            match="Must specify instance type and instance count unless using serverless inference",
+        ):
+            e.deploy(**args)
+
+    with pytest.raises(
+        ValueError,
+        match="serverless_inference_config needs to be a ServerlessInferenceConfig object",
+    ):
+        e.deploy(serverless_inference_config={})
+
+
 def test_generic_to_deploy_network_isolation(sagemaker_session):
     e = Estimator(
         IMAGE_URI,
@@ -2860,6 +2921,8 @@ def test_generic_to_deploy_kms(create_model, sagemaker_session):
         wait=True,
         kms_key=kms_key,
         data_capture_config=None,
+        async_inference_config=None,
+        serverless_inference_config=None,
     )
diff --git a/tests/unit/test_predictor_async.py b/tests/unit/test_predictor_async.py
new file mode 100644
index 0000000000..e4d2ee829f
--- /dev/null
+++ b/tests/unit/test_predictor_async.py
@@ -0,0 +1,345 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from __future__ import absolute_import + +import pytest +from mock import Mock +from botocore.exceptions import WaiterError +from sagemaker.predictor import Predictor +from sagemaker.predictor_async import AsyncPredictor +from sagemaker.exceptions import PollingTimeoutError + +ENDPOINT = "mxnet_endpoint" +BUCKET_NAME = "mxnet_endpoint" +DEFAULT_CONTENT_TYPE = "application/octet-stream" +CSV_CONTENT_TYPE = "text/csv" +DEFAULT_ACCEPT = "*/*" +RETURN_VALUE = 0 +CSV_RETURN_VALUE = "1,2,3\r\n" +PRODUCTION_VARIANT_1 = "PRODUCTION_VARIANT_1" +INFERENCE_ID = "inference-id" +ASYNC_OUTPUT_LOCATION = "s3://some-output-path/object-name" +ASYNC_INPUT_LOCATION = "s3://some-input-path/object-name" +ASYNC_CHECK_PERIOD = 1 +ASYNC_PREDICTOR = "async-predictor" +DUMMY_DATA = [0, 1, 2, 3] + +ENDPOINT_DESC = {"EndpointArn": "foo", "EndpointConfigName": ENDPOINT} + +ENDPOINT_CONFIG_DESC = {"ProductionVariants": [{"ModelName": "model-1"}, {"ModelName": "model-2"}]} + + +def empty_sagemaker_session(): + ims = Mock(name="sagemaker_session") + ims.default_bucket = Mock(name="default_bucket", return_value=BUCKET_NAME) + ims.sagemaker_runtime_client = Mock(name="sagemaker_runtime") + ims.sagemaker_client.describe_endpoint = Mock(return_value=ENDPOINT_DESC) + ims.sagemaker_client.describe_endpoint_config = Mock(return_value=ENDPOINT_CONFIG_DESC) + + ims.sagemaker_runtime_client.invoke_endpoint_async = Mock( + name="invoke_endpoint_async", + return_value={ + "OutputLocation": ASYNC_OUTPUT_LOCATION, + }, + ) + + response_body = Mock("body") + response_body.read = Mock("read", return_value=RETURN_VALUE) + response_body.close = Mock("close", return_value=None) + + ims.s3_client = Mock(name="s3_client") + ims.s3_client.get_object = Mock( + name="get_object", + return_value={"Body": response_body}, + ) + + ims.s3_client.put_object = Mock(name="put_object") + + return ims + + +def empty_predictor(): + predictor = Mock(name="predictor") + predictor.update_endpoint = Mock(name="update_endpoint") + predictor.delete_endpoint = Mock(name="delete_endpoint") + predictor.delete_model = Mock(name="delete_model") + predictor.enable_data_capture = Mock(name="enable_data_capture") + predictor.disable_data_capture = Mock(name="disable_data_capture") + predictor.update_data_capture_config = Mock(name="update_data_capture_config") + predictor.list_monitor = Mock(name="list_monitor") + predictor.endpoint_context = Mock(name="endpoint_context") + + return predictor + + +def test_async_predict_call_pass_through(): + sagemaker_session = empty_sagemaker_session() + predictor_async = AsyncPredictor(Predictor(ENDPOINT, sagemaker_session)) + + result = predictor_async.predict_async(input_path=ASYNC_INPUT_LOCATION) + + assert sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.called + assert sagemaker_session.sagemaker_client.describe_endpoint.not_called + assert sagemaker_session.sagemaker_client.describe_endpoint_config.not_called + + expected_request_args = { + "Accept": DEFAULT_ACCEPT, + "InputLocation": ASYNC_INPUT_LOCATION, + "EndpointName": ENDPOINT, + } + + call_args, kwargs = sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.call_args + assert kwargs == expected_request_args + assert result.output_path == ASYNC_OUTPUT_LOCATION + + +def test_async_predict_call_with_data(): + sagemaker_session = empty_sagemaker_session() + predictor_async = AsyncPredictor(Predictor(ENDPOINT, sagemaker_session)) + predictor_async.name = ASYNC_PREDICTOR + data = DUMMY_DATA + + result = predictor_async.predict_async(data=data) + 
assert sagemaker_session.s3_client.put_object.called + + assert sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.called + assert sagemaker_session.sagemaker_client.describe_endpoint.not_called + assert sagemaker_session.sagemaker_client.describe_endpoint_config.not_called + + expected_request_args = { + "Accept": DEFAULT_ACCEPT, + "InputLocation": predictor_async._input_path, + "EndpointName": ENDPOINT, + } + + call_args, kwargs = sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.call_args + assert kwargs == expected_request_args + assert result.output_path == ASYNC_OUTPUT_LOCATION + + +def test_async_predict_call_with_data_and_input_path(): + sagemaker_session = empty_sagemaker_session() + predictor_async = AsyncPredictor(Predictor(ENDPOINT, sagemaker_session)) + predictor_async.name = ASYNC_PREDICTOR + data = DUMMY_DATA + + result = predictor_async.predict_async(data=data, input_path=ASYNC_INPUT_LOCATION) + assert sagemaker_session.s3_client.put_object.called + + assert sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.called + assert sagemaker_session.sagemaker_client.describe_endpoint.not_called + assert sagemaker_session.sagemaker_client.describe_endpoint_config.not_called + + expected_request_args = { + "Accept": DEFAULT_ACCEPT, + "InputLocation": ASYNC_INPUT_LOCATION, + "EndpointName": ENDPOINT, + } + + call_args, kwargs = sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.call_args + assert kwargs == expected_request_args + assert result.output_path == ASYNC_OUTPUT_LOCATION + + +def test_async_predict_call_pass_through_wait_result(capsys): + sagemaker_session = empty_sagemaker_session() + predictor_async = AsyncPredictor(Predictor(ENDPOINT, sagemaker_session)) + + s3_waiter = Mock(name="object_waiter") + waiter_error = WaiterError( + name="async-predictor-unit-test-waiter-error", + reason="test-waiter-error", + last_response="some response", + ) + s3_waiter.wait = Mock(name="wait", side_effect=[waiter_error, None]) + sagemaker_session.s3_client.get_waiter = Mock( + name="object_exists", + return_value=s3_waiter, + ) + + input_location = "s3://some-input-path" + with pytest.raises(PollingTimeoutError, match="Inference could still be running"): + predictor_async.predict(input_path=input_location) + + result_async = predictor_async.predict(input_path=input_location) + assert sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.called + assert sagemaker_session.sagemaker_client.describe_endpoint.not_called + assert sagemaker_session.sagemaker_client.describe_endpoint_config.not_called + + expected_request_args = { + "Accept": DEFAULT_ACCEPT, + "InputLocation": input_location, + "EndpointName": ENDPOINT, + } + + call_args, kwargs = sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.call_args + assert kwargs == expected_request_args + assert result_async == RETURN_VALUE + + +def test_predict_async_call_invalid_input(): + sagemaker_session = empty_sagemaker_session() + predictor_async = AsyncPredictor(Predictor(ENDPOINT, sagemaker_session)) + + with pytest.raises( + ValueError, + match="Please provide input data or input Amazon S3 location to use async prediction", + ): + predictor_async.predict_async() + + with pytest.raises( + ValueError, + match="Please provide input data or input Amazon S3 location to use async prediction", + ): + predictor_async.predict() + + +def test_predict_call_with_inference_id(): + sagemaker_session = empty_sagemaker_session() + predictor_async = 
AsyncPredictor(Predictor(ENDPOINT, sagemaker_session)) + + input_location = "s3://some-input-path" + result = predictor_async.predict_async(input_path=input_location, inference_id=INFERENCE_ID) + + assert sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.called + + expected_request_args = { + "Accept": DEFAULT_ACCEPT, + "InputLocation": input_location, + "EndpointName": ENDPOINT, + "InferenceId": INFERENCE_ID, + } + + call_args, kwargs = sagemaker_session.sagemaker_runtime_client.invoke_endpoint_async.call_args + assert kwargs == expected_request_args + + assert result.output_path == ASYNC_OUTPUT_LOCATION + + +def test_update_endpoint_no_args(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.update_endpoint() + predictor.update_endpoint.assert_called_with( + initial_instance_count=None, + instance_type=None, + accelerator_type=None, + model_name=None, + tags=None, + kms_key=None, + data_capture_config_dict=None, + wait=True, + ) + + +def test_update_endpoint_all_args(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.update_endpoint() + + new_instance_count = 2 + new_instance_type = "ml.c4.xlarge" + new_accelerator_type = "ml.eia1.medium" + new_model_name = "new-model" + new_tags = {"Key": "foo", "Value": "bar"} + new_kms_key = "new-key" + new_data_capture_config_dict = {} + + predictor_async.update_endpoint( + initial_instance_count=new_instance_count, + instance_type=new_instance_type, + accelerator_type=new_accelerator_type, + model_name=new_model_name, + tags=new_tags, + kms_key=new_kms_key, + data_capture_config_dict=new_data_capture_config_dict, + wait=False, + ) + + predictor.update_endpoint.assert_called_with( + initial_instance_count=new_instance_count, + instance_type=new_instance_type, + accelerator_type=new_accelerator_type, + model_name=new_model_name, + tags=new_tags, + kms_key=new_kms_key, + data_capture_config_dict=new_data_capture_config_dict, + wait=False, + ) + + +def test_delete_endpoint_with_config(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.delete_endpoint() + predictor.delete_endpoint.assert_called_with(True) + + +def test_delete_endpoint_only(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.delete_endpoint(delete_endpoint_config=False) + predictor.delete_endpoint.assert_called_with(False) + + +def test_delete_model(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.delete_model() + predictor.delete_model.assert_called_with() + + +def test_enable_data_capture(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.enable_data_capture() + predictor.enable_data_capture.assert_called_with() + + +def test_disable_data_capture(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.disable_data_capture() + predictor.disable_data_capture.assert_called_with() + + +def test_update_data_capture_config(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + data_capture_config = Mock(name="data_capture_config") + predictor_async.update_data_capture_config(data_capture_config=data_capture_config) + predictor.update_data_capture_config.assert_called_with(data_capture_config) + + +def test_endpoint_context(): + 
predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.endpoint_context() + predictor.endpoint_context.assert_called_with() + + +def test_list_monitors(): + predictor = empty_predictor() + predictor_async = AsyncPredictor(predictor=predictor) + + predictor_async.list_monitors() + predictor.list_monitors.assert_called_with() diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index b2c14c5e5a..8604835890 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -24,6 +24,7 @@ import sagemaker from sagemaker import TrainingInput, Session, get_execution_role +from sagemaker.async_inference import AsyncInferenceConfig from sagemaker.session import ( _tuning_job_status, _transform_job_status, @@ -749,6 +750,11 @@ def test_training_input_all_arguments(): IN_PROGRESS_DESCRIBE_TRANSFORM_JOB_RESULT = dict(COMPLETED_DESCRIBE_TRANSFORM_JOB_RESULT) IN_PROGRESS_DESCRIBE_TRANSFORM_JOB_RESULT.update({"TransformJobStatus": "InProgress"}) +SERVERLESS_INFERENCE_CONFIG = { + "MemorySizeInMB": 2048, + "MaxConcurrency": 2, +} + @pytest.fixture() def sagemaker_session(): @@ -1911,6 +1917,57 @@ def test_endpoint_from_production_variants_with_accelerator_type(sagemaker_sessi ) +def test_endpoint_from_production_variants_with_serverless_inference_config(sagemaker_session): + ims = sagemaker_session + ims.sagemaker_client.describe_endpoint = Mock(return_value={"EndpointStatus": "InService"}) + pvs = [ + sagemaker.production_variant( + "A", "ml.p2.xlarge", serverless_inference_config=SERVERLESS_INFERENCE_CONFIG + ), + sagemaker.production_variant( + "B", "p299.4096xlarge", serverless_inference_config=SERVERLESS_INFERENCE_CONFIG + ), + ] + ex = ClientError( + {"Error": {"Code": "ValidationException", "Message": "Could not find your thing"}}, "b" + ) + ims.sagemaker_client.describe_endpoint_config = Mock(side_effect=ex) + tags = [{"ModelName": "TestModel"}] + sagemaker_session.endpoint_from_production_variants("some-endpoint", pvs, tags) + sagemaker_session.sagemaker_client.create_endpoint.assert_called_with( + EndpointConfigName="some-endpoint", EndpointName="some-endpoint", Tags=tags + ) + sagemaker_session.sagemaker_client.create_endpoint_config.assert_called_with( + EndpointConfigName="some-endpoint", ProductionVariants=pvs, Tags=tags + ) + + +def test_endpoint_from_production_variants_with_async_config(sagemaker_session): + ims = sagemaker_session + ims.sagemaker_client.describe_endpoint = Mock(return_value={"EndpointStatus": "InService"}) + pvs = [ + sagemaker.production_variant("A", "ml.p2.xlarge"), + sagemaker.production_variant("B", "p299.4096xlarge"), + ] + ex = ClientError( + {"Error": {"Code": "ValidationException", "Message": "Could not find your thing"}}, "b" + ) + ims.sagemaker_client.describe_endpoint_config = Mock(side_effect=ex) + sagemaker_session.endpoint_from_production_variants( + "some-endpoint", + pvs, + async_inference_config_dict=AsyncInferenceConfig, + ) + sagemaker_session.sagemaker_client.create_endpoint.assert_called_with( + EndpointConfigName="some-endpoint", EndpointName="some-endpoint", Tags=[] + ) + sagemaker_session.sagemaker_client.create_endpoint_config.assert_called_with( + EndpointConfigName="some-endpoint", + ProductionVariants=pvs, + AsyncInferenceConfig=AsyncInferenceConfig, + ) + + def test_update_endpoint_succeed(sagemaker_session): sagemaker_session.sagemaker_client.describe_endpoint = Mock( return_value={"EndpointStatus": "InService"} diff --git a/tests/unit/test_tuner.py 
b/tests/unit/test_tuner.py index 2ae028e9ba..d6f1f5a648 100644 --- a/tests/unit/test_tuner.py +++ b/tests/unit/test_tuner.py @@ -30,6 +30,8 @@ create_transfer_learning_tuner, HyperparameterTuner, ) +from sagemaker.workflow.functions import JsonGet, Join +from sagemaker.workflow.parameters import ParameterString, ParameterInteger from .tuner_test_utils import * # noqa: F403 @@ -68,14 +70,24 @@ def tuner(estimator): def test_prepare_for_training(tuner): - static_hyperparameters = {"validated": 1, "another_one": 0} + hp1 = JsonGet(step_name="stepname", property_file="pf", json_path="jp") + hp2 = Join(on="/", values=["1", "2", ParameterString(name="ps", default_value="3")]) + + static_hyperparameters = { + "validated": 1, + "another_one": 0, + "hp1": hp1, + "hp2": hp2, + } + tuner.estimator.set_hyperparameters(**static_hyperparameters) tuner._prepare_for_tuning() assert tuner._current_job_name.startswith(IMAGE_NAME) - - assert len(tuner.static_hyperparameters) == 1 + assert len(tuner.static_hyperparameters) == 3 assert tuner.static_hyperparameters["another_one"] == "0" + assert tuner.static_hyperparameters["hp1"] == hp1 + assert tuner.static_hyperparameters["hp2"] == hp2 def test_prepare_for_tuning_with_amazon_estimator(tuner, sagemaker_session): @@ -1156,6 +1168,20 @@ def test_integer_parameter_ranges(): assert ranges["ScalingType"] == "Auto" +def test_integer_parameter_ranges_with_pipeline_parameter(): + min = ParameterInteger(name="p", default_value=2) + max = JsonGet(step_name="sn", property_file="pf", json_path="jp") + scale = ParameterString(name="scale", default_value="Auto") + int_param = IntegerParameter(min, max) + ranges = int_param.as_tuning_range("some") + + assert len(ranges.keys()) == 4 + assert ranges["Name"] == "some" + assert ranges["MinValue"] == min + assert ranges["MaxValue"] == max + assert ranges["ScalingType"] == scale + + def test_integer_parameter_scaling_type(): int_param = IntegerParameter(2, 3, scaling_type="Linear") int_range = int_param.as_tuning_range("range") diff --git a/tox.ini b/tox.ini index 348c3fae89..50c75cf18c 100644 --- a/tox.ini +++ b/tox.ini @@ -59,6 +59,7 @@ markers = timeout: mark a test as a timeout. [testenv] +pip_version = pip==20.2 passenv = AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY @@ -87,7 +88,7 @@ commands = flake8 skipdist = true skip_install = true deps = - pylint + pylint==2.6.2 astroid==2.4.2 commands = python -m pylint --rcfile=.pylintrc -j 0 src/sagemaker @@ -116,6 +117,8 @@ changedir = doc # pip install requirements.txt is separate as RTD does it in separate steps # having the requirements.txt installed in deps above results in Double Requirement exception # https://github.com/pypa/pip/issues/988 +deps = + pip==20.2 commands = pip install --exists-action=w -r requirements.txt sphinx-build -T -W -b html -d _build/doctrees-readthedocs -D language=en . _build/html
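
The test changes above map directly to the features in this release; a few hedged usage sketches follow. All bucket names, role ARNs, image URIs, cluster IDs, and step names in them are placeholders, not values from this patch.

The EMR pipeline-step tests exercise the new `EMRStep` and `EMRStepConfig` classes. A minimal sketch of submitting a Hadoop JAR step to an existing cluster, assuming the classes live under `sagemaker.workflow.emr_step` as in the test module layout:

```python
from sagemaker.workflow.emr_step import EMRStep, EMRStepConfig
from sagemaker.workflow.pipeline import Pipeline

# Describes the Hadoop JAR step that EMR runs on the cluster.
step_config = EMRStepConfig(
    jar="s3://my-bucket/script-runner.jar",  # placeholder S3 URI
    args=["--arg_0", "arg_0_value"],
    main_class="com.my.Main",
    properties=[{"Key": "Foo", "Value": "Foo_value"}],
)

emr_step = EMRStep(
    name="MyEMRStep",
    cluster_id="j-1ABCDEFGHIJK",  # placeholder: an already-running EMR cluster
    display_name="MyEMRStep",
    description="Submit a Hadoop/Spark step to an existing EMR cluster",
    step_config=step_config,
    depends_on=[],
)

# Step outputs are exposed as property references that resolve at execution
# time, e.g. emr_step.properties.Status.State becomes
# {"Get": "Steps.MyEMRStep.Status.State"} in the pipeline definition.
state = emr_step.properties.Status.State

pipeline = Pipeline(name="MyEMRPipeline", steps=[emr_step])
# pipeline.definition() renders JSON shaped like the dict asserted above.
```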
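`test_create_model_step_with_model_pipeline` pins down the "Support model pipelines in CreateModelStep" feature: `CreateModelStep` now accepts a multi-container `PipelineModel`, serializing every container into the CreateModel request. A sketch under placeholder URIs and role:

```python
from sagemaker.inputs import CreateModelInput
from sagemaker.model import Model
from sagemaker.pipeline import PipelineModel
from sagemaker.sparkml import SparkMLModel
from sagemaker.workflow.steps import CreateModelStep

role = "arn:aws:iam::111122223333:role/MySageMakerRole"  # placeholder

# Container 1: SparkML preprocessing; container 2: the trained model.
sparkml_model = SparkMLModel(
    model_data="s3://my-bucket/sparkml-model.tar.gz",
    role=role,
    env={"SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT": "text/csv"},
)
trained_model = Model(
    image_uri="111122223333.dkr.ecr.us-west-2.amazonaws.com/my-algo:latest",
    model_data="s3://my-bucket/model.tar.gz",
    role=role,
)
pipeline_model = PipelineModel(models=[sparkml_model, trained_model], role=role)

# Each model in the PipelineModel becomes one entry in the step's
# "Containers" list, as the expected request above shows.
step = CreateModelStep(
    name="MyCreateModelStep",
    model=pipeline_model,
    inputs=CreateModelInput(instance_type="ml.c4.4xlarge"),
)
```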
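The Airflow test now passes `checkpoint_s3_uri` and `checkpoint_local_path` through the estimator ("support checkpoint to be passed from estimator"), and they surface as `CheckpointConfig` in the generated training request. A sketch with placeholder values:

```python
from sagemaker.estimator import Estimator

estimator = Estimator(
    image_uri="111122223333.dkr.ecr.us-west-2.amazonaws.com/my-algo:latest",  # placeholder
    role="arn:aws:iam::111122223333:role/MySageMakerRole",                    # placeholder
    instance_count=1,
    instance_type="ml.c4.2xlarge",
    # Checkpoints the training container writes locally are synced to S3,
    # which is what lets interrupted (e.g. spot) jobs resume:
    checkpoint_s3_uri="s3://my-bucket/checkpoints/",  # -> CheckpointConfig.S3Uri
    checkpoint_local_path="/opt/ml/checkpoints",      # -> CheckpointConfig.LocalPath
)
```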
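The rewritten `test_facet_of_bias_config` cases document the `BiasConfig` facet rules: a facet may be named by column header or, new in this release, by zero-based column index ("support specifying a facet by its column index"); `facet_values_or_threshold` is optional per facet (omitting it lets Clarify iterate the facet's observed values); and with multiple facets the two lists must line up one-to-one. A sketch of both shapes:

```python
from sagemaker.clarify import BiasConfig

# Single facet addressed by column index; value 1 marks the sensitive group.
by_index = BiasConfig(
    label_values_or_threshold=[1],
    facet_name=1,  # second column of the dataset (zero-based)
    facet_values_or_threshold=[1],
)

# Multiple facets: one entry per facet; None means "no values specified for
# this facet", so Clarify computes metrics per observed category.
multi = BiasConfig(
    label_values_or_threshold=[1],
    facet_name=["Feature1", 1, "Feature3"],
    facet_values_or_threshold=[[1], None, [0.5]],
)
print(multi.get_config()["facet"])
```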
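`test_generic_to_deploy_async` shows that `deploy()` returns an `AsyncPredictor` rather than a plain `Predictor` when an `AsyncInferenceConfig` is supplied. A sketch that assumes `estimator` is an already-fitted `Estimator`, as in the checkpoint sketch above:

```python
from sagemaker.async_inference import AsyncInferenceConfig

# Responses for async invocations land in the configured S3 output path
# instead of coming back in the HTTP response.
predictor_async = estimator.deploy(
    initial_instance_count=1,
    instance_type="ml.c5.xlarge",
    async_inference_config=AsyncInferenceConfig(
        output_path="s3://my-bucket/async-results/",  # placeholder bucket
    ),
)
```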
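The new `tests/unit/test_predictor_async.py` fixes the `AsyncPredictor` contract: `predict_async` needs either in-memory `data` (which the predictor uploads to S3 for you) or an `input_path`, and returns immediately with the S3 output location; the blocking `predict` polls S3 for the result object and raises `PollingTimeoutError` if it never appears. A sketch continuing from the deploy above:

```python
from sagemaker.exceptions import PollingTimeoutError

# Fire-and-forget: invoke_endpoint_async returns only the output location.
response = predictor_async.predict_async(
    input_path="s3://my-bucket/inputs/payload.csv"  # placeholder input object
)
print(response.output_path)

# Blocking variant: waits for the result object to appear in S3.
try:
    result = predictor_async.predict(input_path="s3://my-bucket/inputs/payload.csv")
except PollingTimeoutError:
    # Inference may still be running; the result can be fetched later from
    # response.output_path once the object lands in S3.
    pass
```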
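The session and estimator tests wire serverless settings (request keys `MemorySizeInMB` and `MaxConcurrency`) through endpoint creation, and the error message asserted above confirms that instance type and count may be omitted only in this mode. A sketch, assuming the constructor keywords mirror those request keys and using the module path from the new docs page:

```python
from sagemaker.serverless.serverless_inference_config import ServerlessInferenceConfig

# Serverless endpoints need no instance type or count; capacity is described
# by memory size and the number of concurrent invocations instead.
serverless_config = ServerlessInferenceConfig(
    memory_size_in_mb=2048,  # assumed kwarg -> "MemorySizeInMB"
    max_concurrency=2,       # assumed kwarg -> "MaxConcurrency"
)

# No initial_instance_count / instance_type arguments in this mode.
predictor = estimator.deploy(serverless_inference_config=serverless_config)
```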
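Finally, the tuner tests show that pipeline entities (`JsonGet`, `Join`, and parameters) are now preserved both in static hyperparameters and as `IntegerParameter` bounds ("support JsonGet/Join parameterization in tuning step Hyperparameters"), instead of being coerced to strings up front. A sketch assuming `estimator` as above; step, property-file, and JSON-path names are placeholders:

```python
from sagemaker.tuner import IntegerParameter
from sagemaker.workflow.functions import JsonGet, Join
from sagemaker.workflow.parameters import ParameterInteger, ParameterString

# Static hyperparameters may be deferred expressions resolved at pipeline
# execution time.
estimator.set_hyperparameters(
    validated=1,
    threshold=JsonGet(step_name="EvalStep", property_file="metrics", json_path="best.threshold"),
    data_prefix=Join(
        on="/",
        values=["s3://my-bucket", ParameterString(name="Run", default_value="0")],
    ),
)

# Tuning ranges accept pipeline parameters and JsonGet expressions as bounds.
depth_range = IntegerParameter(
    min_value=ParameterInteger(name="MinDepth", default_value=2),
    max_value=JsonGet(step_name="EvalStep", property_file="metrics", json_path="best.max_depth"),
)
```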