Ejemplo n.º 1
0
    def prepare_container_def(self, instance_type):
        """Build the container definition used by the CreateModel API.

        Uploads the user-supplied code to S3 and injects the framework
        configuration into the model environment variables.

        Args:
            instance_type (str): The EC2 instance type to deploy this Model to.
                For example, 'ml.p2.xlarge'.

        Returns:
            dict[str, str]: A container definition object usable with the
            CreateModel API.
        """
        # Fall back to a framework-built image only when no override is set.
        image = self.image or create_image_uri(
            self.sagemaker_session.boto_region_name,
            self.__framework_name__,
            instance_type,
            self.framework_version,
            self.py_version,
        )

        self._upload_code(model_code_key_prefix(self.key_prefix, self.name, image))

        env = dict(self.env)
        env.update(self._framework_env_vars())
        if self.model_server_workers:
            env[MODEL_SERVER_WORKERS_PARAM_NAME.upper()] = str(self.model_server_workers)

        return sagemaker.container_def(image, self.model_data, env)
Ejemplo n.º 2
0
    def train_image(self):
        """Return the Docker image URI used for training.

        Called by :meth:`~sagemaker.estimator.EstimatorBase.fit` to determine
        which image runs the training job.

        Returns:
            str: The URI of the Docker image.
        """
        if self.image_name:
            return self.image_name
        return create_image_uri(
            self.sagemaker_session.boto_region_name,
            self.__framework_name__,
            self.train_instance_type,
            self.framework_version,
            py_version=self.py_version,
        )
Ejemplo n.º 3
0
    def train_image(self):
        """Return the Docker image URI for model training.

        Invoked by :meth:`~sagemaker.estimator.EstimatorBase.fit` when it
        needs an image for the training job.

        Returns:
            str: The URI of the Docker image.
        """
        region = self.sagemaker_session.boto_session.region_name
        return create_image_uri(
            region,
            self.__framework_name__,
            self.train_instance_type,
            self.framework_version,
            py_version=self.py_version)
Ejemplo n.º 4
0
    def train_image(self):
        """Return the training image URI, honoring overrides and script mode."""
        if self.image_name:
            return self.image_name

        if not self._script_mode_enabled():
            # Legacy mode: defer to the generic framework image selection.
            return super(TensorFlow, self).train_image()

        # Script mode has its own framework identifier for the image URI.
        return fw.create_image_uri(
            self.sagemaker_session.boto_region_name,
            _SCRIPT_MODE,
            self.train_instance_type,
            self.framework_version,
            self.py_version,
        )
Ejemplo n.º 5
0
    def prepare_container_def(self, instance_type, accelerator_type=None):
        """Build a container definition with framework configuration set in
        the model environment variables.

        Args:
            instance_type (str): The EC2 instance type to deploy this Model to.
                For example, 'ml.p2.xlarge'.
            accelerator_type (str): The Elastic Inference accelerator type to
                deploy to the instance for loading and making inferences to the
                model. For example, 'ml.eia1.medium'.

        Returns:
            dict[str, str]: A container definition object usable with the
            CreateModel API.
        """
        # MMS-based serving images only exist from a minimum framework version.
        is_mms_version = pkg_resources.parse_version(
            self.framework_version
        ) >= pkg_resources.parse_version(self._LOWEST_MMS_VERSION)

        deploy_image = self.image
        if not deploy_image:
            framework = self.__framework_name__
            if is_mms_version:
                framework = framework + "-serving"
            deploy_image = create_image_uri(
                self.sagemaker_session.boto_session.region_name,
                framework,
                instance_type,
                self.framework_version,
                self.py_version,
                accelerator_type=accelerator_type,
            )

        prefix = model_code_key_prefix(self.key_prefix, self.name, deploy_image)
        # MMS images require the model artifact to be repacked with the code.
        self._upload_code(prefix, repack=is_mms_version)

        env = dict(self.env)
        env.update(self._framework_env_vars())
        if self.model_server_workers:
            env[MODEL_SERVER_WORKERS_PARAM_NAME.upper()] = str(self.model_server_workers)

        model_data = self.repacked_model_data or self.model_data
        return sagemaker.container_def(deploy_image, model_data, env)
Ejemplo n.º 6
0
 def _inferentia_image(self, region, target_instance_type, framework, framework_version):
     """Return the Neo image URI for the given framework and target.

     Args:
         region: AWS region of the image registry.
         target_instance_type: Target instance type with underscores
             (underscores are converted to dots before building the URI).
         framework: Framework name; lowercased and prefixed with 'neo-'.
         framework_version: Framework version used as the image tag component.

     Returns:
         The image URI built by ``fw_utils.create_image_uri`` using the
         Neo account for the region and a fixed 'py3' python version.
     """
     return fw_utils.create_image_uri(
         region,
         "neo-" + framework.lower(),
         target_instance_type.replace("_", "."),
         framework_version,
         py_version="py3",
         account=self._neo_image_account(region),
     )
Ejemplo n.º 7
0
def test_tf(sagemaker_session, tf_version):
    """fit() with an experiment config should issue the expected train call,
    and the resulting model should produce the expected container definition."""
    estimator = TensorFlow(
        entry_point=SCRIPT_FILE,
        role=ROLE,
        sagemaker_session=sagemaker_session,
        training_steps=1000,
        evaluation_steps=10,
        train_instance_count=INSTANCE_COUNT,
        train_instance_type=INSTANCE_TYPE,
        framework_version=tf_version,
        requirements_file=REQUIREMENTS_FILE,
        source_dir=DATA_DIR,
    )

    train_data = "s3://mybucket/train"
    estimator.fit(inputs=train_data, experiment_config=EXPERIMENT_CONFIG)

    assert [c[0] for c in sagemaker_session.method_calls] == ["train", "logs_for_job"]

    expected = _create_train_job(tf_version)
    expected["input_config"][0]["DataSource"]["S3DataSource"]["S3Uri"] = train_data
    expected["experiment_config"] = EXPERIMENT_CONFIG
    assert sagemaker_session.method_calls[0][2] == expected

    model = estimator.create_model()
    container_def = model.prepare_container_def(INSTANCE_TYPE)
    assert container_def == {
        "Environment": {
            "SAGEMAKER_SUBMIT_DIRECTORY": "s3://mybucket/sagemaker-tensorflow-2017-11-06-14:14:15.673/source/sourcedir.tar.gz",  # noqa: E501
            "SAGEMAKER_PROGRAM": "dummy_script.py",
            "SAGEMAKER_REQUIREMENTS": "dummy_requirements.txt",
            "SAGEMAKER_ENABLE_CLOUDWATCH_METRICS": "false",
            "SAGEMAKER_REGION": "us-west-2",
            "SAGEMAKER_CONTAINER_LOG_LEVEL": "20",
        },
        "Image": create_image_uri("us-west-2", "tensorflow", INSTANCE_TYPE, tf_version, "py2"),
        "ModelDataUrl": "s3://m/m.tar.gz",
    }

    assert "cpu" in model.prepare_container_def(INSTANCE_TYPE)["Image"]
    predictor = estimator.deploy(1, INSTANCE_TYPE)
    assert isinstance(predictor, TensorFlowPredictor)
Ejemplo n.º 8
0
def test_create_dlc_image_uri():
    """DLC image URIs use the Asimov default account (prod account for EIA)."""
    default_prefix = "{}.dkr.ecr.us-west-2.amazonaws.com/".format(
        fw_utils.ASIMOV_DEFAULT_ACCOUNT
    )

    uri = fw_utils.create_image_uri(
        "us-west-2", "tensorflow-scriptmode", "ml.p3.2xlarge", "1.14", "py3"
    )
    assert uri == default_prefix + "tensorflow-training:1.14-gpu-py3"

    uri = fw_utils.create_image_uri(
        "us-west-2", "tensorflow-scriptmode", "ml.p3.2xlarge", "1.13.1", "py3"
    )
    assert uri == default_prefix + "tensorflow-training:1.13.1-gpu-py3"

    uri = fw_utils.create_image_uri(
        "us-west-2", "tensorflow-serving", "ml.c4.2xlarge", "1.13.1"
    )
    assert uri == default_prefix + "tensorflow-inference:1.13.1-cpu"

    uri = fw_utils.create_image_uri("us-west-2", "mxnet", "ml.p3.2xlarge", "1.4.1", "py3")
    assert uri == default_prefix + "mxnet-training:1.4.1-gpu-py3"

    uri = fw_utils.create_image_uri(
        "us-west-2", "mxnet-serving", "ml.c4.2xlarge", "1.4.1", "py3"
    )
    assert uri == default_prefix + "mxnet-inference:1.4.1-cpu-py3"

    # Elastic Inference images come from the Asimov prod account instead.
    prod_prefix = "{}.dkr.ecr.us-west-2.amazonaws.com/".format(fw_utils.ASIMOV_PROD_ACCOUNT)
    uri = fw_utils.create_image_uri(
        "us-west-2",
        "mxnet-serving",
        "ml.c4.2xlarge",
        "1.4.1",
        "py3",
        accelerator_type="ml.eia1.medium",
    )
    assert uri == prod_prefix + "mxnet-inference-eia:1.4.1-cpu-py3"
Ejemplo n.º 9
0
    def _get_image_uri(self, instance_type, accelerator_type=None):
        """Return the image URI, preferring an explicit override.

        Args:
            instance_type: EC2 instance type used to pick the device variant.
            accelerator_type: Optional Elastic Inference accelerator type.
        """
        if self.image:
            return self.image

        return create_image_uri(
            self.sagemaker_session.boto_region_name,
            Model.FRAMEWORK_NAME,
            instance_type,
            self._framework_version,
            accelerator_type=accelerator_type)
Ejemplo n.º 10
0
    def train_image(self):
        """Return the Docker image URI to use for training.

        :meth:`~sagemaker.estimator.EstimatorBase.fit` calls this method to
        pick the image that runs model training.

        Returns:
            str: The URI of the Docker image.
        """
        # An explicit image name always wins over the derived framework image.
        return self.image_name or fw_utils.create_image_uri(
            self.sagemaker_session.boto_region_name,
            self._image_framework(),
            self.train_instance_type,
            self._image_version(),
            py_version=PYTHON_VERSION,
        )
Ejemplo n.º 11
0
    def serving_image_uri(self, region_name, instance_type):
        """Construct the URI of the serving image for this model.

        Args:
            region_name (str): AWS region where the image is uploaded.
            instance_type (str): SageMaker instance type; determines the
                device variant (cpu/gpu/family-specific optimized).

        Returns:
            str: The image URI appropriate for the given parameters.
        """
        return fw_utils.create_image_uri(
            region_name, self.__framework_name__, instance_type,
            self.framework_version, self.py_version)
Ejemplo n.º 12
0
    def prepare_container_def(self, instance_type, accelerator_type=None):
        """Assemble the container definition for deploying this model.

        Uploads the user-supplied code to S3 and sets framework configuration
        in the model environment variables.

        Args:
            instance_type (str): The EC2 instance type to deploy this Model to.
                For example, 'ml.p2.xlarge'.
            accelerator_type (str): The Elastic Inference accelerator type to
                deploy to the instance for loading and making inferences to the
                model. For example, 'ml.eia1.medium'.

        Returns:
            dict[str, str]: A container definition object usable with the
            CreateModel API.
        """
        image = self.image
        if not image:
            image = create_image_uri(
                self.sagemaker_session.boto_region_name,
                self.__framework_name__,
                instance_type,
                self.framework_version,
                self.py_version,
                accelerator_type=accelerator_type,
            )

        self._upload_code(model_code_key_prefix(self.key_prefix, self.name, image))

        environment = dict(self.env)
        environment.update(self._framework_env_vars())
        if self.model_server_workers:
            environment[MODEL_SERVER_WORKERS_PARAM_NAME.upper()] = str(
                self.model_server_workers)

        return sagemaker.container_def(image, self.model_data, environment)
Ejemplo n.º 13
0
    def prepare_container_def(self, instance_type):
        """Return a container definition with framework configuration set in
        the model environment variables.

        Args:
            instance_type (str): The EC2 instance type to deploy this Model to.
                For example, 'ml.p2.xlarge'.

        Returns:
            dict[str, str]: A container definition object usable with the
            CreateModel API.
        """
        image = self.image
        if not image:
            image = create_image_uri(
                self.sagemaker_session.boto_session.region_name,
                self.__framework_name__,
                instance_type,
                self.framework_version,
                self.py_version,
            )

        # Prefer the explicit key prefix, then the model name, then a name
        # derived from the deploy image.
        self._upload_code(self.key_prefix or self.name or name_from_image(image))

        env = dict(self.env)
        env.update(self._framework_env_vars())
        if self.model_server_workers:
            env[MODEL_SERVER_WORKERS_PARAM_NAME.upper()] = str(self.model_server_workers)
        return sagemaker.container_def(image, self.model_data, env)
def test_tf(m_tar, e_tar, time, strftime, sagemaker_session, tf_version):
    """fit() should issue the expected train/logs calls, and the created
    model should yield the expected container definition and deploy."""
    tf = TensorFlow(
        entry_point=SCRIPT_FILE,
        role=ROLE,
        sagemaker_session=sagemaker_session,
        training_steps=1000,
        evaluation_steps=10,
        train_instance_count=INSTANCE_COUNT,
        train_instance_type=INSTANCE_TYPE,
        framework_version=tf_version,
        requirements_file=REQUIREMENTS_FILE,
        source_dir=DATA_DIR,
    )

    e_tar.return_value = UploadedCode(
        s3_prefix='s3://{}/{}/source/sourcedir.tar.gz'.format(BUCKET_NAME, JOB_NAME),
        script_name=SCRIPT_FILE)
    m_tar.return_value = UploadedCode(
        s3_prefix='s3://{}/{}/sourcedir.tar.gz'.format(BUCKET_NAME, JOB_NAME),
        script_name=SCRIPT_FILE)

    train_input = 's3://mybucket/train'
    tf.fit(inputs=train_input)

    assert [call[0] for call in sagemaker_session.method_calls] == ['train', 'logs_for_job']

    expected = _create_train_job(tf_version)
    expected['input_config'][0]['DataSource']['S3DataSource']['S3Uri'] = train_input
    assert sagemaker_session.method_calls[0][2] == expected

    model = tf.create_model()
    assert model.prepare_container_def(INSTANCE_TYPE) == {
        'Environment': {
            'SAGEMAKER_SUBMIT_DIRECTORY': 's3://{}/{}/sourcedir.tar.gz'.format(BUCKET_NAME, JOB_NAME),
            'SAGEMAKER_PROGRAM': 'dummy_script.py',
            'SAGEMAKER_REQUIREMENTS': 'dummy_requirements.txt',
            'SAGEMAKER_ENABLE_CLOUDWATCH_METRICS': 'false',
            'SAGEMAKER_REGION': 'us-west-2',
            'SAGEMAKER_CONTAINER_LOG_LEVEL': '20',
        },
        'Image': create_image_uri('us-west-2', "tensorflow", INSTANCE_TYPE, tf_version, "py2"),
        'ModelDataUrl': 's3://m/m.tar.gz',
    }

    assert 'cpu' in model.prepare_container_def(INSTANCE_TYPE)['Image']
    predictor = tf.deploy(1, INSTANCE_TYPE)
    assert isinstance(predictor, TensorFlowPredictor)
Ejemplo n.º 15
0
def test_tf(m_tar, e_tar, time, strftime, sagemaker_session, tf_version):
    """Round-trip: fit a TensorFlow estimator, then build and deploy its model."""
    estimator = TensorFlow(
        entry_point=SCRIPT_FILE, role=ROLE, sagemaker_session=sagemaker_session,
        training_steps=1000, evaluation_steps=10, train_instance_count=INSTANCE_COUNT,
        train_instance_type=INSTANCE_TYPE, framework_version=tf_version,
        requirements_file=REQUIREMENTS_FILE, source_dir=DATA_DIR)

    e_tar.return_value = UploadedCode(
        s3_prefix='s3://{}/{}/source/sourcedir.tar.gz'.format(BUCKET_NAME, JOB_NAME),
        script_name=SCRIPT_FILE)
    m_tar.return_value = UploadedCode(
        s3_prefix='s3://{}/{}/sourcedir.tar.gz'.format(BUCKET_NAME, JOB_NAME),
        script_name=SCRIPT_FILE)

    data_location = 's3://mybucket/train'
    estimator.fit(inputs=data_location)

    observed_calls = [name for name, _, _ in sagemaker_session.method_calls]
    assert observed_calls == ['train', 'logs_for_job']

    expected = _create_train_job(tf_version)
    expected['input_config'][0]['DataSource']['S3DataSource']['S3Uri'] = data_location
    assert sagemaker_session.method_calls[0][2] == expected

    model = estimator.create_model()
    submit_dir = 's3://{}/{}/sourcedir.tar.gz'.format(BUCKET_NAME, JOB_NAME)
    assert model.prepare_container_def(INSTANCE_TYPE) == {
        'Environment': {
            'SAGEMAKER_SUBMIT_DIRECTORY': submit_dir,
            'SAGEMAKER_PROGRAM': 'dummy_script.py',
            'SAGEMAKER_REQUIREMENTS': 'dummy_requirements.txt',
            'SAGEMAKER_ENABLE_CLOUDWATCH_METRICS': 'false',
            'SAGEMAKER_REGION': 'us-west-2',
            'SAGEMAKER_CONTAINER_LOG_LEVEL': '20',
        },
        'Image': create_image_uri('us-west-2', "tensorflow", INSTANCE_TYPE, tf_version, "py2"),
        'ModelDataUrl': 's3://m/m.tar.gz',
    }

    assert 'cpu' in model.prepare_container_def(INSTANCE_TYPE)['Image']
    assert isinstance(estimator.deploy(1, INSTANCE_TYPE), TensorFlowPredictor)
Ejemplo n.º 16
0
    def serving_image_uri(self, region_name, instance_type, accelerator_type=None):
        """Build the serving image URI for this model.

        Args:
            region_name (str): AWS region where the image is uploaded.
            instance_type (str): SageMaker instance type; used to determine the
                device type (cpu/gpu/family-specific optimized).
            accelerator_type (str): The Elastic Inference accelerator type to
                deploy to the instance for loading and making inferences to the
                model (default: None). For example, 'ml.eia1.medium'.

        Returns:
            str: The appropriate image URI based on the given parameters.
        """
        return create_image_uri(
            region_name, self.__framework_name__, instance_type,
            self.framework_version, self.py_version,
            accelerator_type=accelerator_type)
Ejemplo n.º 17
0
def test_create_image_uri_merged():
    """Post-merge frameworks resolve to DLC repos; legacy paths keep old accounts."""
    dlc_prefix = "763104351884.dkr.ecr.us-west-2.amazonaws.com/"

    uri = fw_utils.create_image_uri("us-west-2", "tensorflow-scriptmode",
                                    "ml.p3.2xlarge", "1.14", "py3")
    assert uri == dlc_prefix + "tensorflow-training:1.14-gpu-py3"

    uri = fw_utils.create_image_uri("us-west-2", "tensorflow-scriptmode",
                                    "ml.p3.2xlarge", "1.13.1", "py3")
    assert uri == dlc_prefix + "tensorflow-training:1.13.1-gpu-py3"

    uri = fw_utils.create_image_uri("us-west-2", "tensorflow-serving",
                                    "ml.c4.2xlarge", "1.13.1")
    assert uri == dlc_prefix + "tensorflow-inference:1.13.1-cpu"

    uri = fw_utils.create_image_uri("us-west-2", "mxnet", "ml.p3.2xlarge", "1.4.1", "py3")
    assert uri == dlc_prefix + "mxnet-training:1.4.1-gpu-py3"

    # mxnet-serving without an accelerator still uses the legacy account/repo.
    uri = fw_utils.create_image_uri("us-west-2", "mxnet-serving",
                                    "ml.c4.2xlarge", "1.4.1", "py3")
    assert uri == "520713654638.dkr.ecr.us-west-2.amazonaws.com/sagemaker-mxnet-serving:1.4.1-cpu-py3"

    # With Elastic Inference, the DLC EIA repo is selected instead.
    uri = fw_utils.create_image_uri(
        "us-west-2",
        "mxnet-serving",
        "ml.c4.2xlarge",
        "1.4.1",
        "py3",
        accelerator_type="ml.eia1.medium",
    )
    assert uri == dlc_prefix + "mxnet-inference-eia:1.4.1-cpu-py3"
Ejemplo n.º 18
0
def test_create_dlc_image_uri_iso_east_1():
    """us-iso-east-1 uses the C2S registry domain with ISO-specific accounts."""
    iso_prefix = "886529160074.dkr.ecr.us-iso-east-1.c2s.ic.gov/"

    uri = fw_utils.create_image_uri("us-iso-east-1", "tensorflow-scriptmode",
                                    "ml.m4.xlarge", "1.13.1", "py3")
    assert uri == iso_prefix + "tensorflow-training:1.13.1-cpu-py3"

    uri = fw_utils.create_image_uri("us-iso-east-1", "tensorflow-scriptmode",
                                    "ml.p3.2xlarge", "1.14", "py2")
    assert uri == iso_prefix + "tensorflow-training:1.14-gpu-py2"

    uri = fw_utils.create_image_uri("us-iso-east-1", "tensorflow-serving",
                                    "ml.m4.xlarge", "1.13.0")
    assert uri == iso_prefix + "tensorflow-inference:1.13.0-cpu"

    uri = fw_utils.create_image_uri("us-iso-east-1", "mxnet",
                                    "ml.p3.2xlarge", "1.4.1", "py3")
    assert uri == iso_prefix + "mxnet-training:1.4.1-gpu-py3"

    uri = fw_utils.create_image_uri("us-iso-east-1", "mxnet-serving",
                                    "ml.c4.2xlarge", "1.4.1", "py3")
    assert uri == iso_prefix + "mxnet-inference:1.4.1-cpu-py3"

    # Older mxnet-serving versions come from the legacy ISO account.
    uri = fw_utils.create_image_uri("us-iso-east-1", "mxnet-serving",
                                    "ml.c4.2xlarge", "1.3.1", "py3")
    assert uri == "744548109606.dkr.ecr.us-iso-east-1.c2s.ic.gov/sagemaker-mxnet-serving:1.3.1-cpu-py3"
Ejemplo n.º 19
0
def test_create_image_uri_ei():
    """Elastic Inference serving images get an '-eia' suffixed repo name."""
    uri = fw_utils.create_image_uri(
        MOCK_REGION, 'tensorflow-serving', 'ml.c4.large', '1.1.0',
        accelerator_type='ml.eia1.large', account='23')
    assert uri == '23.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-tensorflow-serving-eia:1.1.0-cpu'
Ejemplo n.º 20
0
def test_invalid_instance_type():
    """The 'ml.' prefix is mandatory; bare instance types raise ValueError."""
    bad_instance_type = 'p3.2xlarge'  # missing the 'ml.' prefix
    with pytest.raises(ValueError):
        create_image_uri('mars-south-3', 'mlfw', bad_instance_type, '1.0.0', 'py3')
Ejemplo n.º 21
0
def test_create_image_uri_default_account():
    """Without an explicit account, the shared default registry is used."""
    uri = create_image_uri('mars-south-3', 'mlfw', 'ml.p3.2xlarge', '1.0rc', 'py3')
    assert uri == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3'
def test_create_image_uri_no_python():
    """Omitting py_version drops the python suffix from the image tag."""
    uri = fw_utils.create_image_uri(MOCK_REGION, MOCK_FRAMEWORK, "ml.c4.large",
                                    "1.0rc", account="23")
    assert uri == "23.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-cpu"
def test_create_image_uri_hkg_override_account():
    """An explicit account overrides the default registry account in ap-east-1.

    Fixes a broken assertion: the original wrote ``assert {expr}``, a
    one-element set literal, which is always truthy regardless of the
    comparison result, so the test could never fail.
    """
    image_uri = fw_utils.create_image_uri(
        MOCK_HKG_REGION, MOCK_FRAMEWORK, "ml.p3.2xlarge", "1.0rc", "py3", account="fake"
    )
    assert image_uri == "fake.dkr.ecr.ap-east-1.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3"
def test_create_image_uri_gov_cloud_override_account():
    """An explicit account is honored in GovCloud regions."""
    uri = fw_utils.create_image_uri(
        "us-gov-west-1", MOCK_FRAMEWORK, "ml.p3.2xlarge", "1.0rc", "py3", account="fake"
    )
    assert uri == "fake.dkr.ecr.us-gov-west-1.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3"
Ejemplo n.º 25
0
def test_unoptimized_gpu_family():
    """A GPU family absent from optimized_families gets the generic gpu tag."""
    uri = create_image_uri('mars-south-3', 'mlfw', 'ml.p2.xlarge', '1.0.0', 'py3',
                           optimized_families=['c5', 'p3'])
    assert uri == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0.0-gpu-py3'
def test_create_image_uri_py37_invalid_framework():
    """Frameworks without Python 3.7 support should raise a descriptive ValueError."""
    error_message = "{} does not support Python 3.7 at this time.".format(MOCK_FRAMEWORK)

    with pytest.raises(ValueError) as error:
        fw_utils.create_image_uri(REGION, MOCK_FRAMEWORK, "ml.m4.xlarge", "1.4.0", "py37")
    # Check the exception's own message: str() of the ExceptionInfo object
    # also embeds file/line details and its use is discouraged by pytest.
    assert error_message in str(error.value)
Ejemplo n.º 27
0
def test_create_image_uri_gpu():
    """Both a real GPU instance type and 'local_gpu' yield the gpu-tagged image."""
    expected = '23.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3'
    for instance_type in ('ml.p3.2xlarge', 'local_gpu'):
        uri = fw_utils.create_image_uri(MOCK_REGION, MOCK_FRAMEWORK, instance_type,
                                        '1.0rc', 'py3', '23')
        assert uri == expected
Ejemplo n.º 28
0
def test_create_image_uri_gpu():
    """GPU and local_gpu instance types both resolve to the gpu image tag."""
    for instance in ('ml.p3.2xlarge', 'local_gpu'):
        uri = create_image_uri('mars-south-3', 'mlfw', instance, '1.0rc', 'py3', '23')
        assert uri == '23.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3'
Ejemplo n.º 29
0
def test_create_image_uri_gov_cloud():
    """GovCloud regions map to their dedicated default registry account."""
    uri = fw_utils.create_image_uri('us-gov-west-1', MOCK_FRAMEWORK,
                                    'ml.p3.2xlarge', '1.0rc', 'py3')
    assert uri == '246785580436.dkr.ecr.us-gov-west-1.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3'
Ejemplo n.º 30
0
def test_create_image_uri_default_account():
    """The default registry account is used when none is supplied."""
    image = create_image_uri('mars-south-3', 'mlfw', 'ml.p3.2xlarge', '1.0rc', 'py3')
    assert image == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3'
Ejemplo n.º 31
0
def test_create_image_uri_cpu():
    """CPU and 'local' instance types both produce the cpu-tagged image."""
    for instance in ('ml.c4.large', 'local'):
        uri = create_image_uri('mars-south-3', 'mlfw', instance, '1.0rc', 'py2', '23')
        assert uri == '23.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-cpu-py2'
Ejemplo n.º 32
0
def test_invalid_instance_type():
    """Instance types without the 'ml.' prefix must be rejected."""
    missing_prefix = 'p3.2xlarge'  # lacks the required 'ml.' prefix
    with pytest.raises(ValueError):
        create_image_uri('mars-south-3', 'mlfw', missing_prefix, '1.0.0', 'py3')
Ejemplo n.º 33
0
def test_create_image_uri_default_account():
    """Fall back to the shared default account when none is given."""
    uri = fw_utils.create_image_uri(MOCK_REGION, MOCK_FRAMEWORK, 'ml.p3.2xlarge',
                                    '1.0rc', 'py3')
    assert uri == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0rc-gpu-py3'
Ejemplo n.º 34
0
def test_create_image_uri_accelerator():
    """Specifying an EIA accelerator appends '-eia' to the repo name."""
    uri = fw_utils.create_image_uri(MOCK_REGION, 'tensorflow', 'ml.p3.2xlarge',
                                    '1.0rc', 'py3', accelerator_type='ml.eia1.medium')
    assert uri == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-tensorflow-eia:1.0rc-gpu-py3'
Ejemplo n.º 35
0
def test_create_image_uri_bad_python():
    """An unrecognized python version must raise ValueError."""
    with pytest.raises(ValueError):
        fw_utils.create_image_uri(MOCK_REGION, MOCK_FRAMEWORK, "ml.c4.large", "1.0rc", "py0")
Ejemplo n.º 36
0
def test_create_image_uri_local_sagemaker_notebook_accelerator():
    """'local_sagemaker_notebook' counts as an accelerator and selects the EIA repo."""
    uri = fw_utils.create_image_uri(MOCK_REGION, 'mxnet', 'ml.p3.2xlarge', '1.0rc',
                                    'py3', accelerator_type='local_sagemaker_notebook')
    assert uri == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mxnet-eia:1.0rc-gpu-py3'
Ejemplo n.º 37
0
def test_invalid_instance_type():
    """Instance types lacking the 'ml.' prefix are rejected with ValueError."""
    with pytest.raises(ValueError):
        fw_utils.create_image_uri(MOCK_REGION, MOCK_FRAMEWORK, "p3.2xlarge", "1.0.0", "py3")
Ejemplo n.º 38
0
def test_unoptimized_gpu_family():
    """p2 is not among the optimized families, so the generic gpu tag is used."""
    uri = fw_utils.create_image_uri(MOCK_REGION, MOCK_FRAMEWORK, 'ml.p2.xlarge',
                                    '1.0.0', 'py3', optimized_families=['c5', 'p3'])
    assert uri == '520713654638.dkr.ecr.mars-south-3.amazonaws.com/sagemaker-mlfw:1.0.0-gpu-py3'