def deploy(self, image_name, s3_model_location, train_instance_count,
           train_instance_type, tags=None, endpoint_name=None):
    """
    Deploy a trained model to a SageMaker endpoint, updating the endpoint
    in place if one with the given name already exists.

    :param image_name: [str], name of Docker image
    :param s3_model_location: [str], model location in S3
    :param train_instance_count: [int], number of ec2 instances
    :param train_instance_type: [str], ec2 instance type
    :param tags: [optional[list[dict]], default: None], list of tags for
        labeling the SageMaker resources. For more, see
        https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html

        Example:
            [
                {'Key': 'key_name_1', 'Value': key_value_1},
                {'Key': 'key_name_2', 'Value': key_value_2},
            ]
    :param endpoint_name: [optional[str]], optional name for the SageMaker
        endpoint

    :return: [str], endpoint name
    """
    image = self._construct_image_location(image_name)

    model = sage.Model(
        model_data=s3_model_location,
        image_uri=image,
        role=self.role,
        sagemaker_session=self.sagemaker_session)

    try:
        model.deploy(
            initial_instance_count=train_instance_count,
            instance_type=train_instance_type,
            tags=tags,
            endpoint_name=endpoint_name)
        return model.endpoint_name
    except botocore.exceptions.ClientError:
        # deploy() raises a ClientError when the endpoint already exists;
        # in that case update the existing endpoint to serve the new model.
        # NOTE(review): assumes endpoint_name is set when this path is hit —
        # Predictor(endpoint_name=None) would fail; confirm with callers.
        predictor = sage.Predictor(
            endpoint_name=endpoint_name,
            sagemaker_session=self.sagemaker_session)
        predictor.update_endpoint(
            initial_instance_count=train_instance_count,
            instance_type=train_instance_type,
            tags=tags,
            model_name=model.name)
        return predictor.endpoint_name
def create_endpoint_from_a_model_in_s3(self, sess, role, model_path_in_s3):
    """
    Create a SageMaker endpoint from a model artifact stored in S3,
    serving it with this object's training image.

    :param sess: SageMaker session used for the deployment
    :param role: AWS IAM role (ARN) assumed by the endpoint
    :param model_path_in_s3: S3 location of the trained model artifact
    :return: None — the endpoint is created as a side effect
    """
    sm_model = sagemaker.Model(
        model_data=model_path_in_s3,
        image=self.training_image,
        role=role,
        sagemaker_session=sess)
    # Single-instance deployment; instance type comes from project settings.
    sm_model.deploy(
        initial_instance_count=1,
        instance_type=settings.TRAINING_AWS_INSTANCE)
def batch_transform(self, image_name, s3_model_location, s3_input_location,
                    s3_output_location, transform_instance_count,
                    transform_instance_type, tags=None):
    """
    Execute batch transform on a trained model to SageMaker

    :param image_name: [str], name of Docker image
    :param s3_model_location: [str], model location in S3
    :param s3_input_location: [str], S3 input data location
    :param s3_output_location: [str], S3 output data location
    :param transform_instance_count: [int], number of ec2 instances
    :param transform_instance_type: [str], ec2 instance type
    :param tags: [optional[list[dict]], default: None], list of tags for
        labeling the transform job. For more, see
        https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html

        Example:
            [
                {'Key': 'key_name_1', 'Value': key_value_1},
                {'Key': 'key_name_2', 'Value': key_value_2},
            ]
    """
    content_type = "application/json"

    model = sage.Model(
        model_data=s3_model_location,
        image=self._construct_image_location(image_name),
        role=self.role,
        sagemaker_session=self.sagemaker_session)

    # One JSON record per line, in and out.
    transformer = model.transformer(
        instance_type=transform_instance_type,
        instance_count=transform_instance_count,
        assemble_with='Line',
        output_path=s3_output_location,
        tags=tags,
        accept=content_type,
        strategy="SingleRecord")
    transformer.transform(
        data=s3_input_location,
        split_type='Line',
        content_type=content_type)
def deploy_on_sagemaker(image_name, aws_profile, s3_model_location,
                        initial_instance_count=1,
                        instance_type='ml.t2.medium'):
    """
    Deploy a model stored in S3 to a SageMaker endpoint.

    :param image_name: [str], name of the Docker image serving the model
    :param aws_profile: [str], AWS credentials profile to use
    :param s3_model_location: [str], S3 location of the model artifact
    :param initial_instance_count: [int, default=1], number of ec2 instances
        backing the endpoint
    :param instance_type: [str, default='ml.t2.medium'], ec2 instance type
    :return: None — the endpoint is created as a side effect
    """
    boto_session = boto3.Session(profile_name=aws_profile)
    session = sagemaker.Session(boto_session=boto_session)

    image = _construct_image_location(boto_session, image_name)
    role = get_execution_role(session)

    model = sagemaker.Model(
        model_data=s3_model_location,
        image=image,
        role=role,
        sagemaker_session=session)
    model.deploy(
        initial_instance_count=initial_instance_count,
        instance_type=instance_type)
def _create_model(s3_model_location, image, role, sagemaker_session,
                  instance_type):
    """
    Register a model in SageMaker from an S3 artifact and a Docker image.

    :param s3_model_location: [str], S3 location of the model artifact
    :param image: [str], Docker image serving the model
    :param role: [str], AWS IAM role (ARN) used by the model
    :param sagemaker_session: SageMaker session used for the API call
    :param instance_type: [str], ec2 instance type the container is
        prepared for
    :return: result of ``sagemaker_session.create_model``
    """
    staged_model = sagemaker.Model(
        model_data=s3_model_location,
        image=image,
        role=role,
        sagemaker_session=sagemaker_session)

    # Build the container definition, then derive the model name from the
    # image it references.
    container_definition = staged_model.prepare_container_def(instance_type)
    model_name = utils.name_from_image(container_definition['Image'])

    return sagemaker_session.create_model(
        name=model_name,
        role=role,
        container_defs=container_definition)
def deploy_on_sagemaker(s3_model_location, aws_profile="packt-sagemaker",
                        image_name='hotel-recommender',
                        initial_instance_count=1,
                        instance_type='ml.t2.medium'):
    """
    Deploy the hotel-recommender model stored in S3 to a SageMaker endpoint.

    :param s3_model_location: [str], S3 location of the model artifact
    :param aws_profile: [str, default="packt-sagemaker"], AWS credentials
        profile to use
    :param image_name: [str, default='hotel-recommender'], name of the
        Docker image serving the model
    :param initial_instance_count: [int, default=1], number of ec2 instances
        backing the endpoint
    :param instance_type: [str, default='ml.t2.medium'], ec2 instance type
    :return: None — the endpoint is created as a side effect
    """
    boto_session = boto3.Session(profile_name=aws_profile)
    session = sagemaker.Session(boto_session=boto_session)

    image = construct_image_location(boto_session, image_name)
    role = get_execution_role(session)

    model = sagemaker.Model(
        model_data=s3_model_location,
        image=image,
        role=role,
        sagemaker_session=session)
    model.deploy(
        initial_instance_count=initial_instance_count,
        instance_type=instance_type)
def deploy(self, image_name, s3_model_location, train_instance_count,
           train_instance_type, tags=None):
    """
    Deploy model to SageMaker

    :param image_name: [str], name of Docker image
    :param s3_model_location: [str], model location in S3
    :param train_instance_count: [int], number of ec2 instances
    :param train_instance_type: [str], ec2 instance type
    :param tags: [optional[list[dict]], default: None], list of tags for
        labeling the SageMaker resources. For more, see
        https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html

        Example:
            [
                {'Key': 'key_name_1', 'Value': key_value_1},
                {'Key': 'key_name_2', 'Value': key_value_2},
            ]

    :return: [str], endpoint name
    """
    image = self._construct_image_location(image_name)

    model = sage.Model(
        model_data=s3_model_location,
        image=image,
        role=self.role,
        sagemaker_session=self.sagemaker_session)
    model.deploy(
        initial_instance_count=train_instance_count,
        instance_type=train_instance_type,
        tags=tags)

    return model.endpoint_name
def deploy(self, image_name, s3_model_location, train_instance_count,
           train_instance_type):
    """
    Deploy model to SageMaker

    :param image_name: [str], name of Docker image
    :param s3_model_location: [str], model location in S3
    :param train_instance_count: [int], number of ec2 instances
    :param train_instance_type: [str], ec2 instance type

    :return: [str], endpoint name
    """
    image = self._construct_image_location(image_name)

    model = sage.Model(
        model_data=s3_model_location,
        image=image,
        role=self.role,
        sagemaker_session=self.sagemaker_session)
    # Pass deploy arguments by keyword, consistent with the other deploy
    # helpers in this file and robust to signature reordering in the SDK.
    model.deploy(
        initial_instance_count=train_instance_count,
        instance_type=train_instance_type)

    return model.endpoint_name
def model_create(job, model_artifact, name, session: sagemaker.Session,
                 inference_image, inference_image_path,
                 inference_image_accounts, role, force, multimodel=False,
                 accelerator_type=None):
    """
    Create a SageMaker model, either from a training job's output artifact
    or from an explicit artifact location.

    Exactly one of ``job`` or ``model_artifact`` must be given. When
    ``model_artifact`` is used, ``name`` is required; relative artifact
    paths are resolved against the session's default bucket.

    :param job: training job whose output artifact becomes the model
    :param model_artifact: explicit model artifact (S3 URI or bucket-relative
        path)
    :param name: model name; defaults to the job name when created from a job
    :param session: SageMaker session used for all AWS calls
    :param inference_image: tag of the inference Docker image
    :param inference_image_path: build path of the inference image
    :param inference_image_accounts: accounts granted access to the image
    :param role: IAM role name for inference; ensured/created if missing
    :param force: delete and recreate the model if it already exists
    :param multimodel: [bool, default=False], host multiple models in one
        container ('MultiModel' vs 'SingleModel' container mode)
    :param accelerator_type: unused here — NOTE(review): accepted for CLI
        compatibility; confirm whether it should be forwarded
    :raises click.UsageError: on invalid argument combinations or when the
        model exists and ``force`` is not set
    """
    job = cli_argument(job, session=session)
    name = cli_argument(name, session=session)
    model_artifact = cli_argument(model_artifact, session=session)
    image_config = Image(
        tag=inference_image,
        path=inference_image_path,
        accounts=inference_image_accounts)
    image_uri = ecr_ensure_image(
        image=image_config,
        session=session.boto_session)
    # Exactly one source must be specified.
    if (job and model_artifact) or (not (job or model_artifact)):
        raise click.UsageError('Specify one of job_name or model_artifact')
    if model_artifact and not name:
        raise click.UsageError('name is required if job is not provided')
    iam = session.boto_session.client('iam')
    client = session.boto_session.client('sagemaker')
    role = ensure_inference_role(iam=iam, role_name=role)
    if job:
        # Resolve the artifact from the training job's description.
        model_artifact = training_describe(
            job_name=job,
            field='ModelArtifacts.S3ModelArtifacts',
            session=session)
        if not name:
            name = job
        print("Creating model [{}] from job [{}] artifact [{}]".format(
            name, job, model_artifact))
    else:
        if not model_artifact.startswith('s3://'):
            # Bucket-relative path: anchor it in the default bucket.
            if model_artifact.startswith('/'):
                model_artifact = model_artifact[1:]
            bucket = session.default_bucket()
            model_artifact = 's3://{}/{}'.format(bucket, model_artifact)
        print("Creating model [{}] from artifact [{}]".format(
            name, model_artifact))
    if model_exists(name=name, client=client):
        if force:
            print("Deleting existing model")
            model_delete(name=name, client=client)
        else:
            raise click.UsageError('Specify force if overwriting model')
    model = sagemaker.Model(
        image_uri=image_uri,
        model_data=model_artifact,
        role=role,
        predictor_cls=None,
        env=None,
        name=name,
        sagemaker_session=session)
    container_def = sagemaker.container_def(
        model.image_uri, model.model_data, model.env,
        container_mode='MultiModel' if multimodel else 'SingleModel')
    enable_network_isolation = model.enable_network_isolation()
    session.create_model(
        model.name,
        model.role,
        container_def,
        vpc_config=model.vpc_config,
        enable_network_isolation=enable_network_isolation)
def batch_transform(self, image_name, s3_model_location, s3_input_location,
                    s3_output_location, transform_instance_count,
                    transform_instance_type, tags=None, wait=False,
                    job_name=None):
    """
    Execute batch transform on a trained model to SageMaker

    :param image_name: [str], name of Docker image
    :param s3_model_location: [str], model location in S3
    :param s3_input_location: [str], S3 input data location
    :param s3_output_location: [str], S3 output data location
    :param transform_instance_count: [int], number of ec2 instances
    :param transform_instance_type: [str], ec2 instance type
    :param tags: [optional[list[dict]], default: None], list of tags for
        labeling the transform job. For more, see
        https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html

        Example:
            [
                {'Key': 'key_name_1', 'Value': key_value_1},
                {'Key': 'key_name_2', 'Value': key_value_2},
            ]
    :param wait: [bool, default=False], wait or not for the batch transform
        to finish
    :param job_name: [str, default=None], name for the SageMaker batch
        transform job
    :return: [str], transform job status if wait=True.
        Valid values: 'InProgress'|'Completed'|'Failed'|'Stopping'|'Stopped'
    """
    content_type = "application/json"

    model = sage.Model(
        model_data=s3_model_location,
        image=self._construct_image_location(image_name),
        role=self.role,
        sagemaker_session=self.sagemaker_session)

    # One JSON record per line, in and out.
    transformer = model.transformer(
        instance_type=transform_instance_type,
        instance_count=transform_instance_count,
        assemble_with='Line',
        output_path=s3_output_location,
        tags=tags,
        accept=content_type,
        strategy="SingleRecord")
    transformer.transform(
        data=s3_input_location,
        split_type='Line',
        content_type=content_type,
        job_name=job_name)

    if wait:
        try:
            transformer.wait()
        except Exception:
            # Deliberate best-effort: wait() raises when the job fails, but
            # we still want to report the job's final status rather than
            # propagate the exception.
            pass
        finally:
            finished_job = transformer.latest_transform_job.job_name
            description = self.sagemaker_client.describe_transform_job(
                TransformJobName=finished_job)
            return description['TransformJobStatus']