# Example 1
def deploy_model(ws, aci_service_name, experiment_name, asset_name,
                 asset_label, run_id, cpu_cores, memory_gb, entry_script):
    """Deploy (or update) a registered model as an ACI webservice.

    Looks up a model registered under a name derived from ``run_id``. If it
    is not registered yet, finds the completed Run in ``experiment_name``
    whose ``run_id`` metric matches, downloads its files, and registers them
    as a new model. Finally updates the existing webservice
    ``aci_service_name`` with the model — or deploys a fresh service if no
    usable one exists — and blocks until deployment completes.

    :param ws: AzureML Workspace object.
    :param aci_service_name: name of the ACI webservice to create or update.
    :param experiment_name: name of the experiment that produced the run.
    :param asset_name: asset name used in tags and as the model description.
    :param asset_label: optional asset label added to the tags (skipped if None).
    :param run_id: run identifier stored in the run's logged metrics.
    :param cpu_cores: CPU cores for the ACI deployment configuration.
    :param memory_gb: memory (GB) for the ACI deployment configuration.
    :param entry_script: scoring entry script for the inference config.
    :raises Exception: if no completed Run matches ``run_id``.
    """
    env = create_env_from_requirements(endpoint=True)
    inference_config = InferenceConfig(source_directory=os.getcwd(),
                                       entry_script=entry_script,
                                       environment=env)

    deployment_config = AciWebservice.deploy_configuration(cpu_cores=cpu_cores,
                                                           memory_gb=memory_gb)

    # model name
    model_name = get_model_register_name(run_id)
    try:
        model = Model(ws, name=model_name)
    except WebserviceException:
        # Model is not registered yet: download the matching run's files
        # into a temp dir and register them as a new model.
        tmp_path = create_tempdir(name='download_tmp')
        register_path = create_directory(AML_MLAPP_FOLDER, path=tmp_path)

        # getting RUN context
        experiment = Experiment(workspace=ws, name=experiment_name)
        tags = {"run_id": run_id, "asset_name": asset_name}
        if asset_label is not None:
            tags["asset_label"] = asset_label

        # Find the completed run whose logged 'run_id' metric matches ours.
        selected_run_id = None
        for run in Run.list(experiment,
                            tags=tags,
                            include_children=True,
                            status='Completed'):
            run_metrics = run.get_metrics()
            exp_saved_run_id = run_metrics.get("run_id")
            if exp_saved_run_id == run_id:
                selected_run_id = run.id
                break
        if selected_run_id is None:
            raise Exception(
                'ERROR: there is no matching Run object that associated with the run id %s in this experiment.'
                % str(run_id))
        current_run = Run(experiment=experiment, run_id=selected_run_id)

        # download files from run object
        current_run.download_files(output_directory=register_path)

        # register model
        model = Model.register(ws,
                               model_path=register_path,
                               model_name=model_name,
                               tags=tags,
                               description=asset_name)

        # deletes tmp dir and all content
        delete_directory_with_all_contents(tmp_path)

    # Try to update an existing service in place; if that fails, delete it
    # (when present) and deploy a brand-new service.
    service = None
    try:
        service = Webservice(ws, name=aci_service_name)
        service.update(models=[model], inference_config=inference_config)
    except WebserviceException:
        if service:
            service.delete()
        service = Model.deploy(ws, aci_service_name, [model], inference_config,
                               deployment_config)

    service.wait_for_deployment(True)
# Example 2
    def update_deployment(self,
                          name,
                          model_uri=None,
                          flavor=None,
                          config=None):
        """
        Update the deployment specified by name.

        Update the deployment with the specified name. You can update the URI of the model, the
        flavor of the deployed model (in which case the model URI must also be specified), and/or
        any target-specific attributes of the deployment (via `config`). By default, this method
        should block until deployment completes (i.e. until it's possible to perform inference
        with the updated deployment). See target-specific plugin documentation for additional
        detail on support for asynchronous deployment and other configuration.

        :param name: Unique name of deployment to update
        :param model_uri: URI of a new model to deploy.
        :param flavor: (optional) new model flavor to use for deployment. If provided,
                       ``model_uri`` must also be specified. If ``flavor`` is unspecified but
                       ``model_uri`` is specified, a default flavor will be chosen and the
                       deployment will be updated using that flavor.
        :param config: (optional) dict containing updated target-specific configuration for the
                       deployment
        :return: None
        """
        # Normalize the optional config so the membership tests below do not
        # raise TypeError when the caller omits it (default is None).
        if config is None:
            config = {}

        try:
            service = Webservice(self.workspace, name)
        except Exception as e:
            raise MlflowException(
                'Error retrieving deployment to update') from e

        models = None
        inference_config = None

        with TempDir(chdr=True) as tmp_dir:
            if model_uri:
                # Resolve the model reference and fetch the registered AzureML
                # model by its "name:version" id.
                model_name, model_version = handle_model_uri(model_uri, name)
                try:
                    aml_model = Model(self.workspace,
                                      id='{}:{}'.format(
                                          model_name, model_version))
                except Exception as e:
                    raise MlflowException(
                        'Failed to retrieve model to deploy') from e
                models = [aml_model]

                inference_config = create_inference_config(
                    tmp_dir, model_name, model_version, name)

            deploy_config = None
            if 'deploy-config-file' in config:
                try:
                    # TODO: Tags, properties, and description are not in the config file for some reason?
                    with open(config['deploy-config-file'],
                              'r') as deploy_file_stream:
                        deploy_config_obj = file_stream_to_object(
                            deploy_file_stream)
                        deploy_config = deploy_config_dict_to_obj(
                            deploy_config_obj, None, None, None)
                except Exception as e:
                    raise MlflowException(
                        'Failed to parse provided deployment config file'
                    ) from e

            aks_endpoint_version_config = None  # TODO deployment or version/service? Talk to PMs
            if 'aks-endpoint-deployment-config' in config:
                aks_endpoint_version_config = config[
                    'aks-endpoint-deployment-config']

            try:
                submit_update_call(service, models, inference_config,
                                   deploy_config, aks_endpoint_version_config)

                # Async mode returns immediately; otherwise block until the
                # updated service is ready to serve inference.
                if 'async' in config and config['async']:
                    _logger.info(
                        'AzureML deployment in progress, you can use get_deployment to check on the current '
                        'deployment status.')
                else:
                    service.wait_for_deployment(show_output=True)
            except Exception as e:
                raise MlflowException(
                    'Error submitting deployment update') from e