Exemplo n.º 1
0
    def delete_deployment(self, name):
        """
        Delete the deployment with name ``name``.

        If no webservice with that name exists, this is a no-op: an
        informational message is logged and the method returns.

        :param name: Name of deployment to delete
        :return: None
        :raises MlflowException: if the service exists but deletion fails
        """
        try:
            service = Webservice(self.workspace, name)
            service.delete()
        except WebserviceException as e:
            # BUG FIX: the original condition was inverted (``not in``), so
            # it logged "not found" and returned for every error EXCEPT
            # WebserviceNotFound, and raised MlflowException precisely when
            # the service did not exist. A missing service must be a no-op.
            if 'WebserviceNotFound' in e.message:
                _logger.info(
                    'Deployment with name {} not found, no service to delete'.
                    format(name))
                return
            raise MlflowException(
                'There was an error deleting the deployment: \n{}'.format(
                    e.message)) from e
Exemplo n.º 2
0
def _register_model_from_run(ws, experiment_name, model_name, asset_name,
                             asset_label, run_id):
    """
    Locate the completed run whose logged ``run_id`` metric matches
    ``run_id``, download its files, and register them as ``model_name``.

    :return: the registered azureml ``Model``.
    :raises Exception: when no matching completed run exists in the
        experiment.
    """
    # Temp workspace for the downloaded run files used for registration.
    tmp_path = create_tempdir(name='download_tmp')
    try:
        register_path = create_directory(AML_MLAPP_FOLDER, path=tmp_path)

        experiment = Experiment(workspace=ws, name=experiment_name)
        tags = {"run_id": run_id, "asset_name": asset_name}
        if asset_label is not None:
            tags["asset_label"] = asset_label

        # Tags narrow the candidates; the logged "run_id" metric is the
        # authoritative match (tag filtering alone is not trusted here).
        selected_run_id = None
        for run in Run.list(experiment,
                            tags=tags,
                            include_children=True,
                            status='Completed'):
            run_metrics = run.get_metrics()
            if run_metrics.get("run_id") == run_id:
                selected_run_id = run.id
                break
        if selected_run_id is None:
            raise Exception(
                'ERROR: there is no matching Run object that associated with the run id %s in this experiment.'
                % str(run_id))
        current_run = Run(experiment=experiment, run_id=selected_run_id)

        # download files from run object
        current_run.download_files(output_directory=register_path)

        # register model
        return Model.register(ws,
                              model_path=register_path,
                              model_name=model_name,
                              tags=tags,
                              description=asset_name)
    finally:
        # Always delete the tmp dir and all content -- the original only
        # cleaned up on success, leaking the directory on any error above.
        delete_directory_with_all_contents(tmp_path)


def deploy_model(ws, aci_service_name, experiment_name, asset_name,
                 asset_label, run_id, cpu_cores, memory_gb, entry_script):
    """
    Deploy a registered model as an Azure Container Instances webservice.

    If the model named after ``run_id`` is not yet registered, the matching
    completed run in ``experiment_name`` is found, its files downloaded,
    and the model registered first. An existing webservice named
    ``aci_service_name`` is updated in place; otherwise (or when the update
    fails) the service is deployed from scratch.

    :param ws: azureml Workspace.
    :param aci_service_name: name of the ACI webservice to create/update.
    :param experiment_name: experiment searched when registration is needed.
    :param asset_name: stored as a tag and as the model description.
    :param asset_label: optional tag; skipped when None.
    :param run_id: logical run id used to name the model and find the run.
    :param cpu_cores: CPU cores for the ACI deployment configuration.
    :param memory_gb: memory (GB) for the ACI deployment configuration.
    :param entry_script: scoring entry script for the InferenceConfig.
    :return: None. Blocks until the deployment finishes.
    """
    env = create_env_from_requirements(endpoint=True)
    inference_config = InferenceConfig(source_directory=os.getcwd(),
                                       entry_script=entry_script,
                                       environment=env)

    deployment_config = AciWebservice.deploy_configuration(cpu_cores=cpu_cores,
                                                           memory_gb=memory_gb)

    # model name
    model_name = get_model_register_name(run_id)
    try:
        model = Model(ws, name=model_name)
    except Exception:
        # Model lookup failed -> register it from the run's files.
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt. NOTE(review): presumably this is a
        # WebserviceException "ModelNotFound" -- confirm and narrow further.
        model = _register_model_from_run(ws, experiment_name, model_name,
                                         asset_name, asset_label, run_id)

    # deploy model: update in place when the service exists; otherwise (or
    # when the update fails) remove any stale service and deploy fresh.
    service = None
    try:
        service = Webservice(ws, name=aci_service_name)
        service.update(models=[model], inference_config=inference_config)
    except WebserviceException:
        if service:
            service.delete()
        service = Model.deploy(ws, aci_service_name, [model], inference_config,
                               deployment_config)

    service.wait_for_deployment(True)
Exemplo n.º 3
0
)
args = parser.parse_args()

run = Run.get_context()
ws = run.experiment.workspace

freezer_environment = ws.environments["sktime_freezer_environment"]

try:
    service = Webservice(ws, args.webservicename)
except WebserviceException:
    service = None

if args.redeploy:
    if service is not None:
        service.delete()
        print("deleted existing Webservice.")

    model = Model(ws, "sktime_freezer_classifier")

    inference_config = InferenceConfig(
        entry_script="score.py", source_directory="./", environment=freezer_environment
    )

    aci_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=1)

    service = Model.deploy(
        workspace=ws,
        name=args.webservicename,
        models=[model],
        inference_config=inference_config,