def deploy_service(ws, model, inference_config, service_name, compute_target):
    """Deploy a model to AKS, updating the service in place if it exists.

    Parameters
    ----------
    ws : azureml.core.Workspace
        Workspace that owns the service.
    model : azureml.core.Model
        Registered model to serve; its name/version is recorded in the tags.
    inference_config : azureml.core.model.InferenceConfig
        Scoring entry script / environment configuration.
    service_name : str
        Name of the webservice to update or create.
    compute_target : azureml.core.ComputeTarget
        AKS target used when a fresh deployment is needed.

    Returns
    -------
    tuple
        ``(scoring_uri, token)`` where ``token`` is the first auth key when
        key auth is enabled, the access token when token auth is enabled,
        or ``None`` when the service has no auth configured.
    """
    tags = {'model': '{}:{}'.format(model.name, model.version)}
    try:
        # EAFP: constructing Webservice raises when the service is absent.
        service = Webservice(ws, service_name)
        print("Service {} exists, update it".format(service_name))
        service.update(models=[model], inference_config=inference_config,
                       tags=tags)
    except Exception:
        print('deploy a new service {}'.format(service_name))
        deployment_config = AksWebservice.deploy_configuration(
            cpu_cores=1, memory_gb=2, tags=tags,
            collect_model_data=True, enable_app_insights=True)
        service = Model.deploy(ws, service_name, [model], inference_config,
                               deployment_config, compute_target)
    service.wait_for_deployment(show_output=True)
    # BUG FIX: `token` was unbound when neither key auth nor token auth is
    # enabled, raising NameError at the return below; default it to None.
    token = None
    if service.auth_enabled:
        token = service.get_keys()[0]
    elif service.token_auth_enabled:
        token = service.get_token()[0]
    return service.scoring_uri, token
# NOTE(review): this chunk begins mid-statement — the first line below is the
# tail of an AKS compute-provisioning call (presumably ComputeTarget.create)
# whose opening is outside this view; do not treat it as standalone code.
    provisioning_configuration=prov_config)
# Block until the AKS cluster finishes provisioning, then surface its status
# and any provisioning errors for diagnostics.
aks_target.wait_for_completion(show_output=True)
print(aks_target.provisioning_state)
print(aks_target.provisioning_errors)

# Use the default configuration (can also provide parameters to customize)
aks_config = AksWebservice.deploy_configuration(enable_app_insights=True)

# Deploy the previously built container image to the AKS target.
service = Webservice.deploy_from_image(
    workspace=ws,
    name=aks_service_name,
    image=image,
    deployment_config=aks_config,
    deployment_target=aks_target,
)
service.wait_for_deployment(show_output=True)
print(service.state)
print("Deployed AKS Webservice: {} \nWebservice Uri: {}".format(
    service.name, service.scoring_uri))

# Writing the AKS details to /aml_config/aks_webservice.json
aks_webservice = {}
aks_webservice["aks_name"] = aks_name
aks_webservice["aks_service_name"] = service.name
aks_webservice["aks_url"] = service.scoring_uri
# NOTE(review): the service auth keys are persisted to disk in plain text —
# confirm this file is not committed or shared.
aks_webservice["aks_keys"] = service.get_keys()
with open("aml_config/aks_webservice.json", "w") as outfile:
    json.dump(aks_webservice, outfile)
# store model id in json file update_json(options.params,{"model_id": model_id}) else: # from list of models, pick newest one with the provided name models = [x for x in ws.models() if x.name==model_name] import dateutil.parser model_id = sorted(models,key=lambda x: dateutil.parser.parse(x.created_time))[-1].id # store model id in json file update_json(options.params,{"model_id": model_id}) else: # use stored model id model_id = get_from_json(options.params,"model_id") service = None if len(service_name)>0: try: service = Webservice(ws,service_name) if options.delete: print("Deleting: "+str(service.id)) service.delete() service = None except WebserviceException: if not options.delete: service = Webservice.deploy_from_model(ws, service_name, [Model(ws,id=model_id)], BrainwaveImage.image_configuration(), BrainwaveWebservice.deploy_configuration()) service.wait_for_deployment(True) if service is not None: update_json(options.params, {"address": service.ip_address, "port": service.port})
# COMMAND ----------

from azureml.core.webservice import Webservice, AksWebservice
from azureml.core.image import Image

# Get Model: look up the container image previously built for the model.
model_image = Image(workspace, id=model_image_id)

# Get Webservice: update it in place if it exists, otherwise deploy fresh.
prod_webservice_name = "wine-quality-aks"
try:
    # EAFP: constructing Webservice raises when the service does not exist.
    prod_webservice = Webservice(workspace, prod_webservice_name)
    print('updating existing webservice.')
    prod_webservice.update(image=model_image)
    prod_webservice.wait_for_deployment(show_output=True)
# BUG FIX: was a bare `except:`, which also swallows SystemExit and
# KeyboardInterrupt; narrowed to Exception.
except Exception:
    print('creating new webservice.')
    # Set configuration and service name
    prod_webservice_deployment_config = AksWebservice.deploy_configuration()
    # Deploy from image
    prod_webservice = Webservice.deploy_from_image(
        workspace=workspace,
        name=prod_webservice_name,
        image=model_image,
        deployment_config=prod_webservice_deployment_config,
        deployment_target=aks_target)
    # Wait for the deployment to complete
    prod_webservice.wait_for_deployment(show_output=True)

# COMMAND ----------
def main():
    """Deploy the registered models as a single ACI webservice.

    Reads configuration from the environment (``Env``), resolves the model
    names listed in ``pipeline_config.json``, builds an inference config from
    the training sources directory, deletes any pre-existing service with the
    same name, and deploys a fresh ACI service, printing its final state.
    """
    e = Env()

    # Get Azure machine learning workspace
    ws = Workspace.get(
        name=e.workspace_name,
        subscription_id=e.subscription_id,
        resource_group=e.resource_group)
    print(f"get_workspace: {ws}")

    # Parameters
    sources_directory_train = e.sources_directory_train

    # Model names come from pipeline_config.json — the same file score.py
    # reads directly, so no MODEL_NAMES env var is passed anymore.
    model_names = get_model_names(
        os.path.join(sources_directory_train, "pipeline_config.json"))
    models = [Model(ws, name=model_name) for model_name in model_names]

    # Conda environment
    myenv = Environment.from_conda_specification(
        "myenv",
        os.path.join(sources_directory_train, "conda_dependencies.yml"))
    # Enable Docker based environment
    myenv.docker.enabled = True

    inference_config = InferenceConfig(
        source_directory=sources_directory_train,
        entry_script="scoring/score.py",
        environment=myenv)

    deployment_config = AciWebservice.deploy_configuration(
        cpu_cores=1,
        memory_gb=2,
        tags={
            'area': "digits",
            'type': aci_service_name
        },
        description=aci_service_name)

    try:
        # Check whether the service already exists; delete it so the deploy
        # below starts from a clean slate.
        service = Webservice(ws, name=aci_service_name)
        if service:
            print("Found existing service: %s .. delete it"
                  % aci_service_name)
            service.delete()
    # BUG FIX: the exception was bound as `e`, shadowing the Env instance
    # above (and Python 3 unbinds the name when the handler exits); use a
    # distinct name so `e` stays valid.
    except WebserviceException as ex:
        print(ex)

    service = Model.deploy(ws, aci_service_name, models, inference_config,
                           deployment_config)
    service.wait_for_deployment(True)
    print(service.state)