def submit_experiment(path_to_dataset_in_datastore, compute_target_name,
                      experiment_name, env_name, model_name, model_save_path):
    """Submit a training run to Azure ML, wait for it, then register the model.

    Args:
        path_to_dataset_in_datastore: Datastore-relative path to the training data.
        compute_target_name: Name of the compute target to run on.
        experiment_name: Name of the Azure ML experiment to submit under.
        env_name: Name of the registered environment to use for the run.
        model_name: Name to register the trained model under.
        model_save_path: Run-relative directory containing 'saved_model.pt'.

    Raises:
        Exception: Propagated from workspace connection or experiment submission.
            Model registration failures are reported but NOT raised (best effort).
    """
    import os  # local import: only os.path.join is needed here

    ws = _establish_connection_to_aml_workspace()

    try:
        experiment = Experiment(workspace=ws, name=experiment_name)
        running_config = _create_running_config(
            ws, path_to_dataset_in_datastore, compute_target_name,
            env_name, model_save_path)

        # Submit the experiment and surface the portal URL so the user can
        # follow progress interactively.
        run = experiment.submit(running_config)
        url = run.get_portal_url()
        print("The details for this experiment is here:\n {}".format(url))
        run.wait_for_completion()
        print(
            "Experiment run has completed, Note:This message does NOT suggest "
            "your experiment is successful, go to {} to find out logs and "
            "status for your experiment".format(url))
    except Exception:
        print("Submit Experiment failed!")
        raise  # bare raise preserves the original traceback

    # Registration is best-effort: a failure here is reported, not raised,
    # matching the original behavior so callers are not broken.
    try:
        run.register_model(
            model_name=model_name,
            model_path=os.path.join(model_save_path, 'saved_model.pt'))
    except Exception as e:
        print("Failed to register due to {}!".format(e))
def get_custom_env(env_name):
    """Fetch a registered Azure ML Environment by name.

    Args:
        env_name: Name of the environment registered in the workspace.

    Returns:
        The azureml Environment object.

    Raises:
        Exception: Propagated from workspace connection or Environment.get
            (e.g. when no environment with that name exists).
    """
    # The original try/except blocks only re-raised the caught exception,
    # which mangles the traceback; letting exceptions propagate is equivalent
    # and cleaner.
    ws = _establish_connection_to_aml_workspace()
    return Environment.get(ws, env_name)
def show_list_of_all_environments():
    """Print the name of every environment registered in the workspace.

    Raises:
        Exception: Propagated from workspace connection or Environment.list.
    """
    # try/except blocks that only `raise e` add nothing and hurt tracebacks;
    # exceptions propagate naturally without them.
    ws = _establish_connection_to_aml_workspace()
    for env_name in Environment.list(ws):
        print(env_name)
def register_new_conda_environment(env_name, yml_file_path):
    """Create an Azure ML environment from a conda YAML spec and register it.

    Args:
        env_name: Name under which to register the new environment.
        yml_file_path: Path to the conda environment specification YAML file.

    Raises:
        Exception: Propagated from workspace connection, spec parsing,
            or registration.
    """
    ws = _establish_connection_to_aml_workspace()
    new_env = Environment.from_conda_specification(name=env_name,
                                                   file_path=yml_file_path)
    new_env.register(ws)
    # Fixed typo in the confirmation message ("registerd" -> "registered").
    print("New environment {} has been registered".format(env_name))
def _create_inference_config(inference_env_name):
    """Build an InferenceConfig using a registered environment.

    Args:
        inference_env_name: Name of the registered environment to score with.

    Returns:
        An InferenceConfig pointing at 'score.py' in the 'deployment' directory.

    Raises:
        Exception: Propagated from workspace connection or config creation.
    """
    try:
        ws = _establish_connection_to_aml_workspace()
    except Exception:
        # Fixed typo in the error message ("workspce" -> "workspace").
        print("failed to connect to workspace")
        raise  # bare raise preserves the original traceback
    try:
        environment = Environment.get(workspace=ws, name=inference_env_name)
        inference_config = InferenceConfig(entry_script="score.py",
                                           environment=environment,
                                           source_directory=r'deployment')
        return inference_config
    except Exception:
        print("failed to create inference config")
        raise
def create_compute_instance(
        compute_target_name='testConfDSVM',
        resource_id=("/subscriptions/e4da59ca-11b0-454e-a89a-cd711dea9094"
                     "/resourcegroups/Barrys-ConfidentialML-Test"
                     "/providers/Microsoft.Compute/virtualMachines"
                     "/BarrysTestConfidentialDSVM"),
        ssh_port=22,
        username="******",
        password="******"):
    """Attach an existing DSVM to the workspace as a remote compute target.

    The previous hard-coded values are kept as parameter defaults so existing
    zero-argument callers behave identically, while new callers can attach a
    different VM.

    Args:
        compute_target_name: Name to register the compute target under.
        resource_id: Full Azure resource ID of the virtual machine to attach.
        ssh_port: SSH port on the VM.
        username: SSH username.
        password: SSH password.
            SECURITY NOTE(review): plaintext credentials in code/defaults
            should be replaced with a secret store or key-based auth.

    Raises:
        Exception: Propagated from workspace connection or compute attach.
    """
    ws = _establish_connection_to_aml_workspace()
    attach_config = RemoteCompute.attach_configuration(
        resource_id=resource_id,
        ssh_port=ssh_port,
        username=username,
        password=password,
    )
    compute = ComputeTarget.attach(ws, compute_target_name, attach_config)
    compute.wait_for_completion(show_output=True)
def create_compute_cluster(cluster_name="StandardCompute",
                           vm_size="Standard_F4s_v2",
                           max_nodes=4,
                           idle_seconds_before_scaledown=2400):
    """Ensure an AmlCompute cluster exists, creating it if necessary.

    The previous hard-coded values are kept as parameter defaults so existing
    zero-argument callers behave identically.

    Args:
        cluster_name: Name of the compute target to look up or create.
        vm_size: Azure VM SKU for cluster nodes.
        max_nodes: Maximum node count for autoscaling.
        idle_seconds_before_scaledown: Idle time before nodes scale down.

    Raises:
        Exception: Propagated from workspace connection or cluster creation.
    """
    print("Start searching for user's compute target...")
    ws = _establish_connection_to_aml_workspace()
    try:
        # Raises ComputeTargetException when the target does not exist yet.
        _get_compute_target(ws, cluster_name)
        print("User already has a compute target!")
    except ComputeTargetException:
        print("User does not have a compute target, starts creating...")
        compute_config = AmlCompute.provisioning_configuration(
            vm_size=vm_size,
            max_nodes=max_nodes,
            idle_seconds_before_scaledown=idle_seconds_before_scaledown)
        cpu_cluster = ComputeTarget.create(ws, cluster_name, compute_config)
        cpu_cluster.wait_for_completion(show_output=True)
        # Fixed typo in the status message ("craeted" -> "created").
        print("compute target created!")
def _start_deploy_model(inference_config, deployment_config, model_name,
                        model_version, deployment_name):
    """Deploy a registered model as a web service and print its scoring URI.

    Args:
        inference_config: InferenceConfig describing the scoring entry point.
        deployment_config: Deployment configuration (e.g. ACI/AKS config).
        model_name: Name of the registered model to deploy.
        model_version: Version of the registered model to deploy.
        deployment_name: Name for the deployed service.

    Raises:
        Exception: Propagated from workspace connection or deployment.
    """
    try:
        ws = _establish_connection_to_aml_workspace()
    except Exception:
        # Fixed typo in the error message ("workspce" -> "workspace").
        print("failed to connect to workspace")
        raise  # bare raise preserves the original traceback
    try:
        model = Model(workspace=ws, name=model_name, version=model_version)
        # overwrite=True replaces any existing service with the same name.
        service = model.deploy(workspace=ws,
                               name=deployment_name,
                               models=[model],
                               inference_config=inference_config,
                               deployment_config=deployment_config,
                               overwrite=True)
        service.wait_for_deployment()
        print(service.state)
        print("Deployed at {}".format(service.scoring_uri))
    except Exception:
        raise