def __deploy_service(self, image, compute):
        service_config = self.__config['deploy']
        services = AksWebservice.list(self.__ws)
        service = next(
            (svc for svc in services if svc.name == service_config['name']),
            None)
        if service:
            service.update(auth_enabled=service_config['auth'])
            service.wait_for_deployment(show_output=True)
            return service
        aks_config = AksWebservice.deploy_configuration(
            auth_enabled=True,
            max_request_wait_time=75000,
            replica_max_concurrent_requests=100,
            autoscale_enabled=False,
            num_replicas=15)
        aks_service_name = service_config['name']
        aks_service = Webservice.deploy_from_image(
            workspace=self.__ws,
            name=aks_service_name,
            image=image,
            deployment_config=aks_config,
            deployment_target=compute)

        aks_service.wait_for_deployment(show_output=True)
        print(aks_service.state)
        return aks_service
Example #2
def connect_webservice(ws):
    with open('config/aks_config.json') as f:
        aks_config = json.load(f)

    aks_service = AksWebservice(ws, name=aks_config["name"])
    # print(aks_service.state)

    url = aks_service.scoring_uri
    key = aks_service.get_keys()[0]

    return url, key
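The (url, key) pair returned above is all a client needs to score; a minimal sketch using requests, where the payload shape is an assumption that depends on the deployed score.py:

import json
import requests

url, key = connect_webservice(ws)
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {key}",
}
payload = json.dumps({"data": [[0.1, 0.2, 0.3]]})  # hypothetical input
print(requests.post(url, data=payload, headers=headers).json())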
def test_fpga_service(workspace):
    # Using the grpc client in Azure ML Accelerated Models SDK package
    aks_service_name = "my-aks-service"
    aks_service = AksWebservice(workspace=workspace, name=aks_service_name)
    client = FPGARealtimeScore.get_prediction_client(aks_service)

    # Score image with input and output tensor names
    input_tensors, output_tensors = FPGARealtimeScore.get_resnet50_IO()
    wget.download(
        "https://raw.githubusercontent.com/Azure/MachineLearningNotebooks/"
        "master/how-to-use-azureml/deployment/accelerated-models/snowleopardgaze.jpg"
    )

    results = client.score_file(path="snowleopardgaze.jpg",
                                input_name=input_tensors,
                                outputs=output_tensors)

    # pair each class id with its confidence score
    results = enumerate(results)
    # sort results by confidence
    sorted_results = sorted(results, key=lambda x: x[1], reverse=True)
    # print top 5 results
    classes_entries = requests.get(
        "https://raw.githubusercontent.com/Lasagne/Recipes/"
        "master/examples/resnet50/imagenet_classes.txt").text.splitlines()
    for top in sorted_results[:5]:
        print(classes_entries[top[0]], "confidence:", top[1])
def test_service(service: AksWebservice,
                 container: str,
                 blob: str,
                 write_logs: bool = True) -> None:
    if write_logs:
        logs = service.get_logs()
        with open("logs.txt", "w") as fp:
            fp.write(logs)
    data = {"container": container, "blob": blob}
    data_raw = bytes(json.dumps({"data": data}), encoding="utf8")
    print("Testing service: {0}".format(service.name))
    print("Container: {0}, blob: {1}".format(container, blob))
    ping = time.time()
    response = service.run(input_data=data_raw)
    print("Elapsed time: {0:.5f}".format(time.time() - ping))
    print("Response: {0}".format(response))
Example #5
def deploy_service(ws, model, inference_config, service_name, compute_target):
    tags = {'model': '{}:{}'.format(model.name, model.version)}

    try:
        service = Webservice(ws, service_name)
        print("Service {} exists, update it".format(service_name))
        service.update(models=[model],
                       inference_config=inference_config,
                       tags=tags)
    except Exception:
        print('deploy a new service {}'.format(service_name))
        deployment_config = AksWebservice.deploy_configuration(
            cpu_cores=1,
            memory_gb=2,
            tags=tags,
            collect_model_data=True,
            enable_app_insights=True)
        service = Model.deploy(ws, service_name, [model], inference_config,
                               deployment_config, compute_target)

    service.wait_for_deployment(show_output=True)

    if service.auth_enabled:
        token = service.get_keys()[0]
    elif service.token_auth_enabled:
        token = service.get_token()[0]
    else:
        token = None

    return service.scoring_uri, token
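A hypothetical call site for deploy_service, assuming ws, model and inference_config already exist and "aks-prod" names a provisioned AKS cluster attached to the workspace:

from azureml.core.compute import AksCompute

aks_target = AksCompute(ws, "aks-prod")
scoring_uri, token = deploy_service(ws, model, inference_config,
                                    "my-service", aks_target)
print(scoring_uri)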
Example #6
def test_deployed_model_service():
    service = AksWebservice(ws, deployment_name)
    assert service is not None

    key1, key2 = service.get_keys()
    uri = service.scoring_uri

    assert key1 is not None
    assert uri.startswith('http')

    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {key1}'
    }
    response = requests.post(uri, test_sample, headers=headers)
    assert response.status_code == 200
    assert abs(1 - sum(response.json()['predict_proba'][0])) < 0.01
def test_gpu_service(workspace):
    aks_service_name = "deepaksservice"

    assert aks_service_name in workspace.webservices, f"{aks_service_name} not found."
    aks_service = AksWebservice(workspace, name=aks_service_name)
    assert (aks_service.state == "Healthy"
            ), f"{aks_service_name} is in state {aks_service.state}."
    scoring_url = aks_service.scoring_uri
    print(scoring_url)
    api_key = aks_service.get_keys()[0]
    import requests

    headers = {"Authorization": ("Bearer " + api_key)}

    files = {"image": open("snowleopardgaze.jpg", "rb")}
    r_get = requests.get(scoring_url, headers=headers)
    assert r_get
    r_post = requests.post(scoring_url, files=files, headers=headers)
    assert r_post
Example #8
def deploy_new_webservice(ws, compute_name, webservice_name, image):
    aks_target = ws.compute_targets[compute_name]
    aks_config = AksWebservice.deploy_configuration(collect_model_data=True,
                                                    enable_app_insights=True)
    service = Webservice.deploy_from_image(workspace=ws,
                                           name=webservice_name,
                                           image=image,
                                           deployment_config=aks_config,
                                           deployment_target=aks_target)
    service.wait_for_deployment(show_output=True)
    print(service.state)
Example #9
def call_web_service(e, service_type, service_name):
    aml_workspace = Workspace.get(name=e.workspace_name,
                                  subscription_id=e.subscription_id,
                                  resource_group=e.resource_group)
    print('fetching webservice')
    if service_type == 'AKS':
        service = AksWebservice(aml_workspace, service_name)
    elif service_type == 'ACI':
        service = AciWebservice(aml_workspace, service_name)
    else:
        raise ValueError(f'service type {service_type} is not supported')

    headers = {}
    if service.auth_enabled:
        service_keys = service.get_keys()
        headers['Authorization'] = 'Bearer ' + service_keys[0]

    scoring_url = service.scoring_uri
    print(f'scoring url: {scoring_url}')
    output = call_web_app(scoring_url, headers)

    return output
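call_web_app() is defined elsewhere in this project; a minimal sketch of what it might look like, assuming the endpoint answers a plain GET with the auth headers built above:

import requests

def call_web_app(url, headers):
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    return response.text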
Example #10
def deploy(local, aks, aci, num_cores, mem_gb, compute_name):
    # Get the workspace
    ws = Workspace.from_config()
    # Create inference configuration based on the environment definition and the entry script
    # yolo = Environment.from_conda_specification(name="env", file_path="yolo.yml")
    yolo = Environment.from_pip_requirements(
        name="yolo", file_path="./deployed_requirements.txt")
    # yolo.save_to_directory('')
    yolo.register(workspace=ws)
    inference_config = InferenceConfig(entry_script="azure.py",
                                       environment=yolo,
                                       source_directory="yolov5")
    # Retrieve registered model
    model = Model(ws, id="lpr:1")
    deploy_target = None
    if local:
        # Create a local deployment, using port 8890 for the web service endpoint
        deployment_config = LocalWebservice.deploy_configuration(port=8890)
    elif aks:
        # Create an AKS deployment
        deployment_config = AksWebservice.deploy_configuration(
            cpu_cores=num_cores,
            memory_gb=mem_gb,
            compute_target_name=compute_name)
        deploy_target = ComputeTarget(workspace=ws, name=compute_name)
        # if deploy_target.get_status() != "Succeeded":
        #     print(f"Deploy Target: {deploy_target.get_status()}")
        #     deploy_target.wait_for_completion(show_output=True)
    elif aci:
        # Create an ACI deployment (ACI is serverless, so no compute target)
        deployment_config = AciWebservice.deploy_configuration(
            cpu_cores=num_cores,
            memory_gb=mem_gb)
    else:
        raise NotImplementedError("Choose deploy target please")
    # Deploy the service
    print("Deploying:")
    service = Model.deploy(workspace=ws,
                           name="lpr",
                           models=[model],
                           inference_config=inference_config,
                           deployment_config=deployment_config,
                           overwrite=True,
                           deployment_target=deploy_target)
    # Wait for the deployment to complete
    print("Waiting for deployment to complete:")
    service.wait_for_deployment(True)
    # Display the port that the web service is available on
    if local:
        print(service.port)
Example #11
def createWebservice(workspace, container_image, service_name, replica_count,
                     cores_count, compute_target):
    '''
        TODO: Should allow for the overwrite flag.

        Attach an azureml.core.webservice.Webservice for a given container on an AKS cluster.

        If a Webservice already exists (by name) in the given workspace, return it instead.


        PARAMS: 
            workspace        : azureml.core.Workspace               : Existing AMLS Workspace
            container_image  : azureml.core.image.ContainerImage    : Existing container image to deploy
            service_name     : String                               : Name of the webservice (deployment) in the AMLS workspace.
            replica_count    : int                                  : Number of requested instances of container on cluster.
            cores_count      : int                                  : Number of cores to allocate to each container
            compute_target   : azureml.core.compute.AksCompute      : AKS cluster to create the service on

        RETURNS: 
            azureml.core.webservice.Webservice

    '''
    web_service = None

    services = Webservice.list(workspace=workspace,
                               image_name=container_image.name)
    if len(services) > 0:
        for svc in services:
            if svc.name == service_name:
                print("Returning existing deployed web service ....",
                      service_name)
                web_service = svc
                break

    if web_service is None:
        print("Creating new web service.....", service_name)
        aks_config = AksWebservice.deploy_configuration(
            num_replicas=replica_count, cpu_cores=cores_count)

        web_service = Webservice.deploy_from_image(
            workspace=workspace,
            name=service_name,
            image=container_image,
            deployment_config=aks_config,
            deployment_target=compute_target,
        )

        web_service.wait_for_deployment(show_output=True)

    return web_service
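A hypothetical call site; ws, image and aks_target stand in for an existing workspace, registered container image, and attached AKS cluster:

service = createWebservice(workspace=ws,
                           container_image=image,
                           service_name="my-aks-service",
                           replica_count=2,
                           cores_count=1,
                           compute_target=aks_target)
print(service.scoring_uri)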
Example #12
def create_aks_service(name: str, image_config: ImageConfig,
                       models: List[Model], target: ComputeTarget,
                       ws: Workspace) -> Webservice:

    print("Loading AKS deploy config from deployconfig_aks.yml")
    deploy_conf = AksWebservice.deploy_configuration()
    print(models)
    service = Webservice.deploy_from_model(workspace=ws,
                                           name=name,
                                           deployment_target=target,
                                           models=models,
                                           deployment_config=deploy_conf,
                                           image_config=image_config)

    service.wait_for_deployment(show_output=True)
    return service
Example #13
def deploy_to_aks(workspace, model_image, aks_target, prod_webservice_name="dsswe-mprodm"):
    from azureml.core.webservice import Webservice, AksWebservice
    # Deploy a model image to AKS
    print("Deploying to AKS...")
    # Set configuration and service name
    prod_webservice_deployment_config = AksWebservice.deploy_configuration()
    # Deploy from image
    prod_webservice = Webservice.deploy_from_image(workspace=workspace,
                                                   name=prod_webservice_name,
                                                   image=model_image,
                                                   deployment_config=prod_webservice_deployment_config,
                                                   deployment_target=aks_target)
    # Wait for the deployment to complete
    prod_webservice.wait_for_deployment(show_output=True)
    print("Deployment to AKS completed sucessfully")
    return prod_webservice
Example #14
def deploy_aks(workspace, model_azure, endpoint_name, inference_config, aks_name):
  aks_target = AksCompute(workspace, aks_name)
  aks_config = AksWebservice.deploy_configuration()

  aks_service = Model.deploy(workspace=workspace,
                             name=endpoint_name,
                             models=[model_azure],
                             inference_config=inference_config,
                             deployment_config=aks_config,
                             deployment_target=aks_target,
                             overwrite=True)

  aks_service.wait_for_deployment(show_output=True)

  print(f"Endpoint {endpoint_name} was successfully deployed to AKS")
  print(f"Scoring URI: {aks_service.scoring_uri}")
  print('')
def call_web_service(e, service_type, service_name):
    aml_workspace = Workspace.get(name=e.workspace_name,
                                  subscription_id=e.subscription_id,
                                  resource_group=e.resource_group)
    print("Fetching service")
    headers = {}
    if service_type == "ACI":
        service = AciWebservice(aml_workspace, service_name)
    else:
        service = AksWebservice(aml_workspace, service_name)
    if service.auth_enabled:
        service_keys = service.get_keys()
        headers['Authorization'] = 'Bearer ' + service_keys[0]
    print("Testing service")
    print(". url: %s" % service.scoring_uri)
    output = call_web_app(service.scoring_uri, headers)

    return output
Example #16
def deploy(workspace,
           name,
           model,
           script,
           source_directory,
           environment=None,
           target='local',
           cpu_cores=1,
           memory_gb=1,
           compute_target_name=None):
    inference_config = InferenceConfig(entry_script=script,
                                       source_directory=source_directory,
                                       environment=environment)

    if target == 'local':
        deployment_config = LocalWebservice.deploy_configuration(port=8890)
    elif target == 'aci':
        deployment_config = AciWebservice.deploy_configuration(
            cpu_cores=cpu_cores, memory_gb=memory_gb)
    elif target == 'aks':
        if compute_target_name is None:
            print("compute_target_name required when target='aks'")
            return None
        deployment_config = AksWebservice.deploy_configuration(
            cpu_cores=cpu_cores,
            memory_gb=memory_gb,
            compute_target_name=compute_target_name,
            auth_enabled=False)
    else:
        raise ValueError(f"Unknown deployment target: {target}")

    try:
        service = Webservice(workspace, name)
    except WebserviceException:
        service = None

    if service is None:
        service = Model.deploy(workspace, name, [model], inference_config,
                               deployment_config)
    else:
        print(
            "Existing service with that name found, updating InferenceConfig\n"
            "If you meant to redeploy or change the deployment option, first "
            "delete the existing service.")
        service.update(models=[model], inference_config=inference_config)
    return service
Example #17
def deploy_image():
    ws = get_workspace()
    azure_image = get_image()

    # Set the web service configuration (using default here with app insights)
    aks_config = AksWebservice.deploy_configuration(enable_app_insights=True)

    # Unique service name
    service_name = AKS_NAME

    aks_target = get_cluster()

    # Webservice creation using single command
    aks_service = Webservice.deploy_from_image(workspace=ws,
                                               name=service_name,
                                               deployment_config=aks_config,
                                               image=azure_image,
                                               deployment_target=aks_target)

    aks_service.wait_for_deployment(show_output=True)
Example #18
def deploy_to_AKS(workspace, attachment_name, service_name, models, inference_config, token_auth_enabled=True,
                  cpu_cores=1, memory_gb=1, overwrite=True):
    services = workspace.webservices
    if service_name in services and overwrite:
        print('found existing service named {}, delete it right now...'.format(service_name))
        services[service_name].delete()
    # Only one type of auth may be enabled at a time
    if token_auth_enabled:
        auth_enabled = False  # disable key auth
    else:
        auth_enabled = True   # fall back to key auth
    print('auth type: {}'.format('token' if token_auth_enabled else 'key'))
    aks_target = AksCompute(workspace, attachment_name)
    # If deploying to a cluster configured for dev/test, ensure that it was created with enough
    # cores and memory to handle this deployment configuration. Note that memory is also used by
    # things such as dependencies and AML components.
    deployment_config = AksWebservice.deploy_configuration(cpu_cores=cpu_cores, memory_gb=memory_gb,
                                                           token_auth_enabled=token_auth_enabled,
                                                           auth_enabled=auth_enabled)
    service = Model.deploy(workspace, service_name, models, inference_config, deployment_config, aks_target)
    service.wait_for_deployment(show_output=True)
    print(service.state)
    return service
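Since deploy_to_AKS defaults to token authentication, a client has to present a token rather than a key; a minimal sketch of scoring against the returned service (ws, "my-aks", model and the payload shape are placeholders and assumptions):

import json
import requests

service = deploy_to_AKS(ws, "my-aks", "my-service", [model], inference_config)
token, refresh_after = service.get_token()
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {token}",
}
body = json.dumps({"data": [[1, 2, 3]]})  # hypothetical input
print(requests.post(service.scoring_uri, data=body, headers=headers).json())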
Example #19
def test_aks(directory: str, aks_service: AksWebservice):
    """
    Test AKS with sample call.

    :param directory: directory of data_folder with test data
    :param aks_service: AKS Web Service to Test
    """
    num_dupes_to_score = 4

    dupes_test = get_dupes_test(directory)
    text_to_score = dupes_test.iloc[0, num_dupes_to_score]

    json_text = text_to_json(text_to_score)

    scoring_url = aks_service.scoring_uri
    api_key = aks_service.get_keys()[0]

    headers = {
        "content-type": "application/json",
        "Authorization": ("Bearer " + api_key),
    }
    # Run the request twice since the first time takes a little longer
    # due to the loading of the model
    requests.post(scoring_url, data=json_text, headers=headers)
    r = requests.post(scoring_url, data=json_text, headers=headers)
    print(r)

    dupes_to_score = dupes_test.iloc[:5, num_dupes_to_score]

    text_data = list(map(text_to_json,
                         dupes_to_score))  # Retrieve the text data
    for text in text_data:
        r = requests.post(scoring_url, data=text, headers=headers)
        print(r)
Example #20
                                                        vm_size="Standard_F2",
                                                        location="eastus")
    print(
        "No AKS found in aks_webservice.json. Creating new Aks: {} and AKS Webservice: {}"
        .format(aks_name, aks_service_name))
    # Create the cluster
    aks_target = ComputeTarget.create(workspace=ws,
                                      name=aks_name,
                                      provisioning_configuration=prov_config)

    aks_target.wait_for_completion(show_output=True)
    print(aks_target.provisioning_state)
    print(aks_target.provisioning_errors)

    # Use the default configuration (can also provide parameters to customize)
    aks_config = AksWebservice.deploy_configuration(enable_app_insights=True)

    service = Webservice.deploy_from_image(
        workspace=ws,
        name=aks_service_name,
        image=image,
        deployment_config=aks_config,
        deployment_target=aks_target,
    )

    service.wait_for_deployment(show_output=True)
    print(service.state)
    print("Deployed AKS Webservice: {} \nWebservice Uri: {}".format(
        service.name, service.scoring_uri))

# Writing the AKS details to /aml_config/aks_webservice.json
Example #21
# MAGIC The code in the following cell will deploy the model selected from MLFlow in the cells above

# COMMAND ----------

# MAGIC %md
# MAGIC ## Create a new web service deployment from a model image

# COMMAND ----------

from azureml.core.webservice import Webservice, AksWebservice

# Set configuration and service name
webservice_name = "<prod_webservice_name>"

#default configuration can be modified for custom requirements
webservice_deployment_config = AksWebservice.deploy_configuration()

# Deploy from image selected above
webservice = Webservice.deploy_from_image(
    workspace=workspace,
    name=webservice_name,
    image=model_image,
    deployment_config=webservice_deployment_config,
    deployment_target=aks_target)

#wait for the webservice to be deployed
webservice.wait_for_deployment(show_output=True)

# COMMAND ----------

print(webservice.get_logs())
Example #22
# The inference config is ready, so next we configure the compute to which the service will be deployed.
# For an AKS deployment, the cluster and a compute target must be created before deploying.

# Creating the AKS cluster (Azure Kubernetes Service)

from azureml.core.compute import ComputeTarget, AksCompute

cluster_name = 'aks-cluster'
compute_config = AksCompute.provisioning_configuration(location="eastus")
production_cluster = ComputeTarget.create(ws, cluster_name, compute_config)
production_cluster.wait_for_completion(show_output=True)

from azureml.core.webservice import AksWebservice

classifier_deploy_config = AksWebservice.deploy_configuration(cpu_cores=1,
                                                              memory_gb=1)

# Finally, deploying the model

from azureml.core.model import Model

model = ws.models['classification_model']
service = Model.deploy(workspace=ws,
                       name='classifier-service',
                       models=[model],
                       inference_config=class_inference_config,
                       deployment_config=classifier_deploy_config,
                       deployment_target=production_cluster)

service.wait_for_deployment(show_output=True)
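Once the deployment completes, the service can be smoke-tested through the SDK without hand-crafting HTTP requests; a minimal sketch, assuming the entry script accepts a JSON body of the form {"data": [...]} (the feature vector below is a made-up placeholder):

import json

x_new = [[2, 180, 74, 24, 21, 23.9, 1.14, 22]]
predictions = service.run(input_data=json.dumps({"data": x_new}))
print(predictions)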
Example #23
    print("No AKS found. Creating new Aks: {} and AKS Webservice: {}".format(
        aks_name, aks_service_name))
    prov_config = AksCompute.provisioning_configuration(location=aks_region)
    # Create the cluster
    aks_target = ComputeTarget.create(workspace=ws,
                                      name=aks_name,
                                      provisioning_configuration=prov_config)
    aks_target.wait_for_completion(show_output=True)
    print(aks_target.provisioning_state)
    print(aks_target.provisioning_errors)

print("Creating new webservice")
# Create the web service configuration (using defaults)
aks_config = AksWebservice.deploy_configuration(description=args.description,
                                                tags={
                                                    'name': aks_name,
                                                    'image_id': image.id
                                                })
service = Webservice.deploy_from_image(workspace=ws,
                                       name=aks_service_name,
                                       image=image,
                                       deployment_config=aks_config,
                                       deployment_target=aks_target)
service.wait_for_deployment(show_output=True)
print(service.state)

api_key, _ = service.get_keys()
print(
    "Deployed AKS Webservice: {} \nWebservice Uri: {} \nWebservice API Key: {}"
    .format(service.name, service.scoring_uri, api_key))
Example #24
status = aks_target.get_status()
while status not in ('Succeeded', 'Failed'):
    print('current status: {} - waiting...'.format(status))
    time.sleep(10)
    status = aks_target.get_status()

from azureml.core.webservice import Webservice, AksWebservice

aks_service_name = 'sklearn-mnist-aks-svc'

# Set the web service configuration (using default here)
aks_config = AksWebservice.deploy_configuration(
    cpu_cores=1,
    memory_gb=1,
    tags={
        "data": "MNIST",
        "method": "sklearn"
    },
    description='Predict MNIST with sklearn')

aks_service = Webservice.deploy_from_image(workspace=ws,
                                           name=aks_service_name,
                                           image=image,
                                           deployment_config=aks_config,
                                           deployment_target=aks_target)

aks_service.wait_for_deployment(show_output=True)
print(aks_service.state)

print("Testing deployed service via HTTP call...")
import requests
Example #25
        # Use the default configuration (can also provide parameters to customize)
        prov_config = AksCompute.provisioning_configuration()

        # Create the cluster
        aks_target = ComputeTarget.create(
            workspace=ws, name=aksName, provisioning_configuration=prov_config)

        print("Wait for AKS compute target....")
        aks_target.wait_for_completion(show_output=True)
        print(aks_target.provisioning_state)
        print(aks_target.provisioning_errors)

    if not aks_service:
        #Set the web service configuration (using default here)
        aks_config = AksWebservice.deploy_configuration()

        aks_service = Webservice.deploy_from_image(
            workspace=ws,
            name=aksServiceName,
            image=containerImage,
            deployment_config=aks_config,
            deployment_target=aks_target)

        print("Wait for AKS service....")
        aks_service.wait_for_deployment(show_output=True)
        print(aks_service.state)

    # Now get ready to call the service
    key1, key2 = aks_service.get_keys()
    print(key1)
Example #26
                      exist_ok=True)

# Provision AKS cluster
prov_config = AksCompute.provisioning_configuration(vm_size="Standard_D14")
prov_config.enable_ssl(leaf_domain_label=https_cert)
# Create the cluster
aks_target = ComputeTarget.create(workspace=ws,
                                  name=aks_name,
                                  provisioning_configuration=prov_config)

inference_config = InferenceConfig(runtime="python",
                                   entry_script="aml_app.py",
                                   conda_file="myenv.yml",
                                   extra_docker_file_steps='dockerfile')

aks_python_bot = AksWebservice.deploy_configuration(autoscale_enabled=False,
                                                    num_replicas=3,
                                                    cpu_cores=2,
                                                    memory_gb=4,
                                                    auth_enabled=False)

aks_service = Model.deploy(ws,
                           models=['aml_app.py'],
                           inference_config=inference_config,
                           deployment_config=aks_python_bot,
                           deployment_target=aks_target,
                           name=aks_service_name)

aks_service.wait_for_deployment(show_output=True)
print(aks_service.state)
Example #27
def main():
    # Loading input values
    print("::debug::Loading input values")
    model_name = os.environ.get("INPUT_MODEL_NAME", default=None)
    model_version = os.environ.get("INPUT_MODEL_VERSION", default=None)

    # Casting input values
    print("::debug::Casting input values")
    try:
        model_version = int(model_version)
    except TypeError as exception:
        print(f"::debug::Could not cast model version to int: {exception}")
        model_version = None
    except ValueError as exception:
        print(f"::debug::Could not cast model version to int: {exception}")
        model_version = None

    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-compute/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=azure_credentials,
                  schema=azure_credentials_schema,
                  input_name="AZURE_CREDENTIALS")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE",
                                     default="deploy.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/deploy.json)."
        )
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=parameters,
                  schema=parameters_schema,
                  input_name="PARAMETERS_FILE")

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""))
    config_file_path = os.environ.get("GITHUB_WORKSPACE",
                                      default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(path=config_file_path,
                                   _file_name=config_file_name,
                                   auth=sp_auth)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}"
        )
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorizationfailed: {exception}")
        raise ProjectSystemException

    # Loading deployment target
    print("::debug::Loading deployment target")
    try:
        deployment_target = ComputeTarget(workspace=ws,
                                          name=parameters.get(
                                              "deployment_compute_target", ""))
    except ComputeTargetException:
        deployment_target = None
    except TypeError:
        deployment_target = None

    # Loading model
    print("::debug::Loading model")
    try:
        model = Model(workspace=ws, name=model_name, version=model_version)
    except WebserviceException as exception:
        print(
            f"::error::Could not load model with provided details: {exception}"
        )
        raise AMLConfigurationException(
            f"Could not load model with provided details: {exception}")

    # Creating inference config
    print("::debug::Creating inference config")
    if os.environ.get("CONTAINER_REGISTRY_ADRESS", None) is not None:
        container_registry = ContainerRegistry()
        container_registry.address = os.environ.get(
            "CONTAINER_REGISTRY_ADRESS", None)
        container_registry.username = os.environ.get(
            "CONTAINER_REGISTRY_USERNAME", None)
        container_registry.password = os.environ.get(
            "CONTAINER_REGISTRY_PASSWORD", None)
    else:
        container_registry = None

    try:
        inference_config = InferenceConfig(
            entry_script=parameters.get("inference_entry_script", "score.py"),
            runtime=parameters.get("runtime", "python"),
            conda_file=parameters.get("conda_file", "environment.yml"),
            extra_docker_file_steps=parameters.get("extra_docker_file_steps",
                                                   None),
            source_directory=parameters.get("inference_source_directory",
                                            "code/deploy/"),
            enable_gpu=parameters.get("enable_gpu", None),
            description=parameters.get("description", None),
            base_image=parameters.get("base_image", None),
            base_image_registry=container_registry,
            cuda_version=parameters.get("cuda_version", None))
    except WebserviceException as exception:
        print(
            f"::debug::Failed to create InferenceConfig. Trying to create no code deployment: {exception}"
        )
        inference_config = None
    except TypeError as exception:
        print(
            f"::debug::Failed to create InferenceConfig. Trying to create no code deployment: {exception}"
        )
        inference_config = None

    # Loading run config
    print("::debug::Loading run config")
    model_resource_config = model.resource_configuration
    cpu_cores = get_resource_config(config=parameters.get("cpu_cores", None),
                                    resource_config=model_resource_config,
                                    config_name="cpu")
    memory_gb = get_resource_config(config=parameters.get("memory_gb", None),
                                    resource_config=model_resource_config,
                                    config_name="memory_in_gb")
    gpu_cores = get_resource_config(config=parameters.get("gpu_cores", None),
                                    resource_config=model_resource_config,
                                    config_name="gpu")

    # Creating deployment config
    print("::debug::Creating deployment config")
    if type(deployment_target) is AksCompute:
        deployment_config = AksWebservice.deploy_configuration(
            autoscale_enabled=parameters.get("autoscale_enabled", None),
            autoscale_min_replicas=parameters.get("autoscale_min_replicas",
                                                  None),
            autoscale_max_replicas=parameters.get("autoscale_max_replicas",
                                                  None),
            autoscale_refresh_seconds=parameters.get(
                "autoscale_refresh_seconds", None),
            autoscale_target_utilization=parameters.get(
                "autoscale_target_utilization", None),
            collect_model_data=parameters.get("model_data_collection_enabled",
                                              None),
            auth_enabled=parameters.get("authentication_enabled", None),
            cpu_cores=cpu_cores,
            memory_gb=memory_gb,
            enable_app_insights=parameters.get("app_insights_enabled", None),
            scoring_timeout_ms=parameters.get("scoring_timeout_ms", None),
            replica_max_concurrent_requests=parameters.get(
                "replica_max_concurrent_requests", None),
            max_request_wait_time=parameters.get("max_request_wait_time",
                                                 None),
            num_replicas=parameters.get("num_replicas", None),
            primary_key=os.environ.get("PRIMARY_KEY", None),
            secondary_key=os.environ.get("SECONDARY_KEY", None),
            tags=parameters.get("tags", None),
            properties=parameters.get("properties", None),
            description=parameters.get("description", None),
            gpu_cores=gpu_cores,
            period_seconds=parameters.get("period_seconds", None),
            initial_delay_seconds=parameters.get("initial_delay_seconds",
                                                 None),
            timeout_seconds=parameters.get("timeout_seconds", None),
            success_threshold=parameters.get("success_threshold", None),
            failure_threshold=parameters.get("failure_threshold", None),
            namespace=parameters.get("namespace", None),
            token_auth_enabled=parameters.get("token_auth_enabled", None))
    else:
        deployment_config = AciWebservice.deploy_configuration(
            cpu_cores=cpu_cores,
            memory_gb=memory_gb,
            tags=parameters.get("tags", None),
            properties=parameters.get("properties", None),
            description=parameters.get("description", None),
            location=parameters.get("location", None),
            auth_enabled=parameters.get("authentication_enabled", None),
            ssl_enabled=parameters.get("ssl_enabled", None),
            enable_app_insights=parameters.get("app_insights_enabled", None),
            ssl_cert_pem_file=parameters.get("ssl_cert_pem_file", None),
            ssl_key_pem_file=parameters.get("ssl_key_pem_file", None),
            ssl_cname=parameters.get("ssl_cname", None),
            dns_name_label=parameters.get("dns_name_label", None),
            primary_key=os.environ.get("PRIMARY_KEY", None),
            secondary_key=os.environ.get("SECONDARY_KEY", None),
            collect_model_data=parameters.get("model_data_collection_enabled",
                                              None),
            cmk_vault_base_url=os.environ.get("CMK_VAULT_BASE_URL", None),
            cmk_key_name=os.environ.get("CMK_KEY_NAME", None),
            cmk_key_version=os.environ.get("CMK_KEY_VERSION", None))

    # Deploying model
    print("::debug::Deploying model")
    try:
        # Default service name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
        branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
        default_service_name = f"{repository_name}-{branch_name}".lower(
        ).replace("_", "-")[:32]

        service = Model.deploy(workspace=ws,
                               name=parameters.get("name",
                                                   default_service_name),
                               models=[model],
                               inference_config=inference_config,
                               deployment_config=deployment_config,
                               deployment_target=deployment_target,
                               overwrite=True)
        service.wait_for_deployment(show_output=True)
    except WebserviceException as exception:
        print(f"::error::Model deployment failed with exception: {exception}")
        service_logs = service.get_logs()
        raise AMLDeploymentException(
            f"Model deployment failed. Logs: {service_logs}\nException: {exception}"
        )

    # Checking status of service
    print("::debug::Checking status of service")
    if service.state != "Healthy":
        service_logs = service.get_logs()
        print(
            f"::error::Model deployment failed with state '{service.state}': {service_logs}"
        )
        raise AMLDeploymentException(
            f"Model deployment failed with state '{service.state}': {service_logs}"
        )

    if parameters.get("test_enabled", False):
        # Testing service
        print("::debug::Testing service")
        root = os.environ.get("GITHUB_WORKSPACE", default=None)
        test_file_path = parameters.get("test_file_path", "code/test/test.py")
        test_file_function_name = parameters.get("test_file_function_name",
                                                 "main")

        print("::debug::Adding root to system path")
        sys.path.insert(1, f"{root}")

        print("::debug::Importing module")
        test_file_path = f"{test_file_path}.py" if not test_file_path.endswith(
            ".py") else test_file_path
        try:
            test_spec = importlib.util.spec_from_file_location(
                name="testmodule", location=test_file_path)
            test_module = importlib.util.module_from_spec(spec=test_spec)
            test_spec.loader.exec_module(test_module)
            test_function = getattr(test_module, test_file_function_name, None)
        except ModuleNotFoundError as exception:
            print(
                f"::error::Could not load python script in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load python script in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
        except FileNotFoundError as exception:
            print(
                f"::error::Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
        except AttributeError as exception:
            print(
                f"::error::Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )

        # Load experiment config
        print("::debug::Loading experiment config")
        try:
            test_function(service)
        except TypeError as exception:
            print(
                f"::error::Could not load experiment config from your module (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load experiment config from your module (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
        except Exception as exception:
            print(
                f"::error::The webservice tests did not complete successfully: {exception}"
            )
            raise AMLDeploymentException(
                f"The webservice tests did not complete successfully: {exception}"
            )

    # Deleting service if desired
    if parameters.get("delete_service_after_deployment", False):
        service.delete()
    else:
        # Create outputs
        print("::debug::Creating outputs")
        print(f"::set-output name=service_scoring_uri::{service.scoring_uri}")
        print(f"::set-output name=service_swagger_uri::{service.swagger_uri}")
    print(
        "::debug::Successfully finished Azure Machine Learning Deploy Action")
Example #28
# Find the most recent completed run in the experiment that yields a
# registerable model artifact
model = None
runs = ws.experiments[experiment_name].get_runs()
for run in runs:
    if run.get_status() != "Completed":
        continue
    try:
        model = run.register_model(experiment_name, model_path="model")
        break
    except Exception:
        pass

# create deployment configuration
aks_config = AksWebservice.deploy_configuration(
    compute_target_name="aks-cpu-deploy",
    cpu_cores=2,
    memory_gb=5,
    tags={
        "data": "diabetes",
        "method": "sklearn"
    },
    description="Predict using webservice",
)

# create webservice
webservice, azure_model = mlflow.azureml.deploy(
    model_uri=f"runs:/{run.id}/model",
    workspace=ws,
    deployment_config=aks_config,
    service_name="sklearn-diabetes-" + str(randint(10000, 99999)),
    model_name="sklearn-diabetes-example",
)

# test webservice
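A minimal sketch of such a test, assuming key auth and an MLflow pyfunc input in pandas "split" orientation (the column names are made up):

import json
import requests

sample = json.dumps({"columns": ["age", "bmi"], "data": [[52, 28.1]]})
headers = {"Content-Type": "application/json",
           "Authorization": "Bearer " + webservice.get_keys()[0]}
print(requests.post(webservice.scoring_uri, data=sample, headers=headers).text)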
Example #29
    print('Found existing compute target')
except ComputeTargetException:
    print('Creating a new compute target...')
    # Provision AKS cluster with GPU machine
    prov_config = AksCompute.provisioning_configuration(vm_size="Standard_NC6")

    # Create the cluster
    aks_target = ComputeTarget.create(workspace=ws,
                                      name=aks_name,
                                      provisioning_configuration=prov_config)

    aks_target.wait_for_completion(show_output=True)

# Define the deployment configuration
gpu_aks_config = AksWebservice.deploy_configuration(autoscale_enabled=False,
                                                    num_replicas=3,
                                                    cpu_cores=2,
                                                    memory_gb=4)

# Define the inference configuration
myenv = Environment.from_conda_specification(
    name="testEnv",
    file_path=
    "C:/Users/Danilo.Bento/Icon Dropbox/DEVDATA/RO/DEVELOPMENT/SIB2/tutorials/model5/mod5_deploy/deploy_env.yaml"
)

myenv.docker.base_image = DEFAULT_GPU_IMAGE
inference_config = InferenceConfig(  #entry_script=os.path.join(os.getenv('AZUREML_MODEL_DIR'), 'yolov5','score.py'),
    #entry_script="./yolov5/score.py",
    entry_script="score.py",
    environment=myenv,
    source_directory=
Example #30
def deploy():
    # Load credentials
    print("::debug::Loading azure credentials")
    with open('creds.json') as json_file:
        azure_credentials = json.load(json_file)

    # distribute credentials across variables
    tenant_id = azure_credentials['tenantId']
    app_id = azure_credentials['clientId']
    app_secret = azure_credentials['clientSecret']
    subscription_id = azure_credentials['subscriptionId']
    rm_endpoint = azure_credentials['resourceManagerEndpointUrl']

    #Load model name and model version
    print("::debug::Loading input values")
    model_name = 'newsletter-info'
    mv = '1'
    # convert to int
    print("::debug::Casting input values")
    try:
        model_version = int(mv)
    except (TypeError, ValueError) as exception:
        print(f"::debug::Could not cast model version to int: {exception}")
        model_version = None

    cloud = "AzureCloud"

    # Authenticate Azure
    try:
        sp = ServicePrincipalAuthentication(
            tenant_id=tenant_id,
            service_principal_id=app_id,
            service_principal_password=app_secret,
            cloud=cloud)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException

    #Load workspace and resource group
    print("::debug::Loading Workspace values")
    ws_path = 'delphai-common-ml'
    resource_group = 'tf-ml-workspace'

    #Load Azure workspace
    try:
        ws = Workspace.get(name=ws_path,
                           auth=sp,
                           subscription_id=subscription_id,
                           resource_group=resource_group)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException

    #Load Model
    print("::debug::Loading model")
    try:
        model = Model(workspace=ws, name=model_name, version=model_version)
    except WebserviceException as exception:
        print(
            f"::error::Could not load model with provided details: {exception}"
        )
        raise AMLConfigurationException(
            f"Could not load model with provided details: {exception}")

    # Loading deployment target
    print("::debug::Loading deployment target")
    try:
        deployment_target = ComputeTarget(workspace=ws, name='delphai-common')
    except ComputeTargetException:
        deployment_target = None
    except TypeError:
        deployment_target = None

    # Loading entry and conda file
    source = 'tests/model'

    print("::debug::Loading entry_file & Conda file")
    entry_file = 'entry.py'
    entry_file_path = os.path.join(source, entry_file)

    conda_file = 'conda.yml'
    conda_file_path = os.path.join(source, conda_file)
    print(conda_file_path)

    try:
        env = Environment.from_conda_specification(name=model_name,
                                                   file_path=conda_file_path)
    except Exception:
        print(
            f'::debug::Failed to create environment from {conda_file_path}')

    try:
        inference_config = InferenceConfig(entry_script=entry_file_path,
                                           environment=env)
    except Exception:
        print('::debug::Failed to create InferenceConfig')

        #print('::debug:: Make sure conda.yml and entry.py are in the [src] directory')

    print('::debug::Getting namespace and replicas')
    replicas = os.environ.get('INPUT_REPLICAS') or '3'
    try:
        replicas = int(replicas)
    except (TypeError, ValueError) as exception:
        print(f"::debug::Could not cast replicas to int: {exception}")
        replicas = 3

    deployment_name = os.environ.get('INPUT_DEPLOYMENT_NAME',
                                     default=model_name.replace("_", "-"))
    create_namespace(app_id=app_id,
                     app_secret=app_secret,
                     tenant=tenant_id,
                     namespace=deployment_name)
    deployment_config = AksWebservice.deploy_configuration(
        autoscale_enabled=False,
        num_replicas=replicas,
        namespace=deployment_name)

    # Deploying model
    print("::debug::Deploying model")
    override = os.environ.get('INPUT_OVERRIDE') or 'yes'
    if override == 'yes':
        override = True
    elif override == 'no':
        override = False
    try:
        service = Model.deploy(workspace=ws,
                               name=deployment_name,
                               models=[model],
                               inference_config=inference_config,
                               deployment_config=deployment_config,
                               deployment_target=deployment_target,
                               overwrite=override)
        service.wait_for_deployment(show_output=True)
    except WebserviceException as exception:
        print(f"::error::Model deployment failed with exception: {exception}")
        service_logs = service.get_logs()

    # Give Kubernetes time to create the pods
    time.sleep(60)

    if service.state != "Healthy":
        try:
            service_logs = service.get_logs()
            print(f"::error::Service is not healthy. Logs: {service_logs}")
        except Exception:
            print(
                "::error::Model deployment might have failed; please check "
                "your deployments in Lens"
            )