def test_fpga_service(workspace):
    # Use the gRPC client from the Azure ML Accelerated Models SDK package
    aks_service_name = "my-aks-service"
    aks_service = AksWebservice(workspace=workspace, name=aks_service_name)
    client = FPGARealtimeScore.get_prediction_client(aks_service)

    # Score image with input and output tensor names
    input_tensors, output_tensors = FPGARealtimeScore.get_resnet50_IO()
    wget.download(
        "https://raw.githubusercontent.com/Azure/MachineLearningNotebooks/"
        "master/how-to-use-azureml/deployment/accelerated-models/snowleopardgaze.jpg"
    )

    results = client.score_file(path="snowleopardgaze.jpg",
                                input_name=input_tensors,
                                outputs=output_tensors)

    # map results [class_id] => [confidence]
    results = enumerate(results)
    # sort results by confidence
    sorted_results = sorted(results, key=lambda x: x[1], reverse=True)
    # print top 5 results
    classes_entries = requests.get(
        "https://raw.githubusercontent.com/Lasagne/Recipes/"
        "master/examples/resnet50/imagenet_classes.txt").text.splitlines()
    for top in sorted_results[:5]:
        print(classes_entries[top[0]], "confidence:", top[1])
Example 2
def connect_webservice(ws):
    with open('config/aks_config.json') as f:
        aks_config = json.load(f)

    aks_service = AksWebservice(ws, name=aks_config["name"])
    # print(aks_service.state)

    url = aks_service.scoring_uri
    key = aks_service.get_keys()[0]

    return url, key
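The (url, key) pair returned above can be used to call the scoring endpoint directly. A minimal usage sketch, assuming the deployed scoring script accepts a JSON body of the form {"data": [...]} (the payload shape is an assumption, not part of the original example):

import json
import requests

url, key = connect_webservice(ws)

# Hypothetical payload; the real schema depends on the deployed score.py
payload = json.dumps({"data": [[0.1, 0.2, 0.3]]})
headers = {
    "Content-Type": "application/json",
    "Authorization": "Bearer " + key,
}
response = requests.post(url, data=payload, headers=headers)
print(response.status_code, response.json())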
Example 3
def test_deployed_model_service():
    service = AksWebservice(ws, deployment_name)
    assert service is not None

    key1, key2 = service.get_keys()
    uri = service.scoring_uri

    assert key1 is not None
    assert uri.startswith('http')

    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {key1}'
    }
    response = requests.post(uri, test_sample, headers=headers)
    assert response.status_code == 200
    assert abs(1 - sum(response.json()['predict_proba'][0])) < 0.01
def call_web_service(e, service_type, service_name):
    aml_workspace = Workspace.get(name=e.workspace_name,
                                  subscription_id=e.subscription_id,
                                  resource_group=e.resource_group)
    print("Fetching service")
    headers = {}
    if service_type == "ACI":
        service = AciWebservice(aml_workspace, service_name)
    else:
        service = AksWebservice(aml_workspace, service_name)
    if service.auth_enabled:
        service_keys = service.get_keys()
        headers['Authorization'] = 'Bearer ' + service_keys[0]
    print("Testing service")
    print(". url: %s" % service.scoring_uri)
    output = call_web_app(service.scoring_uri, headers)

    return output
def test_gpu_service(workspace):
    aks_service_name = "deepaksservice"

    assert aks_service_name in workspace.webservices, f"{aks_service_name} not found."
    aks_service = AksWebservice(workspace, name=aks_service_name)
    assert (aks_service.state == "Healthy"
            ), f"{aks_service_name} is in state {aks_service.state}."
    scoring_url = aks_service.scoring_uri
    print(scoring_url)
    api_key = aks_service.get_keys()[0]
    import requests

    headers = {"Authorization": ("Bearer " + api_key)}

    files = {"image": open("snowleopardgaze.jpg", "rb")}
    r_get = requests.get(scoring_url, headers=headers)
    assert r_get
    r_post = requests.post(scoring_url, files=files, headers=headers)
    assert r_post
Example 6
def call_web_service(e, service_type, service_name):
    aml_workspace = Workspace.get(name=e.workspace_name,
                                  subscription_id=e.subscription_id,
                                  resource_group=e.resource_group)
    print('fetching webservice')
    if service_type == 'AKS':
        service = AksWebservice(aml_workspace, service_name)
    elif service_type == 'ACI':
        service = AciWebservice(aml_workspace, service_name)
    else:
        raise ValueError(f'Service type {service_type} is not supported!')

    headers = {}
    if service.auth_enabled:
        service_keys = service.get_keys()
        headers['Authorization'] = 'Bearer ' + service_keys[0]

    scoring_url = service.scoring_uri
    print(f'scoring url: {scoring_url}')
    output = call_web_app(scoring_url, headers)

    return output
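Both call_web_service variants above delegate the actual HTTP request to a call_web_app helper that is not shown in this listing. A minimal sketch of what such a helper might look like (the GET semantics and JSON response handling are assumptions; the real helper may POST a payload instead):

import requests

def call_web_app(url, headers):
    # Issue a simple GET against the scoring URI and return the decoded body
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    return response.json()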
Example 7
# Start creating
# Point file to conf directory containing details for the aml service
cli_auth = AzureCliAuthentication()
ws = Workspace(workspace_name=workspace,
               subscription_id=subscription_id,
               resource_group=resource_grp,
               auth=cli_auth)

model_list = Model.list(workspace=ws)
model, = (m for m in model_list
          if m.version == model_version and m.name == model_name)
print("Model picked: {} \nModel Description: {} \nModel Version: {}".format(
    model.name, model.description, model.version))

try:
    service = AksWebservice(name=service_name, workspace=ws)
    print("delete " + service_name + " before creating new one")
    service.delete()
except Exception:
    print(service_name + " does not exist yet")

os.chdir("deploy")

# Add model name to scorefile
with open("scoreSparkTemplate.py") as fr:
    score = fr.read()

score = score.replace("{model_name}", model_name)
with open("scoreSpark.py", "w") as fw:
    fw.write(score)
    parser.add_argument(
        "--endpoint_name",
        type=str,
        default="triton-densenet-onnx",
        help="name of the endpoint to test",
    )
    parser.add_argument(
        "--data_file",
        type=str,
        default="../../data/raw/triton/peacock.jpg",
        help="filename to run through the classifier",
    )
    args = parser.parse_args()

    ws = Workspace.from_config()
    aks_service = AksWebservice(ws, args.endpoint_name)

    # if (key) auth is enabled, fetch keys and include in the request
    key1, _ = aks_service.get_keys()

    headers = {
        "Content-Type": "application/octet-stream",
        "Authorization": "Bearer " + key1,
    }

    file_name = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        "..",
        "data",
        args.data_file,
    )
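The snippet above is cut off after building file_name. A minimal sketch of how the request to the Triton endpoint could be completed, assuming the service accepts raw image bytes with the octet-stream headers defined above (the response handling is an assumption):

    # Read the raw image bytes and post them to the scoring URI
    import requests

    with open(file_name, "rb") as f:
        data = f.read()

    response = requests.post(aks_service.scoring_uri, data=data, headers=headers)
    response.raise_for_status()
    print(response.text)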
Example 9
    aks_test_cluster.wait_for_completion(show_output=True)

# Checking status of Test AKS Cluster
print("Checking status of Test AKS Cluster")
if aks_test_cluster.provisioning_state == "Failed":
    aks_test_cluster.delete()
    raise Exception(
        "Deployment of Test AKS Cluster failed with the following status: {} and logs: \n {}"
        .format(aks_test_cluster.provisioning_state,
                aks_test_cluster.provisioning_errors))

# Deploying model on test AKS
print("Deploying model on Test AKS")
try:
    print("Trying to update existing AKS test service")
    test_service = AksWebservice(workspace=ws,
                                 name=aks_service_settings["name"])
    test_service.update(
        image=image,
        autoscale_enabled=aks_service_settings["autoscale_enabled"],
        autoscale_min_replicas=aks_service_settings["autoscale_min_replicas"],
        autoscale_max_replicas=aks_service_settings["autoscale_max_replicas"],
        autoscale_refresh_seconds=aks_service_settings[
            "autoscale_refresh_seconds"],
        autoscale_target_utilization=aks_service_settings[
            "autoscale_target_utilization"],
        collect_model_data=aks_service_settings["collect_model_data"],
        auth_enabled=aks_service_settings["auth_enabled"],
        cpu_cores=profiling_result["cpu"],
        memory_gb=profiling_result["memory"],
        enable_app_insights=aks_service_settings["enable_app_insights"],
        scoring_timeout_ms=aks_service_settings["scoring_timeout_ms"],
Example 10
                         data = json_data,
                         headers = request_headers)

# Get the predictions from the JSON response
predictions = json.loads(response.json())

# Print the predicted class for each case.
for i in range(len(x_new)):
    print(x_new[i], predictions[i])


#check service state
from azureml.core.webservice import AksWebservice

# Get the deployed service
service = AksWebservice(name='classifier-service', workspace=ws)

# Check its state
print(service.state)   

#review service logs
print(service.get_logs())


#deploy to a local container to diagnose problems
from azureml.core.webservice import LocalWebservice

deployment_config = LocalWebservice.deploy_configuration(port=8890)
service = Model.deploy(ws, 'test-svc', [model], inference_config, deployment_config)

#test deployed service
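The example stops at the "#test deployed service" comment. A minimal sketch of exercising the local service via Webservice.run, assuming the scoring script expects a JSON body with a "data" key (the feature values below are hypothetical placeholders):

import json

# Hypothetical input; adjust the feature values to match the model's schema
test_input = json.dumps({"data": [[0.1, 0.2, 0.3, 0.4]]})
predictions = service.run(input_data=test_input)
print(predictions)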
Example 11
print("Opening Rose1.jpg...")
file_name = "./resources/test-images/Rose1.jpg"

t = read_tensor_from_image_file(file_name,
                                input_height=input_height,
                                input_width=input_width,
                                input_mean=input_mean,
                                input_std=input_std)

print("Predicting...")
predict_flower(t)

service_name = 'flower-photos-svc'
try:
    service = AksWebservice(ws, service_name)
except WebserviceException:
    print("Registering the model...")
    model_graph_name = "flower_photos_graph"
    model_labels_name = "flower_photos_labels"

    model_graph = Model.register(
        model_path=model_file,
        model_name=model_graph_name,
        tags={
            "data": "flower_photos",
            "model": "classification"
        },
        description="Retrained Inception V3 model with flower photos",
        workspace=ws)
import json
import pickle
import numpy as np
import pandas as pd
from azureml.core.workspace import Workspace
import azureml.train.automl
from sklearn.externals import joblib
from azureml.core.model import Model

ws = Workspace.from_config('./config.json')

from azureml.core.webservice import Webservice, AciWebservice, AksWebservice
# service = AciWebservice(ws, "sentiment-scorer-korean")
# service = AksWebservice(ws, "sentiment-scorer-korean-aks")
service = AksWebservice(ws, "sentiment-scorer-korean-aks-pr")

# input_sample = pd.DataFrame({'id': pd.Series(['6471903'], dtype='int64'), 'document': pd.Series(['진짜 별로다 헐 ㅡ'], dtype='object')})
from load_dataset import testdata as input_sample

import json
test = json.dumps({"data": input_sample.values.tolist()})
result = service.run(input_data=bytes(test, encoding="utf8"))

# input_sample['predicted'] = list(json.loads(result).values())[0]
# print(input_sample)

print(json.loads(result).values())
with open('./output.json', 'w') as f:
    f.write(result)
## Remember to open the output with proper encoding (for example UTF-8)
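Following the note above, a short sketch of reading the saved output back with an explicit encoding:

with open('./output.json', encoding='utf-8') as f:
    print(json.load(f))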
Example 13
def main():
    # Parse command line arguments
    args = parse_args(sys.argv[1:])

    # Retrieve workspace
    workspace = Workspace.get(
        subscription_id=args.subscription_id,
        resource_group=args.resource_group,
        name=args.workspace_name,
    )

    # Retrieve compute cluster
    compute_target = workspace.compute_targets[args.compute_target]

    # Get baseline dataset
    baseline_dataset = Dataset.get_by_name(workspace, args.baseline_dataset_name)

    # Get model id and version
    model_name, model_version = args.model_id.split(":")

    # Get AKS Endpoint
    aks_endpoint = AksWebservice(workspace, args.endpoint_name)

    # Make call to endpoint with sample data and wait for the data to arrive in the storage account
    # [Note: this step is required to ensure a data sample is present for validation when
    # registering a new target dataset below - this can take up to 10 mins to appear]
    input_record = (
        baseline_dataset.take(1)
        .to_pandas_dataframe()
        .drop(["cardiovascular_disease", "datetime"], axis=1)
        .to_dict("records")
    )

    input_data = json.dumps({"data": input_record})

    print("Variable [input_data]:", input_data)

    aks_endpoint.run(input_data)
    time.sleep(600)

    # Define target dataset
    target_dataset_name = (
        f"inference-data-{model_name}-{model_version}-{args.endpoint_name}"
    )

    # Get current registered target dataset definition
    current_target_dataset = Dataset.get_by_name(workspace, name=target_dataset_name)
    current_target_dataset_definition = json.loads(current_target_dataset._definition)

    # Get the current registered target dataset's datastore definition
    current_target_dataset_datastore_definition = current_target_dataset_definition[
        "blocks"
    ][0]["arguments"]["datastores"][0]

    # Define the current registered target dataset's datastore
    target_dataset_datastore = Datastore(
        workspace, current_target_dataset_datastore_definition["datastoreName"]
    )

    # Define the current registered target dataset's datastore path
    target_dataset_datastore_path = current_target_dataset_datastore_definition["path"]

    # Create updated target dataset with non-string feature data types
    target_dataset = Dataset.Tabular.from_delimited_files(
        path=(target_dataset_datastore, target_dataset_datastore_path),
        validate=False,
        infer_column_types=False,
        set_column_types={
            "age": DataType.to_float(decimal_mark="."),
            "height": DataType.to_float(decimal_mark="."),
            "weight": DataType.to_float(decimal_mark="."),
            "systolic": DataType.to_float(decimal_mark="."),
            "diastolic": DataType.to_float(decimal_mark="."),
            "gender": DataType.to_string(),
            "cholesterol": DataType.to_string(),
            "glucose": DataType.to_string(),
            "smoker": DataType.to_string(),
            "alcoholic": DataType.to_string(),
            "active": DataType.to_string(),
            "datetime": DataType.to_datetime(),
        },
    )

    # Assign timestamp column for Tabular Dataset to activate time series related APIs
    target_dataset = target_dataset.with_timestamp_columns(
        timestamp=target_dataset_timestamp_column
    )

    # Register updated dataset version
    target_dataset.register(
        workspace, name=target_dataset_name, create_new_version=True
    )

    print("Variable [target_dataset]:", target_dataset)
    print("Variable [baseline_dataset]:", baseline_dataset)

    # Define features to monitor
    feature_list = args.feature_list.split(",")

    print("Variable [feature_list]:", args.feature_list)

    # List data drift detectors
    drift_detector_list = DataDriftDetector.list(workspace)

    # Delete existing data drift detector
    for drift_monitor in drift_detector_list:
        if drift_monitor.name == args.data_drift_monitor_name:
            print("Deleteing existing data drift monitor...")
            drift_monitor.delete()

    # Define data drift detector
    monitor = DataDriftDetector.create_from_datasets(
        workspace,
        args.data_drift_monitor_name,
        baseline_dataset,
        target_dataset,
        compute_target=compute_target,
        frequency=args.frequency,
        feature_list=feature_list,
    )

    print("Variable [monitor]:", monitor)

    # Enable the pipeline schedule for the data drift detector
    monitor.enable_schedule()
base_dir = "."

config_json = os.path.join(base_dir, "config.json")
with open(config_json, "r") as f:
    config = json.load(f)

auth = ServicePrincipalAuthentication(
    tenant_id=config["tenant_id"],
    service_principal_id=config["service_principal_id"],
    service_principal_password=config["service_principal_password"],
)

# Get workspace
ws = Workspace.from_config(auth=auth)

service = AksWebservice(ws, "videoanom-service")

# load the dataset
X_test_file = os.path.join('.', 'deployment', 'test_data', 'X_test.hkl')
y_test_file = os.path.join('.', 'deployment', 'test_data', 'y_test.hkl')
X_test = hkl.load(X_test_file)
y_test = hkl.load(y_test_file)

json_data = json.dumps({"data": X_test.tolist(), "id": "UCSDped1"})
json_data = bytes(json_data, encoding='utf8')

print("Service URL:", service.scoring_uri)

try:
    prediction = service.run(json_data)
except Exception as e: