import json
import time

from azureml.core.webservice import AksWebservice


def test_service(service: AksWebservice,
                 container: str,
                 blob: str,
                 write_logs: bool = True) -> None:
    if write_logs:
        logs = service.get_logs()
        with open("logs.txt", "w") as fp:
            fp.write(logs)
    data = {"container": container, "blob": blob}
    data_raw = bytes(json.dumps({"data": data}), encoding="utf8")
    print("Testing service: {0}".format(service.name))
    print("Container: {0}, blob: {1}".format(container, blob))
    ping = time.time()
    response = service.run(input_data=data_raw)
    print("Elapsed time: {0:.5f}".format(time.time() - ping))
    print("Response: {0}".format(response))
Example 2
        deployment_target=aks_test_cluster)
# Show output of the deployment on stdout
test_service.wait_for_deployment(show_output=True)
print(test_service.state)

# Checking status of test web service
print("Checking status of AKS Test Deployment")
if test_service.state != "Healthy":
    raise Exception(
        "Test Deployment on AKS failed with the following status: {} and logs: \n{}"
        .format(test_service.state, test_service.get_logs()))

# Testing AKS web service
print("Testing AKS test web service")
test_sample = test_functions.get_test_data_sample()
print("Test Sample: ", test_sample)
test_sample_encoded = bytes(test_sample, encoding='utf8')
try:
    prediction = test_service.run(input_data=test_sample_encoded)
    print(prediction)
except Exception as e:
    result = str(e)
    logs = test_service.get_logs()
    test_service.delete()
    raise Exception(
        "AKS Test web service is not working as expected: \n{} \nLogs: \n{}".
        format(result, logs))

# Delete test AKS service after test
print("Deleting AKS Test web service after successful test")
test_service.delete()
Example 3
# Check its state
print(service.state)

# Review service logs
print(service.get_logs())


# Deploy to a local container to diagnose problems
from azureml.core.model import Model
from azureml.core.webservice import LocalWebservice

deployment_config = LocalWebservice.deploy_configuration(port=8890)
service = Model.deploy(ws, 'test-svc', [model], inference_config, deployment_config)

# Test the deployed service
print(service.run(input_data=json_data))

# Troubleshoot issues by changing the scoring file,
# then reload without having to redeploy
service.reload()
print(service.run(input_data=json_data))
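
# For reference, a sketch of what the scoring file (score.py) picked up by
# service.reload() might look like - an illustrative assumption, not the
# original entry script; the model name and payload shape are placeholders.
# Azure ML calls init() once at container start and run() for every request:
#
#   import json
#   import joblib
#   import numpy as np
#   from azureml.core.model import Model
#
#   def init():
#       global model
#       model = joblib.load(Model.get_model_path("my-model"))  # hypothetical name
#
#   def run(raw_data):
#       data = np.array(json.loads(raw_data)["data"])
#       return json.dumps(model.predict(data).tolist())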



## EXERCISE

# Connect to the workspace
import azureml.core
from azureml.core import Workspace

# Load the workspace from the saved config file
ws = Workspace.from_config()
Example 4
    for f in fnames:
        file_name = os.path.join(dirpath, f)

        # load image
        print("Loading image", file_name)

        data = read_tensor_from_image_file(file_name,
                                           input_height=input_height,
                                           input_width=input_width,
                                           input_mean=input_mean,
                                           input_std=input_std)
        raw_data = str(data.tolist())

        # predict using the deployed model
        print("Sending image", f, "to service")
        response = service.run(input_data=raw_data)
        print("Service response:", response)
        print()

print("Testings web service via HTTP call...")
api_keys = service.get_keys()
headers = {
    'Content-Type': 'application/json',
    'Authorization': ('Bearer ' + api_keys[0])
}

file_name = "./resources/test-images/Daisy1.jpg"
data = read_tensor_from_image_file(file_name,
                                   input_height=input_height,
                                   input_width=input_width,
                                   input_mean=input_mean,
                                   input_std=input_std)
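
# The original snippet cuts off before the HTTP request itself. A sketch of
# how the call could be completed with the requests library is below - it
# reuses scoring_uri and the Bearer headers built above; the payload shape
# (stringified tensor) mirrors the earlier service.run() loop and is an assumption.
import requests

raw_data = str(data.tolist())
response = requests.post(service.scoring_uri, data=raw_data, headers=headers)
print("HTTP status:", response.status_code)
print("HTTP response:", response.text)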
Example 5

import json
import pickle
import numpy as np
import pandas as pd
from azureml.core.workspace import Workspace
import azureml.train.automl
from sklearn.externals import joblib
from azureml.core.model import Model

ws = Workspace.from_config('./config.json')

from azureml.core.webservice import Webservice, AciWebservice, AksWebservice
# service = AciWebservice(ws, "sentiment-scorer-korean")
# service = AksWebservice(ws, "sentiment-scorer-korean-aks")
service = AksWebservice(ws, "sentiment-scorer-korean-aks-pr")

# input_sample = pd.DataFrame({'id': pd.Series(['6471903'], dtype='int64'), 'document': pd.Series(['진짜 별로다 헐 ㅡ'], dtype='object')})
from load_dataset import testdata as input_sample

import json
test = json.dumps({"data": input_sample.values.tolist()})
result = service.run(input_data=bytes(test, encoding="utf8"))

# input_sample['predicted'] = list(json.loads(result).values())[0]
# print(input_sample)

print(json.loads(result).values())
with open('./output.json', 'w', encoding='utf-8') as f:
    f.write(result)
## Remember to open the output with proper encoding (for example UTF-8)
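
# Read-back sketch (assumption): pass the encoding explicitly so the Korean
# text in the predictions is decoded correctly on every platform.
with open('./output.json', 'r', encoding='utf-8') as f:
    scored = json.load(f)
print(scored)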
Example 6
def main():
    # Parse command line arguments
    args = parse_args(sys.argv[1:])

    # Retrieve workspace
    workspace = Workspace.get(
        subscription_id=args.subscription_id,
        resource_group=args.resource_group,
        name=args.workspace_name,
    )

    # Retrieve compute cluster
    compute_target = workspace.compute_targets[args.compute_target]

    # Get baseline dataset
    baseline_dataset = Dataset.get_by_name(workspace, args.baseline_dataset_name)

    # Get model id and version
    model_name, model_version = args.model_id.split(":")

    # Get AKS Endpoint
    aks_endpoint = AksWebservice(workspace, args.endpoint_name)

    # Make call to endpoint with sample data and wait for the data to arrive in the storage account
    # [Note: this step is required to ensure a data sample is present for validation when
    # registering a new target dataset below - this can take up to 10 mins to appear]
    input_record = (
        baseline_dataset.take(1)
        .to_pandas_dataframe()
        .drop(["cardiovascular_disease", "datetime"], axis=1)
        .to_dict("records")
    )

    input_data = json.dumps({"data": input_record})

    print("Variable [input_data]:", input_data)

    aks_endpoint.run(input_data)
    time.sleep(600)

    # Define target dataset
    target_dataset_name = (
        f"inference-data-{model_name}-{model_version}-{args.endpoint_name}"
    )

    # Get current registered target dataset definition
    current_target_dataset = Dataset.get_by_name(workspace, name=target_dataset_name)
    current_target_dataset_definition = json.loads(current_target_dataset._definition)

    # Get current registered target dataset datastore definition
    current_target_dataset_datastore_definition = current_target_dataset_definition[
        "blocks"
    ][0]["arguments"]["datastores"][0]

    # Define current registered target dataset datastore
    target_dataset_datastore = Datastore(
        workspace, current_target_dataset_datastore_definition["datastoreName"]
    )

    # Define current registered target dataset datastore path
    target_dataset_datastore_path = current_target_dataset_datastore_definition["path"]

    # Create updated target dataset with non-string feature data types
    target_dataset = Dataset.Tabular.from_delimited_files(
        path=(target_dataset_datastore, target_dataset_datastore_path),
        validate=False,
        infer_column_types=False,
        set_column_types={
            "age": DataType.to_float(decimal_mark="."),
            "height": DataType.to_float(decimal_mark="."),
            "weight": DataType.to_float(decimal_mark="."),
            "systolic": DataType.to_float(decimal_mark="."),
            "diastolic": DataType.to_float(decimal_mark="."),
            "gender": DataType.to_string(),
            "cholesterol": DataType.to_string(),
            "glucose": DataType.to_string(),
            "smoker": DataType.to_string(),
            "alcoholic": DataType.to_string(),
            "active": DataType.to_string(),
            "datetime": DataType.to_datetime(),
        },
    )

    # Assign timestamp column for Tabular Dataset to activate time series related APIs
    target_dataset = target_dataset.with_timestamp_columns(
        timestamp=target_dataset_timestamp_column
    )

    # Register updated dataset version
    target_dataset.register(
        workspace, name=target_dataset_name, create_new_version=True
    )

    print("Variable [target_dataset]:", target_dataset)
    print("Variable [baseline_dataset]:", baseline_dataset)

    # Define features to monitor
    feature_list = args.feature_list.split(",")

    print("Variable [feature_list]:", args.feature_list)

    # List data drift detectors
    drift_detector_list = DataDriftDetector.list(workspace)

    # Delete existing data drift detector
    for drift_monitor in drift_detector_list:
        if drift_monitor.name == args.data_drift_monitor_name:
            print("Deleteing existing data drift monitor...")
            drift_monitor.delete()

    # Define data drift detector
    monitor = DataDriftDetector.create_from_datasets(
        workspace,
        args.data_drift_monitor_name,
        baseline_dataset,
        target_dataset,
        compute_target=compute_target,
        frequency=args.frequency,
        feature_list=feature_list,
    )

    print("Variable [monitor]:", monitor)

    # Enable the pipeline schedule for the data drift detector
    monitor.enable_schedule()
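
    # Sketch (assumption): once scheduled runs have completed, drift results
    # and metrics could be pulled back roughly like this - the 7-day window
    # is an arbitrary example, not something the original script does.
    from datetime import datetime, timedelta

    results, metrics = monitor.get_output(
        start_time=datetime.utcnow() - timedelta(days=7)
    )
    print("Variable [results]:", results)
    print("Variable [metrics]:", metrics)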
Example 7

ws = Workspace.from_config(auth=auth)

service = AksWebservice(ws, "videoanom-service")

# load the dataset
X_test_file = os.path.join('.', 'deployment', 'test_data', 'X_test.hkl')
y_test_file = os.path.join('.', 'deployment', 'test_data', 'y_test.hkl')
X_test = hkl.load(X_test_file)
y_test = hkl.load(y_test_file)

json_data = json.dumps({"data": X_test.tolist(), "id": "UCSDped1"})
json_data = bytes(json_data, encoding='utf8')

print("Service URL:", service.scoring_uri)

try:
    prediction = service.run(json_data)
except Exception as e:
    result = str(e)
    print(result)
    raise Exception('web service is not working as expected')

cm = confusion_matrix(y_test.tolist(), prediction)
acc = accuracy_score(y_test.tolist(), prediction)

print("Accuracy:", acc)
print("Confusion Matrix:\n", cm)

if acc < .80:
    raise Exception("The accuracy of this service is pretty low!")