Example #1
def call_web_service(e, service_type, service_name):
    aml_workspace = Workspace.get(name=e.workspace_name,
                                  subscription_id=e.subscription_id,
                                  resource_group=e.resource_group)
    print("Fetching service")
    headers = {}
    if service_type == "ACI":
        service = AciWebservice(aml_workspace, service_name)
    else:
        service = AksWebservice(aml_workspace, service_name)
    if service.auth_enabled:
        service_keys = service.get_keys()
        headers['Authorization'] = 'Bearer ' + service_keys[0]
    print("Testing service")
    print(". url: %s" % service.scoring_uri)
    output = call_web_app(service.scoring_uri, headers)

    return output
Example #2
def call_web_service(e, service_type, service_name):
    aml_workspace = Workspace.get(name=e.workspace_name,
                                  subscription_id=e.subscription_id,
                                  resource_group=e.resource_group)
    print('fetching webservice')
    if service_type == 'AKS':
        service = AksWebservice(aml_workspace, service_name)
    elif service_type == 'ACI':
        service = AciWebservice(aml_workspace, service_name)
    else:
        raise ValueError(f'service_type {service_type} is not supported')

    headers = {}
    if service.auth_enabled:
        service_keys = service.get_keys()
        headers['Authorization'] = 'Bearer ' + service_keys[0]

    scoring_url = service.scoring_uri
    print(f'scoring url: {scoring_url}')
    output = call_web_app(scoring_url, headers)

    return output
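
Both examples above hand the actual HTTP call off to a call_web_app helper that is not shown. A minimal sketch of what such a helper might look like, assuming a JSON payload and the requests library (the payload shape and function body are assumptions, not part of the original examples):

# Sketch only: an assumed call_web_app helper matching the calls above.
import json
import requests

def call_web_app(url, headers):
    headers['Content-Type'] = 'application/json'
    # Placeholder input; the real feature values depend on the deployed model.
    payload = json.dumps({"data": [[0.0, 0.0, 0.0]]})
    response = requests.post(url, data=payload, headers=headers)
    response.raise_for_status()
    return response.json()
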
Example #3
def deploy():
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", required=True)
    parser.add_argument("--model_path", required=True)
    args = parser.parse_args()

    print(f"model_name : {args.model_name}")
    print(f"model_path: {args.model_path}")

    run = Run.get_context()
    ws = run.experiment.workspace

    model = Model.register(workspace=ws,
                           model_path=args.model_path,
                           model_name=args.model_name)

    print("Registered version {0} of model {1}".format(model.version,
                                                       model.name))

    inference_config = InferenceConfig(
        entry_script='score.py',
        runtime='python',
        conda_file='conda.yml',
        extra_docker_file_steps='extra_docker_steps',
        source_directory='server_files/')
    deployment_config = AciWebservice.deploy_configuration(cpu_cores=0.1,
                                                           memory_gb=0.5,
                                                           auth_enabled=True)

    try:
        service = AciWebservice(ws, "testscorescriptauto")
        service.update(models=[model])
        print("EXISTING ENDPOINT FOUND: MODEL UPDATED")
    except Exception:
        Model.deploy(ws, "testscorescriptauto", [model], inference_config,
                     deployment_config)
        print("NO EXISTING ENDPOINT FOUND: DEPLOYED MODEL TO NEW ENDPOINT")
Example #4
config_file_path = os.environ.get("GITHUB_WORKSPACE", default="aml_service")
config_file_name = "aml_arm_config.json"
ws = Workspace.from_config(path=config_file_path,
                           auth=cli_auth,
                           _file_name=config_file_name)
print(ws.name, ws.resource_group, ws.location, ws.subscription_id, sep='\n')

# Loading Image
image_details = profiling_result["image_id"].split(":")
image = Image(workspace=ws, name=image_details[0], version=image_details[1])

# Deploying model on ACI
print("Deploying model on ACI")
try:
    print("Trying to update existing ACI service")
    dev_service = AciWebservice(workspace=ws, name=aci_settings["name"])
    dev_service.update(image=image,
                       tags=deployment_settings["image"]["tags"],
                       properties=deployment_settings["image"]["properties"],
                       description=deployment_settings["image"]["description"],
                       auth_enabled=aci_settings["auth_enabled"],
                       ssl_enabled=aci_settings["ssl_enabled"],
                       ssl_cert_pem_file=aci_settings["ssl_cert_pem_file"],
                       ssl_key_pem_file=aci_settings["ssl_key_pem_file"],
                       ssl_cname=aci_settings["ssl_cname"],
                       enable_app_insights=aci_settings["enable_app_insights"])
    print("Successfully updated existing ACI service")
except WebserviceException:
    print("Failed to update ACI service... Creating new ACI instance")
    aci_config = AciWebservice.deploy_configuration(
        cpu_cores=profiling_result["cpu"],
Example #5
import json
import pickle
import numpy as np
import pandas as pd
from azureml.core.workspace import Workspace
import azureml.train.automl
from sklearn.externals import joblib
from azureml.core.model import Model

ws = Workspace.from_config('./config.json')

from azureml.core.webservice import Webservice, AciWebservice, AksWebservice
service = AciWebservice(ws, "sentiment-scorer-korean")
# service = AksWebservice(ws, "sentiment-scorer-korean-aks")

# input_sample = pd.DataFrame({'id': pd.Series(['6471903'], dtype='int64'), 'document': pd.Series(['진짜 별로다 헐 ㅡ'], dtype='object')})  # Korean sample review, roughly "seriously bad, ugh"
from load_dataset import testdata as input_sample

test = json.dumps({"data": input_sample.values.tolist()})
result = service.run(input_data=bytes(test, encoding="utf8"))

input_sample['predicted'] = list(json.loads(result).values())[0]
print(input_sample)
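
The {"data": ...} envelope used here (and in several of the other examples) is simply what the entry script behind the endpoint expects; it is not fixed by the SDK. A minimal entry script that would accept this payload might look like the following sketch; init() and run() are the contract Azure ML expects, while the model name and the use of joblib are assumptions:

# Sketch of a matching entry script (score.py); names are assumed examples.
import json
import joblib
import numpy as np
from azureml.core.model import Model

def init():
    global model
    model_path = Model.get_model_path('sentiment-model')  # assumed model name
    model = joblib.load(model_path)

def run(raw_data):
    data = np.array(json.loads(raw_data)['data'])
    predictions = model.predict(data)
    return json.dumps({'result': predictions.tolist()})
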
Example #6
if image.creation_state != 'Succeeded':
    raise Exception(f'Image creation status: {image.creation_state}')

print('{}(v.{} [{}]) stored at {} with build log {}'.format(
    image.name, image.version, image.creation_state, image.image_location,
    image.image_build_log_uri))

aciconfig = AciWebservice.deploy_configuration(
    cpu_cores=1,
    memory_gb=1,
    tags={'area': "visual object classification"},
    description='A sample description')

aci_service_name = 'aciwebservice' + datetime.datetime.now().strftime('%m%d%H')

try:
    service = AciWebservice(ws, aci_service_name)
    service.delete()
except Exception as e:
    print(e)
    pass

service = Webservice.deploy_from_image(deployment_config=aciconfig,
                                       image=image,
                                       name=aci_service_name,
                                       workspace=ws)

service.wait_for_deployment()

# print(service.get_logs())

print('Deployed ACI Webservice: {} \nWebservice Uri: {}'.format(
    service.name, service.scoring_uri))
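
If wait_for_deployment() does not end in a healthy service, the container logs are usually the quickest way to see why. A short follow-up check one might add here (an assumption, not part of the original example):

# Assumed follow-up: verify the deployment and surface the logs on failure.
if service.state != 'Healthy':
    print(service.get_logs())
    raise Exception('Deployment of {} ended in state {}'.format(
        service.name, service.state))
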
Example #7
# Get workspace
ws = Workspace.from_config()

# Get the ACI Details
try:
    with open("aml_config/aci_webservice.json") as f:
        config = json.load(f)
except Exception:
    print('No new model, thus no deployment on ACI')
    #raise Exception('No new model to register as production model perform better')
    sys.exit(0)

service_name = config['aci_name']
print("Service :", service_name)
# Get the hosted web service
service = AciWebservice(ws, service_name)

# Input for Model with all features
# load the dataset
test_file = os.path.join('..', 'data', 'preprocessed', 'X_test.hkl')
X = hkl.load(test_file)
X_test = X[:10]

json_data = json.dumps({"data": X_test.tolist()})
json_data = bytes(json_data, encoding='utf8')

print("Service URL:", service.scoring_uri)

try:
    prediction = service.run(json_data)
    print(prediction)
except Exception as e:
    # assumed handler: surface any scoring failure instead of letting it propagate
    print("Scoring request failed:", e)
Example #8
# Initialize a Workspace object from the existing workspace you created in
# the Prerequisites step. Workspace.from_config() creates a workspace object from the details stored in config.json.
from azureml.core.workspace import Workspace

import os
os.chdir('VS_code/deep_learning/')

# make sure that you followed the instructions in the readme file to create the config.json file for this step
ws = Workspace.from_config()

from azureml.core.webservice import AciWebservice

service_name = 'aci-hymenoptera'
service = AciWebservice(workspace=ws, name=service_name)

print(service.serialize())

print(service.state)

# If your deployment fails for any reason and you need to redeploy, make sure to delete the service before you do so: service.delete()

# Tip: If something goes wrong with the deployment, the first thing to look at is the logs from the service by running the following command:

service.get_logs()

# Get the web service's HTTP endpoint, which accepts REST client calls. This endpoint can be shared with anyone who wants to test the web service or integrate it into an application.

print(service.scoring_uri)

# Test the web service
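
The example stops at the test step. A minimal way to exercise the endpoint over REST, assuming the service expects the usual {"data": ...} JSON envelope (the payload below is a placeholder, not the real model input):

# Sketch only: POST a placeholder payload to the scoring endpoint.
import json
import requests

input_payload = json.dumps({"data": [[0.0, 0.0, 0.0]]})  # placeholder; real input depends on the model
headers = {'Content-Type': 'application/json'}
if service.auth_enabled:
    key, _ = service.get_keys()
    headers['Authorization'] = 'Bearer ' + key

response = requests.post(service.scoring_uri, data=input_payload, headers=headers)
print(response.status_code, response.json())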