Example #1
    def _predict_remotely(
        self, ws, experiment, predict_data, model_id, threshold):
        import json

        from azureml.core.webservice import AciWebservice
        from azureml.train.automl.run import AutoMLRun

        input_payload = predict_data.to_json(orient='split', index = False)

        remote_run = AutoMLRun(experiment = experiment, run_id = model_id)
        model_name = remote_run.properties['model_name']
        aci_service_name = self._aci_service_name(model_name)
        aci_service = AciWebservice(ws, aci_service_name)

        input_payload = json.loads(input_payload)
        # For classification models, 'predict_proba' returns class probabilities
        # instead of plain labels; it is used here whenever a threshold is supplied.
        method = 'predict'
        if threshold is not None:
            method = 'predict_proba'
        input_payload = {
            'method': method,
            'data': input_payload['data']
        }
        input_payload = json.dumps(input_payload)
        try:
            response = aci_service.run(input_data = input_payload)
            print(response)
        except Exception as e:
            print('err log', aci_service.get_logs())
            raise e

        results_proba = None
        proba_classes = None

        return json.loads(response)['result'], results_proba, proba_classes
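# --- Sketch: a matching entry script for the ACI service above ---
# A minimal, hypothetical score.py assuming the {'method': ..., 'data': ...}
# payload built by _predict_remotely; 'MODEL_NAME' is a placeholder for the
# registered model name, and column order is assumed to match training.
import json

import joblib
import pandas as pd
from azureml.core.model import Model

model = None


def init():
    global model
    # Load the registered model once when the scoring container starts.
    model = joblib.load(Model.get_model_path('MODEL_NAME'))


def run(raw_data):
    payload = json.loads(raw_data)
    data = pd.DataFrame(payload['data'])
    # Dispatch on the 'method' field sent by the client.
    if payload.get('method') == 'predict_proba':
        result = model.predict_proba(data).tolist()
    else:
        result = model.predict(data).tolist()
    return json.dumps({'result': result})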
Example #2
    def _predict_remotely(self, predict_data, model_id, predict_proba):
        from azureml.core.webservice import AciWebservice
        from azureml.train.automl.run import AutoMLRun
        from azureml.core.run import Run

        import json
        import numpy as np

        ws, experiment = self._get_experiment()

        model_features = None
        target_categories = None

        remote_run = AutoMLRun(experiment = experiment, run_id = model_id)
        model_features, target_categories = self._get_remote_model_features(remote_run)
        if model_id.startswith("AutoML_"):
            model_name = remote_run.properties['model_name']
        else:
            model_name = model_id

        if model_features:
            predict_data = predict_data[model_features]

        input_payload = predict_data.to_json(orient='split', index = False)

        aci_service_name = self._aci_service_name(model_name)
        aci_service = AciWebservice(ws, aci_service_name)

        input_payload = json.loads(input_payload)
        # For classification models, 'predict_proba' returns class probabilities
        # instead of plain labels; it is selected when the caller requests probabilities.
        method = 'predict'
        if predict_proba:
            method = 'predict_proba'
        input_payload = {
            'data': {'data': input_payload['data'], 'method': method}
        }
        input_payload = json.dumps(input_payload)
        try:
            response = aci_service.run(input_data = input_payload)
        except Exception as e:
            log_file = 'automl_errors.log'
            fsclient.write_text_file(log_file, aci_service.get_logs(), mode="a")
            raise AzureException("Prediction service error. Please redeploy the model. Log saved to file '%s'. Details: %s"%(log_file, str(e)))

        response = json.loads(response)
        if "error" in response or not 'result' in response:
            raise AzureException('Prediction service return error: %s'%response.get('error'))

        results_proba = None
        proba_classes = None
        results = response['result']
        if predict_proba:
            results_proba = results
            proba_classes = response['proba_classes']
            results_proba = np.array(results_proba)

        return results, results_proba, proba_classes, target_categories
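# --- Sketch: labelling the probability output above ---
# Hypothetical follow-up assuming results_proba / proba_classes have the shapes
# returned when predict_proba=True; the values below are placeholders.
import numpy as np
import pandas as pd

results_proba = np.array([[0.1, 0.9], [0.8, 0.2]])   # placeholder probabilities
proba_classes = [0, 1]                               # placeholder class labels
proba_df = pd.DataFrame(results_proba,
                        columns=['proba_%s' % c for c in proba_classes])
print(proba_df)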
Example #3
# Show output of the deployment on stdout
dev_service.wait_for_deployment(show_output=True)
print("State of Service: {}".format(dev_service.state))

# Checking status of web service
print("Checking status of ACI Dev Deployment")
if dev_service.state != "Healthy":
    raise Exception(
        "Dev Deployment on ACI failed with the following status: {} and logs: \n{}"
        .format(dev_service.state, dev_service.get_logs()))

# Testing ACI web service
print("Testing ACI web service")
test_sample = test_functions.get_test_data_sample()
print("Test Sample: ", test_sample)
test_sample_encoded = bytes(test_sample, encoding='utf8')
try:
    prediction = dev_service.run(input_data=test_sample_encoded)
    print(prediction)
except Exception as e:
    result = str(e)
    logs = dev_service.get_logs()
    dev_service.delete()
    raise Exception(
        "ACI Dev web service is not working as expected: \n{} \nLogs: \n{}".
        format(result, logs))

# Delete aci after test
print("Deleting ACI Dev web service after successful test")
dev_service.delete()
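# --- Sketch: how 'dev_service' above might have been created ---
# A minimal, hypothetical deployment step; the model name, entry script and
# conda file below are placeholders rather than values from the example.
from azureml.core import Workspace
from azureml.core.environment import Environment
from azureml.core.model import InferenceConfig, Model
from azureml.core.webservice import AciWebservice

ws = Workspace.from_config()
model = Model(ws, name='my-registered-model')            # placeholder model name
env = Environment.from_conda_specification('scoring-env', 'conda_env.yml')
inference_config = InferenceConfig(entry_script='score.py', environment=env)
aci_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=1)

dev_service = Model.deploy(workspace=ws,
                           name='dev-aci-service',       # placeholder service name
                           models=[model],
                           inference_config=inference_config,
                           deployment_config=aci_config)
# The snippet above then blocks on wait_for_deployment(show_output=True).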
Example #4
import json
import pickle
import numpy as np
import pandas as pd
from azureml.core.workspace import Workspace
import azureml.train.automl
from sklearn.externals import joblib
from azureml.core.model import Model

ws = Workspace.from_config('./config.json')

from azureml.core.webservice import Webservice, AciWebservice, AksWebservice
service = AciWebservice(ws, "sentiment-scorer-korean")
# service = AksWebservice(ws, "sentiment-scorer-korean-aks")

# input_sample = pd.DataFrame({'id': pd.Series(['6471903'], dtype='int64'), 'document': pd.Series(['진짜 별로다 헐 ㅡ'], dtype='object')})
from load_dataset import testdata as input_sample

test = json.dumps({"data": input_sample.values.tolist()})
result = service.run(input_data=bytes(test, encoding="utf8"))

input_sample['predicted'] = list(json.loads(result).values())[0]
print(input_sample)
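# --- Sketch: the response shape the parsing above assumes ---
# list(json.loads(result).values())[0] picks the first value of the returned dict;
# with a conventional {'result': [...]} payload it is equivalent to the explicit
# form below (placeholder response values).
import json

result = json.dumps({'result': [0, 1, 0]})        # placeholder service response
predicted = json.loads(result)['result']
print(predicted)                                  # -> [0, 1, 0]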
Example #5
    with open("aml_config/aci_webservice.json") as f:
        config = json.load(f)
except:
    print('No new model, thus no deployment on ACI')
    #raise Exception('No new model to register as production model perform better')
    sys.exit(0)

service_name = config['aci_name']
print("Service :", service_name)
# Get the hosted web service
service = AciWebservice(ws, service_name)

# Input for Model with all features
# load the dataset
test_file = os.path.join('..', 'data', 'preprocessed', 'X_test.hkl')
X = hkl.load(test_file)
X_test = X[:10]

json_data = json.dumps({"data": X_test.tolist()})
json_data = bytes(json_data, encoding='utf8')

print("Service URL:", service.scoring_uri)

try:
    prediction = service.run(json_data)
    print(prediction)
except Exception as e:
    result = str(e)
    print(result)
    raise Exception('ACI service is not working as expected')
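# --- Sketch: calling the same service over plain HTTP ---
# service.run() wraps a POST against the scoring URI printed above; a minimal
# equivalent with the 'requests' library, reusing the json_data payload.
import requests

headers = {'Content-Type': 'application/json'}
response = requests.post(service.scoring_uri, data=json_data, headers=headers)
print(response.json())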
Example #6
import json

import torch
from PIL import Image
from torchvision import transforms


def preprocess(image_file):
    """Preprocess the input image."""
    data_transforms = transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])

    image = Image.open(image_file)
    image = data_transforms(image).float()
    image = image.clone().detach()
    image = image.unsqueeze(0)
    return image.numpy()


# 'service' is an already-deployed AciWebservice instance (see the earlier examples).
input_data = preprocess('test_img.jpg')
result = service.run(input_data=json.dumps({'data': input_data.tolist()}))
print(result)

# Clean up

# Once you no longer need the web service, you can delete it with a simple API call.

service.delete()