Example #1
            def register_model(model_name, model_path):
                model_config = next(
                    filter(lambda x: x["name"] == model_name,
                           self.output_reg_models))

                tags = model_config.get("tags")
                description = model_config.get("description")

                Model.register(workspace=ws,
                               model_path=model_path,
                               model_name=model_name,
                               tags=tags,
                               description=description)
Example #2
def upload_model(ws: Workspace, config: MoveModelConfig) -> Model:
    """
    Uploads an InnerEye model to an AzureML workspace.
    :param ws: The AzureML workspace
    :param config: the model move configuration
    :return: the imported Model
    """
    model_path, environment_path = config.get_paths()
    with open(model_path / MODEL_JSON, 'r') as f:
        model_dict = json.load(f)

    # Find the folder containing the final model.
    final_model_path = model_path / FINAL_MODEL_FOLDER
    full_model_path = (final_model_path if final_model_path.exists()
                       else model_path / FINAL_ENSEMBLE_MODEL_FOLDER)

    new_model = Model.register(ws,
                               model_path=str(full_model_path),
                               model_name=model_dict['name'],
                               tags=model_dict['tags'],
                               properties=model_dict['properties'],
                               description=model_dict['description'])
    env = Environment.load_from_directory(str(environment_path))
    env.register(workspace=ws)
    print(f"Environment {env.name} registered")
    return new_model
Example #3
    def _register(self, run_id, asset_name, asset_label=None):
        """
        This method register a new model as a AML reference.
        :param run_id: run's identifier
        :param asset_name: name of asset of the current run
        :param asset_label: name of label of the current run
        :return: None
        """
        tags = {"run_id": run_id, "asset_name": asset_name}

        if asset_label is not None:
            tags["asset_label"] = asset_label

        register_path = os.path.join(OUTPUTS_FOLDER, AML_MLAPP_FOLDER)
        Model.register(self.ws,
                       model_path=register_path,
                       model_name=get_model_register_name(run_id),
                       tags=tags,
                       description=asset_name)
Example #4
    def register(self, validated_model_folder, registered_model_folder,
                 azure_ml_logs_provider, web_service_deployer):

        IGNORE_TRAIN_STEP = azure_ml_logs_provider.get_tag_from_brother_run(
            "prep_data.py", "IGNORE_TRAIN_STEP")
        if IGNORE_TRAIN_STEP:
            print("Ignore register step")
            self._execute_sampling_pipeline()
            print("launch sampling state")
            return

        _, classifier = disc_network()
        classifier_name = "classifier.hdf5"
        validated_model_file = os.path.join(validated_model_folder,
                                            classifier_name)
        classifier.load_weights(validated_model_file)

        self.run.upload_file(name=self.config.MODEL_NAME,
                             path_or_stream=validated_model_file)

        #_ = self.run.register_model(model_name=self.config.MODEL_NAME,
        #                        tags={'Training context':'Pipeline'},
        #                        model_path=validated_model_file)

        Model.register(workspace=self.run.experiment.workspace,
                       model_path=validated_model_file,
                       model_name=self.config.MODEL_NAME,
                       tags={'Training context': 'Pipeline'})

        acc = azure_ml_logs_provider.get_log_from_brother_run(
            "eval_model.py", "acc")
        print("acc :", acc)
        # Deploy the model if it passes the accuracy threshold
        if web_service_deployer.to_deploy(acc):
            print("deploying...")
            web_service_deployer.deploy()
            print("model deployed")

        # not that important, except for the test
        registered_model_file = os.path.join(registered_model_folder,
                                             classifier_name)
        os.makedirs(registered_model_folder, exist_ok=True)
        _ = shutil.copy(validated_model_file, registered_model_file)
Example #5
def log_results(model_accuracy, output_folder, model_name, matrix, labels,
                run):
    run.log('model_accuracy', model_accuracy)
    run.log_image('Confusion Matrix Plot',
                  plot=create_confusion_matrix_plot(matrix, labels))
    model = Model.register(workspace=run.experiment.workspace,
                           model_path=output_folder,
                           model_name=model_name,
                           model_framework=Model.Framework.SCIKITLEARN,
                           model_framework_version=sklearn.__version__,
                           tags={'Training context': 'Pipeline'})
    run.log('model_name', model.name)
    run.log('model_version', model.version)
Example #6
def deploy_pickled_model(amls_config, workspace):
    """
    Publish a pickled model to AMLS model repository

    :param amls_config:
    :param workspace:
    :return:
    """
    model_path = '../model/model.pkl'

    logger.info(f"Deploying model.")
    model = Model.register(model_path=model_path,
                           model_name='model',
                           tags=amls_config['tags'],
                           description=amls_config['description'],
                           workspace=workspace)
    return model
Example #7
def register_model(**parameters):
    zsh('az ml model list --output json > az_ml_model_list.json')
    model_name = parameters['model_name']
    # model_id = parameters['id']
    # model_version = parameters['version']
    ws = parameters['workspace']
    with open('az_ml_model_list.json') as models_json:
        models = json.load(models_json)
        matched_models = [
            model for model in models if model['name'] == model_name
        ]
        if len(matched_models) == 1:
            print('Found the model\n')
            model = Model(workspace=ws, name=model_name)
        elif len(matched_models) == 0:
            print('Provided model {} has not been found\n'.format(model_name))
            print('registering new model in Azure ...\n')
            model = Model.register(**parameters)
        elif len(matched_models) > 1:
            model_name = matched_models[0]['name']
            model = Model(workspace=ws, name=model_name)

    zsh('rm az_ml_model_list.json')
    return model
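
# A hypothetical invocation of the helper above; the workspace object `ws`,
# the model name, and the local path are illustrative assumptions.
registered = register_model(workspace=ws,
                            model_name='churn_model',
                            model_path='outputs/model.pkl')
print(registered.name, registered.version)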
Example #8
    plt.plot(history.history["val_loss"], label="validation")
    plt.title("Loss Plot")
    plt.xlabel("Epochs")
    plt.ylabel("Loss")
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(history.history["acc"], label="training")
    plt.plot(history.history["val_acc"], label="validation")
    plt.title("Accuracy Plot")
    plt.xlabel("Epochs")
    plt.ylabel("Accuracy")
    plt.legend()

    plt.savefig("/tmp/learning-curves.png")
    mlflow.log_artifact("/tmp/learning-curves.png")

if run.id.startswith("OfflineRun"):
    print("This appears to be an offline run; the model will not be registered.")
else:
    with open("conf.yaml", "r") as f:
        metadata = yaml.safe_load(f)["metadata"]

    Model.register(
        run.experiment.workspace,
        model_path="./outputs/model",
        model_name=metadata["model_name"],
        description=metadata["description"],
        tags=metadata["tags"],
    )
# "run" is a reference to a completed experiment run

# List the files generated by the experiment
for file in run.get_file_names():
    print(file)

# Download a named file
run.download_file(name='outputs/model.pkl', output_file_path='model.pkl')


from azureml.core import Model

model = Model.register(workspace=ws,
                       model_name='classification_model',
                       model_path='model.pkl', # local path
                       description='A classification model',
                       tags={'dept': 'sales'},
                       model_framework=Model.Framework.SCIKITLEARN,
                       model_framework_version='0.20.3')

#alternatively
run.register_model( model_name='classification_model',
                    model_path='outputs/model.pkl', # run outputs path
                    description='A classification model',
                    tags={'dept': 'sales'},
                    model_framework=Model.Framework.SCIKITLEARN,
                    model_framework_version='0.20.3')


#view registered models
from azureml.core import Model
for model in Model.list(ws):
    print(model.name, 'version:', model.version)
Example #10
# Import libraries
import argparse
import joblib
from azureml.core import Workspace, Model, Run

# Get parameters
parser = argparse.ArgumentParser()
parser.add_argument('--model_folder',
                    type=str,
                    dest='model_folder',
                    default="diabetes_model",
                    help='model location')
args = parser.parse_args()
model_folder = args.model_folder

# Get the experiment run context
run = Run.get_context()

# load the model
print("Loading model from " + model_folder)
model_file = model_folder + "/model.pkl"
model = joblib.load(model_file)

Model.register(workspace=run.experiment.workspace,
               model_path=model_file,
               model_name='diabetes_model',
               tags={'Training context': 'Pipeline'})

run.complete()
Example #11
import logging
import os
from azureml.core import Run, Model
import argparse

logging.basicConfig(level=logging.INFO)

log: logging.Logger = logging.getLogger(__name__)

parser = argparse.ArgumentParser("register")
parser.add_argument("--input", type=str, required=True)
args = parser.parse_args()

run = Run.get_context()
ws = run.experiment.workspace

Model.register(model_path=os.path.join(args.input, "parallel_run_step.txt"),
               model_name="parallel_model",
               workspace=ws)
Example #12
from azureml.core import Workspace
from azureml.core import Model

if __name__ == "__main__":
    ws = Workspace.from_config(path='./.azureml', _file_name='config.json')

    model = Model.register(model_name='model',
                           tags={'area': 'trabajo_cluod'},
                           model_path='outputs/model.pkl',
                           workspace=ws)
    print(model.name, model.id, model.version, sep='\t')
Example #13
import logging
import os
from azureml.core import Run, Model
import argparse

logging.basicConfig(level=logging.INFO)

log: logging.Logger = logging.getLogger(__name__)

parser = argparse.ArgumentParser("register")
parser.add_argument("--input", type=str, required=True)
args = parser.parse_args()

run = Run.get_context()
ws = run.experiment.workspace

Model.register(model_path=os.path.join(args.input, "model.json"),
               model_name="multifolder_model",
               workspace=ws)
AML_RESOURCE_GROUP = ""
AML_WORKSPACE_NAME = "ajagarw-demo-aml-ws"

svc_pr = ServicePrincipalAuthentication(
    tenant_id=AZURE_TENANT_ID,
    service_principal_id=AZURE_CLIENT_ID,
    service_principal_password=AZURE_CLIENT_SECRET)

ws = Workspace(workspace_name=AML_WORKSPACE_NAME,
               subscription_id=AML_SUBSCRIPTION_ID,
               resource_group=AML_RESOURCE_GROUP,
               auth=svc_pr)

model = Model.register(
    model_path="./artifacts/output",
    model_name="mlflow_sklearn_e2e",
    workspace=ws,
)

# ### Import SynapseML Predict

# In[4]:

from pyspark.sql.functions import col, pandas_udf, udf, lit
import azure.synapse.ml.predict as pcontext

# ### Set some input parameters
# Data is stored on ADLS; the model is stored on AML.
# The return type is int.

# In[5]:
Example #15
import argparse
import os
from azureml.core import Run, Model
from azureml.train.hyperdrive import HyperDriveRun
from shutil import copy2

parser = argparse.ArgumentParser()
parser.add_argument('--saved-model', type=str, dest='saved_model', help='path to saved model file')
parser.add_argument('--metrics_data', type=str, dest='metrics_data', help='metrics')
parser.add_argument('--model_name', type=str, dest='model_name', help='model name')
args = parser.parse_args()

saved_model = args.saved_model
print('saved_model')
print(saved_model)

metrics_data = args.metrics_data
print('metrics_data')
print(metrics_data)

model_name = args.model_name
print('model_name')
print(model_name)


model_output_dir = './model/'

os.makedirs(model_output_dir, exist_ok=True)
copy2(args.saved_model, model_output_dir)

ws = Run.get_context().experiment.workspace

model = Model.register(workspace=ws, model_name=model_name, model_path=model_output_dir)
Example #16
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.

import json
from azureml.core import Workspace, Model, VERSION
from azureml.core.authentication import ServicePrincipalAuthentication

print(VERSION)

with open("aml/config.json", "r") as f:
    config = json.load(f)

auth = ServicePrincipalAuthentication(
    config["tenant_id"],
    config["service_principal_id"],
    config["service_principal_password"]
)

ws = Workspace.create(
    name=config["workspace_name"],
    auth=auth,
    subscription_id=config['subscription_id'],
    resource_group=config['resource_group'],
    location=config['location'],
    exist_ok=True,
    show_output=True,
)

Model.register(ws, 'models/TinyYOLO.onnx', 'TinyYOLO')
Example #17
                                                    y,
                                                    test_size=0.2,
                                                    random_state=1)

knn = KNeighborsClassifier()
knn.fit(X_train, y_train)

score = knn.predict(X_test)

predictions = X_test.copy(deep=True)
predictions["Prediction"] = score
predictions["Actual"] = y_test

print(predictions)

if not os.path.isdir('outputs'):
    os.mkdir('outputs')

model_path = os.path.join('outputs', 'highspender.pkl')
dump(knn, model_path)

# Register model
from azureml.core import Model
from azureml.core.run import Run

run = Run.get_context()
workspace = run.experiment.workspace
model = Model.register(workspace=workspace,
                       model_name='highspender',
                       model_path=model_path)
Example #18
import numpy as np
import pandas as pd

# We can deploy a model as a real-time web service to several kinds of compute target: local compute, ACI (Azure Container Instances), AKS, an Azure Function, or IoT Edge modules.

# Azure ML uses containers as a deployment mechanism, packaging the model and the code to use it as an image that can be deployed to a container in your chosen compute target.

# Deploying the model involves the following steps:

# 1. Register a trained model

from azureml.core import Model

classi_model = Model.register(
    workspace=ws,
    model_name='classi_model',
    model_path='model.pkl',  # localpath
    description="classification model")

# Alternatively, if we have a reference to the Run used to train the model:

run.register_model(model_name='classi_model',
                   model_path='outputs/model.pkl',
                   description='A classification Model')

# Next we need an inference configuration, which consists of two things: 1. an entry (scoring) script that returns predictions, and 2. an environment definition in which that script runs.

# Need to create Entry Script
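
# A minimal entry-script sketch, assuming the registered model name
# 'classi_model' from above and a JSON payload of the form
# {"data": [[...], ...]}; the exact input schema is an assumption for illustration.
import json
import joblib
import numpy as np
from azureml.core.model import Model

def init():
    global model
    # Resolve the local path of the registered model inside the container.
    model_path = Model.get_model_path('classi_model')
    model = joblib.load(model_path)

def run(raw_data):
    # Deserialize the request, score it, and return JSON-serializable output.
    data = np.array(json.loads(raw_data)['data'])
    predictions = model.predict(data)
    return json.dumps(predictions.tolist())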

Example #19
import json
import joblib
import argparse
from azureml.core.run import Run, _OfflineRun
from azureml.core import Workspace, Model

# Get context
run = Run.get_context()
ws = Workspace.from_config() if isinstance(run, _OfflineRun) else run.experiment.workspace

if __name__ == "__main__":
    # Parse args
    parser = argparse.ArgumentParser()
    parser.add_argument('--model_name', type=str)
    parser.add_argument('--metrics_data', type=str)
    parser.add_argument('--saved_model', type=str)
    args = parser.parse_args()
    print(f'--model_name={args.model_name}')
    print(f'--metrics_data={args.metrics_data}')
    print(f'--saved_model={args.saved_model}')

    # load model
    loaded_model = joblib.load(args.saved_model)
    print(loaded_model)

    # Register model
    model = Model.register(workspace=ws,
                           model_path=args.saved_model,
                           model_name=args.model_name)
    print(model)
Example #20
blob_store.upload(src_dir=local_path, 
                  target_path=blob_path,
                  overwrite=True, 
                  show_progress=True)

#%% 
# ** Register the data as a dataset **
# %% now that the data is up on the blobstore we can register it as a dataset
# to keep track of its versions and make it easily accessible
dataset = Dataset.File.from_files(blob_store.path(blob_path + "/data.csv"))
dataset.register(ws, 
                 name="Campus_Recruitment_PCA_Training_Data",
                 create_new_version=True)

#%% 
# ** Upload and register the model as a Model **
#%% 
model = Model.register(workspace=ws,
                       model_name='Campus_Recruitment_PCA',  # Name of the registered model in your workspace.
                       model_path='./Upload/Model/model.pkl',  # Local file to upload and register as a model.
                       sample_input_dataset=dataset,
                       sample_output_dataset=None,
                       description='PCA model for dimension reduction of the Campus Recruitment Dataset')

print('Name:', model.name)
print('Version:', model.version)

Example #21
modelPath = args.model_path
modelName = args.model_name
modelDescription = args.model_description

print('Model Path: ', modelPath)
print('Model Name: ', modelName)
print('Model Description: ', modelDescription)

print('getting run context')
run_context = Run.get_context()
run_context.log('Author', 'Sam was here!')

ws = run_context.experiment.workspace

print('Model registration started')

Model.register(workspace=ws,
               model_path=modelPath,
               model_name=modelName,
               tags=None,
               properties=None,
               description=modelDescription,
               datasets=None,
               model_framework=None,
               model_framework_version=None,
               child_paths=None,
               sample_input_dataset=None,
               sample_output_dataset=None,
               resource_configuration=None)

print('Model registration completed')
Example #22
print(trainData.shape)

X = trainData[allfeatures[1:]].astype(float).values  # all columns except the first
y = trainData[allfeatures[0]].astype(float).values   # first column is the label

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

from sklearn.ensemble import GradientBoostingClassifier
model_GBM = GradientBoostingClassifier(random_state=42, verbose=1)
model_GBM.fit(X_train, y_train)
printScores(model_GBM.predict(X_train), y_train)
printScores(model_GBM.predict(X_test), y_test)


##------------- Serialize the Model

with open(args.modelName, 'wb') as f:
    pickle.dump(model_GBM, f)

##------------- Upload the Model file explicitly into artifacts

run.upload_file(name="./outputs/" + args.modelName, path_or_stream=args.modelName)
print("Uploaded the model {} to experiment {}".format(args.modelName, run.experiment.name))

##------------- Register the Model

model = Model.register(model_path=args.modelName,
                       model_name=args.modelName,
                       tags={'area': "QualityPrediction", 'type': "GBM"},
                       description="Quality prediction model",
                       workspace=ws)
Example #23
# 07-model-registration-azure.py
from azureml.core import Workspace
from azureml.core import Model

if __name__ == "__main__":
    ws = Workspace.from_config(path='./.azureml', _file_name='config.json')

    model = Model.register(model_name='digits_model',
                           tags={'area': 'udea_training'},
                           model_path='outputs/digits_model.pkl',
                           workspace=ws)
    print(model.name, model.id, model.version, sep='\t')
Example #24
)

ds = Datastore.get(ws, args.datastore)

ds.download("assets", args.dataset, overwrite=False)


# Load dataset
url = "assets/"+args.dataset
names = ['sepal-length', 'sepal-width', 'petal-length', 'petal-width', 'class']
dataset = read_csv(url, names=names)

# Split-out validation dataset
array = dataset.values
X = array[:,0:4]
y = array[:,4]
X_train, X_validation, Y_train, Y_validation = train_test_split(X, y, test_size=0.20, random_state=1)
# Make predictions on validation dataset
model = SVC(gamma='auto')
model.fit(X_train, Y_train)
predictions = model.predict(X_validation)
# Evaluate predictions
print(accuracy_score(Y_validation, predictions))
print(confusion_matrix(Y_validation, predictions))
print(classification_report(Y_validation, predictions))


joblib.dump(model, "trained_model1.pkl")

Model.register(ws, "trained_model1.pkl", "new_model")
# Reference: "Fully Convolutional Networks for Semantic Segmentation"
layer_names = ["block3_pool", "block4_pool", "block5_pool"]

layers = [base_model.get_layer(layer) for layer in layer_names]

decoder_output = build_decoder(*layers)
model = keras.Model(base_model.inputs, decoder_output)

model.compile(optimizer="adam",
              loss=keras.losses.BinaryCrossentropy(from_logits=True),
              metrics=["accuracy"])

if args.plot_model:
    keras.utils.plot_model(model,
                           to_file="vgg16_segmentation.png",
                           show_shapes=True,
                           show_layer_names=True)

model.save(args.path_model)

# Register the model
run = Run.get_context()
ws = run.experiment.workspace

Model.register(workspace=ws,
               model_path=args.path_model,
               model_name='segmentation_new',
               description='Instance of untrained model',
               model_framework=Model.Framework.TENSORFLOW,
               model_framework_version='2.3')
Example #26
# 07-model-registration-azure.py
from azureml.core import Workspace
from azureml.core import Model

if __name__ == "__main__":
    ws = Workspace.from_config()

    model = Model.register(model_name='cifar_10',
                           tags={'area': 'udea_training'},
                           model_path='outputs/cifar_10_model.pkl',
                           workspace=ws)
    print(model.name, model.id, model.version, sep='\t')
Example #27
                                     lr=args.learning_rate)
criterion = nn.CrossEntropyLoss()

if args.cuda: decoder.cuda()

start = time.time()
all_losses = []
loss_avg = 0
print("Start training for {} epochs...".format(args.n_epochs))
for epoch in tqdm(range(1, args.n_epochs + 1)):
    loss = train(*random_training_set(args.chunk_len, args.batch_size))
    loss_avg += loss

    if epoch % args.print_every == 0:
        print('[%s (%d %d%%) %.4f]' %
              (time_since(start), epoch, epoch / args.n_epochs * 100, loss))
        print(generate(decoder, 'Wh', 100, cuda=args.cuda), '\n')

# Saving model to outputs/ in Azure ML
save_filename = "outputs/" + args.modelname + ".pt"
torch.save(decoder.state_dict(), save_filename)

# Use the Model class and its 'register' method to upload the model to Azure ML
# (docs: https://docs.microsoft.com/en-us/python/api/azureml-core/azureml.core.model.model?view=azure-ml-py)
model = Model.register(workspace=ws,
                       model_name=args.modelname,
                       model_path="outputs/")

# Complete the run
run.complete()
Example #28
# Initialize a workspace
ws = Workspace.from_config(
    "C:/Users/Danilo.Bento/Icon Dropbox/DEVDATA/RO/DEVELOPMENT/SIB2/dev/.azureml/config.json"
)
print('Workspace name: ' + ws.name,
      'Azure region: ' + ws.location,
      'Subscription id: ' + ws.subscription_id,
      'Resource group: ' + ws.resource_group,
      'Workspace connected',
      sep='\n')

model = Model.register(
    workspace=ws,
    model_name='mod5_test',
    model_path='C:/Users/Danilo.Bento/Icon Dropbox/DEVDATA/RO/DEVELOPMENT/SIB2/tutorials/model5/mod5_deploy/mod5_azure',  # local path
    #child_paths=['C:/Users/Danilo.Bento/Icon Dropbox/DEVDATA/RO/DEVELOPMENT/SIB2/tutorials/model5/mod5_deploy/mod5_azure/yolov5/runs/exp0/weights/last.pt'],
    description='Test model based on model 5 done in phase 1. It trained for 250 epochs. Contains the original folder structure.',
    #tags={'dept': 'sales'},
    model_framework=Model.Framework.PYTORCH,
    model_framework_version='1.6.0')

for model in Model.list(ws):
    # Get model name and auto-generated version
    print(model.name, 'version:', model.version)

# import logging
# logging.basicConfig(level=logging.DEBUG)
# print(Model.get_model_path(model_name='mod5_test'))
Example #29
                                                    np.array(df[label],
                                                             dtype=int),
                                                    test_size=0.9)

# train model
mod = DecisionTreeClassifier(max_depth=5)
mod.fit(x_train, y_train)

# make sure we can predict a value
preds = mod.predict(x_test)

cm = confusion_matrix(y_test, preds)
common.plot_confusion_matrix(cm, ['0', '1'])
plt.savefig("outputs/confusion_matrix.png")

precision = precision_score(y_test, preds)
accuracy = accuracy_score(y_test, preds)

run.log('precision', precision)
run.log('accuracy', accuracy)

with open('outputs/decision_tree_model.pkl', 'wb') as f:
    pickle.dump(mod, f)

Model.register(run.experiment.workspace,
               'outputs/decision_tree_model.pkl',
               'decision_tree_model',
               tags={
                   'precision': precision,
                   'accuracy': accuracy
               })
Example #30
# 07-model-registration-azure.py
from azureml.core import Workspace
from azureml.core import Model

if __name__ == "__main__":
    ws = Workspace.from_config(path='./.azureml', _file_name='config.json')

    model = Model.register(model_name='titanic_model',
                           tags={'area': 'udea_project', 'scoring': 0.70},
                           model_path='outputs/clf.pkl',
                           workspace=ws)
    print(model.name, model.id, model.version, sep='\t')