Example No. 1
def init():
    global model
    global username
    global git_repo_url
    global gitAPI
    global jenkins_key
    global jenkins_url
    global sql_server
    global sql_database
    global sql_username
    global sql_password
    global used_file_extensions

    # TODO: Change model name
    model_name = 'TestAutoModel'
    # TODO: Update list of file extensions used in your project
    used_file_extensions = ['swift', 'pdf']

    model_path = Model.get_model_path(model_name=model_name)
    model = joblib.load(model_path)

    load_dotenv(verbose=True, dotenv_path=Path('.') / 'server_files' / 'env')
    username = os.getenv("USERNAME")
    git_repo_url = os.getenv("GIT_REPO_URL")
    gitAPI = os.getenv("GIT_KEY")
    jenkins_key = os.getenv("JENKINS_KEY")
    jenkins_url = os.getenv("JENKINS_URL")
    sql_server = os.getenv("SQL_SERVER")
    sql_database = os.getenv("SQL_DATABASE")
    sql_username = os.getenv("SQL_USERNAME")
    sql_password = os.getenv("SQL_PASSWORD")
Example No. 2
def init():
    global g_tf_sess

    svc_pr_password = "******"
 
    svc_pr = ServicePrincipalAuthentication(
        tenant_id="72f988bf-86f1-41af-91ab-2d7cd011db47",
        service_principal_id="8a3ddafe-6dd6-48af-867e-d745232a1833",
        service_principal_password="******")

    ws = Workspace(
        subscription_id="c46a9435-c957-4e6c-a0f4-b9a597984773",
        resource_group="mlops",
        workspace_name="gputraining",
        auth=svc_pr
        )
    model_root = os.getenv('AZUREML_MODEL_DIR')
    # Pull down the model from the workspace
    model_path = Model.get_model_path("tf-dnn-mnist")
    tf_model_folder = 'model'
    # Create output folders in the current directory
    os.makedirs('./outputs', exist_ok=True)
    os.makedirs('./outputs/model', exist_ok=True)

    # Construct a graph to execute
    tf.reset_default_graph()
    saver = tf.train.import_meta_graph(os.path.join(model_path, 'tf-dnn-mnist.meta'))
    g_tf_sess = tf.Session()
    #saver.restore(g_tf_sess, os.path.join(model_path, tf_model_folder, 'tf-dnn-mnist.model'))
    saver.restore(g_tf_sess, os.path.join(model_path, 'tf-dnn-mnist'))
Example No. 3
def init():
    # Runs when the pipeline step is initialized
    global model

    # load the model
    model_path = Model.get_model_path('classification_model')
    model = joblib.load(model_path)
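
How this init() is paired with scoring in a batch pipeline is not shown; below is a minimal run() sketch, assuming the ParallelRunStep convention where mini_batch is a list of file paths and the loaded model is a scikit-learn estimator (pandas and os imports are assumed).

def run(mini_batch):
    # Hypothetical companion to the init() above; not part of the original example.
    results = []
    for file_path in mini_batch:
        data = pd.read_csv(file_path)       # assumes each file is a CSV of feature rows
        predictions = model.predict(data)   # 'model' was loaded in init()
        for prediction in predictions:
            results.append(f"{os.path.basename(file_path)}: {prediction}")
    return results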
Example No. 4
def init():
    global model_p
    print("Executed")
    model_path = Model.get_model_path('optimal_model.joblib')
    #model_path = Model.get_model_path('model_automl')
    print(model_path)
    model_p = joblib.load(model_path)
Example No. 5
def init():
    # Runs when the pipeline step is initialized
    global model

    # load the model
    model_path = Model.get_model_path('diabetes_model')
    model = joblib.load(model_path)
Example No. 6
def init():
    global model
    global inputs_dc, prediction_dc
    # The AZUREML_MODEL_DIR environment variable indicates
    # a directory containing the model file you registered.
    model_path = Model.get_model_path(model_name="model")
    model = joblib.load(model_path)
    inputs_dc = ModelDataCollector("sample-model", designation="inputs", feature_names=["feat1", "feat2", "feat3", "feat4"])
    prediction_dc = ModelDataCollector("sample-model", designation="predictions", feature_names=["prediction"])
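
The two ModelDataCollector instances above only become useful inside run(); a sketch of that pairing, assuming the azureml-monitoring collect() API and a JSON payload with a "data" field (numpy and json imports are assumed).

def run(raw_data):
    # Hypothetical run(); the payload shape and the collect() calls are assumptions.
    data = np.array(json.loads(raw_data)["data"])
    predictions = model.predict(data)
    inputs_dc.collect(data)              # record the input features
    prediction_dc.collect(predictions)   # record the matching predictions
    return predictions.tolist()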
Example No. 7
def init():

    # retrieve the path to the model file using the model name
    model_name = 'automl_best_model'

    global model

    model_path = Model.get_model_path(model_name)
    model = joblib.load(model_path)
Example No. 8
def init():
    global model
    # AZUREML_MODEL_DIR is an environment variable created during deployment. Join this path with the filename of the model file.
    # It holds the path to the directory that contains the deployed model (./azureml-models/$MODEL_NAME/$VERSION).
    # If there are multiple models, this value is the path to the directory containing all deployed models (./azureml-models).
    model_path = Model.get_model_path('new_model')
    #model_path = os.path.join(os.getenv('AZUREML_MODEL_DIR'), 'trained_model.pkl')
    # Deserialize the model file back into a sklearn model
    model = joblib.load(model_path)
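
The comments above describe resolving the model through AZUREML_MODEL_DIR instead of Model.get_model_path; a minimal sketch of that alternative, assuming a single deployed model whose file is named trained_model.pkl (the filename comes from the commented-out line, not verified).

import os
import joblib

model_dir = os.getenv('AZUREML_MODEL_DIR', '.')
# With a single deployed model, the directory holds just that model's files.
model_path = os.path.join(model_dir, 'trained_model.pkl')
model = joblib.load(model_path)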
Example No. 9
def init():
    global g_tf_sess

    # pull down model from workspace
    model_path = Model.get_model_path("mnist")

    # construct a graph to execute
    tf.reset_default_graph()
    saver = tf.train.import_meta_graph(os.path.join(model_path, 'mnist-tf.model.meta'))
    g_tf_sess = tf.Session(config=tf.ConfigProto(device_count={'GPU': 0}))
    saver.restore(g_tf_sess, os.path.join(model_path, 'mnist-tf.model'))
Example No. 10
def init():
    global g_tf_sess

    # Pull down the model from the workspace
    model_path = Model.get_model_path("mnist")

    # Construct a graph to execute
    tf.reset_default_graph()
    saver = tf.train.import_meta_graph(os.path.join(model_path, 'mnist-tf.model.meta'))
    g_tf_sess = tf.Session()
    saver.restore(g_tf_sess, os.path.join(model_path, 'mnist-tf.model'))
Example No. 11
def main(args):
    # Load model
    print("Loading model")
    model_path = Model.get_model_path(model_name=args.model_name,
                                      version=args.model_version)
    with open(model_path, "rb") as model_file:
        my_model = joblib.load(filename=model_file)

    # Load mounted dataset path
    print("Get file dataset mount paths")
    input_path = os.environ.get(args.input_dataset, None)
    output_path = os.environ.get(
        f"AZUREML_DATAREFERENCE_{args.output_dataset}", None)
    print(f"Input mount path: {input_path}")
    print(f"Output mount path: {output_path}")

    # Create output path
    print("Creating output path and /outputs folder")
    os.makedirs(name=output_path, exist_ok=True)

    # Get input file paths list
    print("Get input file paths")
    paths = []
    for root, dirs, files in os.walk(input_path):
        for filename in files:
            if ".parquet" in filename:
                path = os.path.join(root, filename)
                paths.append(path)
    print(f"Path List: {paths}")

    # Create one large dataframe from all files
    if len(paths) > 0:
        print("Creating one large pandas dataframe")
        df = pd.read_parquet(path=paths.pop(), engine="auto")
        for path in paths:
            df_temp = pd.read_parquet(path=path, engine="auto")
            df = df.append(df_temp)  # reassign: DataFrame.append returns a new frame
    else:
        print("File dataset does not include any files")
        return

    # Score data
    print("Scoring data in dataframe")
    num_rows = df.shape[0]
    predictions = my_model.predict(df)  #.reshape((num_rows, 1))
    result = df
    result["predictions"] = predictions

    # Save parquet for training
    print("Saving Parquet file for training")
    result.to_parquet(path=os.path.join(output_path, "result.parquet"))
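
main() reads model_name, model_version, input_dataset, output_dataset, and output_path from its args object, but the parser is not shown; a hedged sketch of what it might look like, with flag names inferred from the attribute names.

import argparse

def parse_arguments():
    # Hypothetical parser; only the attribute names are taken from main() above.
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", type=str, required=True)
    parser.add_argument("--model_version", type=int, default=None)
    parser.add_argument("--input_dataset", type=str, required=True)
    parser.add_argument("--output_dataset", type=str, required=True)
    parser.add_argument("--output_path", type=str, required=True)
    return parser.parse_args()

if __name__ == "__main__":
    main(parse_arguments())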
Example No. 12
def init():
    global model

    model_path = Model.get_model_path('qubvel-segmentation_models-u-net')

    BACKBONE = 'efficientnetb3'
    CLASSES = ['car']

    n_classes = 1 if len(CLASSES) == 1 else (
        len(CLASSES) + 1)  # case for binary and multiclass segmentation
    activation = 'sigmoid' if n_classes == 1 else 'softmax'

    model = sm.Unet(BACKBONE, classes=n_classes, activation=activation)
    model.load_weights(os.path.join(model_path, 'best_model.h5'))
Example No. 13
def init():
    """
    This function loads the bert model, bert tokenizer, and class names from the model
    directory. The contents of the folder can also be seen under Artifacts in the model registry.
    """

    global model
    global tokenizer
    global classes

    model_dir = Model.get_model_path(model_name='page_binary_bert', version=2)

    model = BertForSequenceClassification.from_pretrained(model_dir)
    tokenizer = BertTokenizer.from_pretrained(model_dir)
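
As a usage sketch only: how the tokenizer and BertForSequenceClassification model loaded above might classify a piece of text. This assumes PyTorch and a recent Hugging Face transformers version whose model outputs expose .logits; the original example never populates the classes global, so the raw class index is returned.

import torch

def predict(text):
    # Hypothetical helper; not part of the original scoring script.
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    return int(outputs.logits.argmax(dim=-1))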
Example No. 14
def init():

    global model
    model_path = Model.get_model_path("mask_rcnn_horovod")

    # initialize the inference configuration
    config = LesionBoundaryInferenceConfig()

    # initialize the Mask R-CNN model for inference
    model = modellib.MaskRCNN(mode="inference",
                              config=config,
                              model_dir=LOGS_AND_MODEL_DIR)

    model.load_weights(os.path.join(model_path, 'mask_rcnn_lesion_0020.h5'),
                       by_name=True)
Example No. 15
def init():
    try:
        print('Loading Model')
        model_params = parse_args()
        aml_model = get_model(model_name=model_params[0],
                              model_version=model_params[1],
                              tag_name=model_params[2],
                              tag_value=model_params[3])
        global model
        model_path = Model.get_model_path(model_name=aml_model.name,
                                          version=aml_model.version)
        model = joblib.load(model_path)
        print(f'model:{aml_model.name} downloading is successful')
    except Exception as ex:
        print(ex)
Example No. 16
def init():
    """
    Initializer called once per node that runs the scoring job. Parse command
    line arguments and get the right model to use for scoring.
    """
    try:
        print("Initializing batch scoring script...")

        # Get the model using name/version/tags filter
        model_filter = parse_args()
        amlmodel = get_model(model_name=model_filter[0],
                             model_version=model_filter[1],
                             tag_name=model_filter[2],
                             tag_value=model_filter[3])

        # Load the model using name/version found
        global model
        modelpath = Model.get_model_path(model_name=amlmodel.name,
                                         version=amlmodel.version)
        model = joblib.load(modelpath)
        print("Loaded model {}".format(model_filter[0]))
    except Exception as ex:
        print("Error: {}".format(ex))
Example No. 17
                              preprocessing_function=binarize_mask)

image_datagen = (keras.preprocessing.image.ImageDataGenerator(
    **generator_config_images))

mask_datagen = (keras.preprocessing.image.ImageDataGenerator(
    **generator_config_masks))

image_generator = (image_datagen.flow_from_directory(PATH_IMAGES,
                                                     target_size=(224, 224),
                                                     class_mode=None,
                                                     seed=SEED))

mask_generator = (mask_datagen.flow_from_directory(PATH_MASKS,
                                                   target_size=(224, 224),
                                                   class_mode=None,
                                                   seed=SEED))

# Download the model architecture from AzureML
run = Run.get_context()
ws = run.experiment.workspace
path_model = Model.get_model_path("segmentation_new", version=2, _workspace=ws)

# Load and train the model
loaded_model = keras.models.load_model(path_model)
train_generator = zip(image_generator, mask_generator)
loaded_model.fit(train_generator, epochs=3, steps_per_epoch=100)

# Save the model in the path specified of the compute target
loaded_model.save(args.trained_model)
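
The training snippet saves to args.trained_model, but the argument parser is omitted; a minimal sketch of the assumed definition (the flag name is a guess).

import argparse

parser = argparse.ArgumentParser()
# Hypothetical flag; the snippet only shows that args.trained_model holds an output path.
parser.add_argument("--trained_model", type=str, required=True)
args = parser.parse_args()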
Example No. 18
def init():
    global model
    model_path = Model.get_model_path("moa_prediction_model")
    print("Model Path is  ", model_path)
    model = joblib.load(model_path)
Example No. 19
    ]

    OPTIMIZER = tf.keras.optimizers.Adam(learning_rate=LR,
                                         beta_1=0.9,
                                         beta_2=0.999)
    m = model_tools.get_multiclass_model(depth=DEPTH,
                                         nclasses=NCLASSES,
                                         optim=OPTIMIZER,
                                         loss=get_gen_dice,
                                         mets=METRICS,
                                         bias=BIAS)

    # if a model directory is provided, reload the previously trained model and weights
    if args.model_id:
        # we will package the 'models' directory within the 'azure' directory submitted with the experiment run
        model_dir = Model.get_model_path(args.model_id, _workspace=ws)
        #    model_dir = os.path.join('./models', args.model_id, '1', 'outputs')

        # load our previously trained model and weights
        model_file = glob.glob(os.path.join(model_dir, '*.h5'))[0]
        weights_file = glob.glob(os.path.join(model_dir, '*.hdf5'))[0]
        m, checkpoint = model_tools.retrain_model(
            model_file,
            checkpoint,
            evaluation,
            'mean_iou',
            weights_file,
            custom_objects={'get_gen_dice': get_gen_dice},
            lr=LR)
        # TODO: make this dynamic
        initial_epoch = 100
Example No. 20
def init():
    global tokenizer, model
    tokenizer = BertTokenizer.from_pretrained('bert-base-cased')
    model_dir = Model.get_model_path('bert-mrpc')
    model = TFBertForSequenceClassification.from_pretrained(model_dir)
Example No. 21
def init():
    global model
    model_path = Model.get_model_path("classi_model")
    model = joblib.load(model_path)
Example No. 22
def init():
    global model
    model = joblib.load(Model.get_model_path('classi_model'))
Example No. 23
import pickle
import json
import numpy
from azureml.core import Workspace
from azureml.core import Model, Run
from sklearn.externals import joblib
import argparse

##------------- Get Workspace
run = Run.get_context()
exp = run.experiment
ws = run.experiment.workspace

##------------- Get Arguments

parser = argparse.ArgumentParser("train")
parser.add_argument("--modelName", type=str)
args = parser.parse_args()

##------------- Model Scoring

model_path = Model.get_model_path(model_name=args.modelName)
model = joblib.load(model_path)

rawdata = '{ "data" : [4.996352,41.68612,41.79799,4.998839,5.051471,27.01976,28.5,36,39.6,22.7,921.2494,2081.41,2170.84,1017.489,24.68081,29.16544,29.65642,29.15765,21.35513,140.4473,133.4049,4.998026,63.68074,25.13597,32.08001,5.075305,27.029,7.516817,54.95351,4.998026,60,40,13,4.933381,11,5.000257,5.001157,28283.33,26892.26,18333.06641]}'

data = json.loads(rawdata)['data']
data = [numpy.array(data)]
result = model.predict(data)

print("Result =>", result.tolist())
Example No. 24
def load_model(modelName):
    modelPath = Model.get_model_path(modelName)
    model = joblib.load(modelPath)
    return model
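
A quick usage note: the helper resolves a registered model by name and deserializes it with joblib, so a call site looks like the following ('my_registered_model' and X_test are placeholders).

model = load_model("my_registered_model")
predictions = model.predict(X_test)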
Example No. 25
def init():
    global model
    # Get the path to the registered model file and load it
    model_path = Model.get_model_path('classification_model')
    model = joblib.load(model_path)
Example No. 26
        def inner(*args, **kwargs):
            run = kwargs.get("run")
            ws = run.experiment.workspace

            def register_model(model_name, model_path):
                model_config = next(
                    iter(
                        filter(lambda x: x["name"] == model_name,
                               self.output_reg_models)))

                tags = model_config.get("tags")
                description = model_config.get("description")

                Model.register(workspace=ws,
                               model_path=model_path,
                               model_name=model_name,
                               tags=tags,
                               description=description)

            def register_dataset(dataset_name, dataframe):
                dataset_config = next(
                    iter(
                        filter(lambda x: x["name"] == dataset_name,
                               self.output_reg_datasets)))

                datastore = dataset_config.get("datastore") or "default"
                description = dataset_config.get("description")
                tags = dataset_config.get("tags") or {}  # guard against a missing "tags" entry

                if datastore == "default":
                    ds = ws.get_default_datastore()
                else:
                    ds = Datastore.get(workspace=ws, datastore_name=datastore)

                target_path = f'experiment/{run.experiment.name}/run/{run.number}/out/{dataset_name}'

                default_output_dataset_tags = {
                    # Dataset.Tabular.register_pandas_dataframe always writes a parquet
                    "format": self.OUTPUT_FORMAT,
                    "experiment": run.experiment.name,
                    "run": run.number
                }

                output_dataset_tags = {**default_output_dataset_tags, **tags}

                Dataset.Tabular.register_pandas_dataframe(
                    dataframe,
                    target=(ds, target_path),
                    name=dataset_name,
                    description=description,
                    tags=output_dataset_tags)

            dataframes = {}
            for i, d in enumerate(self.input_datasets or []):
                dkey = self.named_input_keys[i]
                dataframes[dkey] = run.input_datasets[
                    dkey].to_pandas_dataframe()

            for d in self.input_reg_datasets or []:
                dname = d["name"]
                dver = d.get("version")
                if dver == "latest":
                    dver = None

                dataframes[dname] = Dataset.get_by_name(
                    ws, name=dname, version=dver).to_pandas_dataframe()

            kwargs["dataframes"] = dataframes

            models = {}
            for m in self.input_reg_models or []:
                mname = m["name"]
                mver = m.get("version")
                if mver == "latest":
                    mver = None

                models[mname] = Model.get_model_path(model_name=mname,
                                                     version=mver,
                                                     _workspace=ws)

            kwargs["models"] = models

            pipeline_data_dirs = []
            for p in self.input_pipeline_data_dirs or []:
                pipeline_data_dirs.append(p["name"])

            kwargs["pipeline_data_dirs"] = pipeline_data_dirs

            register_dataframes, register_models = func(*args, **kwargs)

            for k in register_dataframes or {}:
                v = register_dataframes[k]
                register_dataset(dataset_name=k, dataframe=v)

            for k in register_models or {}:
                v = register_models[k]
                register_model(model_name=k, model_path=v)
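
For orientation, a sketch of the kind of function this decorator is meant to wrap: it receives the prepared dataframes, model paths, and pipeline data directories through kwargs and returns the two dictionaries that inner() then registers. All names below are illustrative.

def train_step(*args, **kwargs):
    # Hypothetical decorated function; dataset and model names are placeholders.
    df = kwargs["dataframes"]["input_dataset"]     # pandas DataFrame loaded by inner()
    model_path = "outputs/model.pkl"               # wherever this step wrote its model
    register_dataframes = {"scored_dataset": df}
    register_models = {"trained_model": model_path}
    return register_dataframes, register_models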
Example No. 27
if __name__ == "__main__":
    run = Run.get_context()

    args = get_args()
    model_name = args.model_name
    ouput_dataset_name = args.ouput_dataset_name
    test_dataset_name = args.test_dataset_name
    target_column_name = args.target_column_name
    print("args passed are: ")

    print(model_name)
    print(test_dataset_name)
    print(ouput_dataset_name)
    print(target_column_name)

    model_path = Model.get_model_path(model_name)
    model_file_name = get_model_filename(run, model_name, model_path)
    print(model_file_name)
    fitted_model = get_model(model_path, model_file_name)

    X_test_df, y_test = get_data(
        run,
        fitted_model,
        target_column_name,
        test_dataset_name,
    )

    infer_forecasting_dataset_tcn(X_test_df, y_test, fitted_model,
                                  args.output_path, ouput_dataset_name)
Example No. 28
def init():
    global model
    model_path = Model.get_model_path('bestHpModel')
    model = joblib.load(model_path)
Example No. 29
def init():
    global model
    # Get the path to the deployed model file and load it
    model_path = Model.get_model_path('diabetes_model')
    model = joblib.load(model_path)
Example No. 30
def init():
    global model
    model_path = Model.get_model_path('best-model-machine-cpu-hd')

    model = joblib.load(model_path)