def init():
    from sklearn.externals import joblib
    from azureml.datacollector import ModelDataCollector
    global model, inputs_dc, prediction_dc
    # The model trained in linear_reg.py was serialized to model.pkl
    model = joblib.load('model.pkl')
    inputs_dc = ModelDataCollector('model.pkl', identifier="inputs")
    prediction_dc = ModelDataCollector('model.pkl', identifier="prediction")
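
For context, a matching run() function would score requests with the model loaded above and log both sides through the collectors. A minimal sketch, assuming a pandas input_df and a scikit-learn model (the response format is illustrative, not from the original):

def run(input_df):
    import json
    # score the incoming frame, then record both input and output
    pred = model.predict(input_df)
    inputs_dc.collect(input_df)
    prediction_dc.collect(pred)
    return json.dumps({"prediction": pred.tolist()})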
Example #2
def init():
    # utils is assumed to export these project-specific helpers and globals
    from utils import load_model, texts_to_word_vec, saved_path, texts
    from gensim import models
    from azureml.datacollector import ModelDataCollector
    global model, inputs_dc, prediction_dc
    model = load_model('saved/model_json_week.h5')
    word2vec = models.Word2Vec.load(saved_path + 'model_word2vec.pkl')
    X, invalid_rows = texts_to_word_vec(word2vec, texts)
    inputs_dc = ModelDataCollector('model_json_week.h5', identifier="inputs")
    prediction_dc = ModelDataCollector('model_json_week.h5', identifier="prediction")
Example #3
def init():
    from sklearn.externals import joblib
    from azureml.datacollector import ModelDataCollector
    global model, inputs_dc, prediction_dc
    model = joblib.load('testModel2.pkl')

    inputs_dc = ModelDataCollector('testModel2.pkl', identifier="inputs")
    prediction_dc = ModelDataCollector('testModel2.pkl',
                                       identifier="prediction")
Example #4
def init():
    from sklearn.externals import joblib
    from azureml.datacollector import ModelDataCollector
    global model, inputs_dc, prediction_dc

    model = joblib.load('model.pkl')

    inputs_dc = ModelDataCollector("model.pkl", identifier="inputs")
    prediction_dc = ModelDataCollector("model.pkl", identifier="prediction")
Example #5
def init():
    from sklearn.externals import joblib
    from azureml.datacollector import ModelDataCollector
    global model, inputCollector, predictionCollector

    model = joblib.load('startupfunding.pkl')
    inputCollector = ModelDataCollector('startupfunding.pkl',
                                        identifier="inputs")
    predictionCollector = ModelDataCollector('startupfunding.pkl',
                                             identifier="prediction")
Example #6
def init():
    from sklearn.externals import joblib
    from azureml.datacollector import ModelDataCollector
    global model, inputs_dc, prediction_dc
    # Get the path to the model asset
    # local_path = get_local_path('mymodel.model.link')

    inputs_dc = ModelDataCollector("model.pkl", identifier="inputs")
    prediction_dc = ModelDataCollector("model.pkl", identifier="prediction")

    # model = model_load_function(local_path)
    model = joblib.load('model.pkl')
Example #7
def init():
    """Web Service Initialization

    Prepare the web service definition by authoring
    init() and run() functions. Test the functions
    before deploying the web service.
    """
    import json
    from azureml.datacollector import ModelDataCollector
    global inputs_dc, prediction_dc
    global model, labels

    # load_cntk is assumed to be a project-level helper that
    # deserializes the saved CNTK model graph
    model = load_cntk('model_cntk.pb')
    labels = json.load(open('labels.json'))

    inputs_dc = ModelDataCollector("model_cntk.pb", identifier="inputs")
    prediction_dc = ModelDataCollector("model_cntk.pb", identifier="prediction")
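
As the docstring advises, the functions can be smoke-tested locally before deployment. A minimal sketch, assuming a run() is defined alongside this init() and accepts a JSON payload (the sample payload shape is hypothetical):

if __name__ == '__main__':
    import json
    init()
    sample = json.dumps({"data": [[0.0, 1.0, 2.0]]})  # hypothetical payload
    print(run(sample))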
Example #8
def init():
    import pickle
    import numpy as np
    from azureml.datacollector import ModelDataCollector
    global inputs_dc, prediction_dc

    # load the model from file into a global object
    global model
    with open('sgd_automated_learn_v2.pkl', 'rb') as fid:
        # encoding='latin1' lets Python 3 read a pickle written by Python 2
        model = pickle.load(fid, encoding='latin1')

    # quick smoke test of the loaded model
    input_t = np.array([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]])
    print("model has been loaded ~~~~~~")
    print(model.predict(input_t))
    print("prediction test done")

    inputs_dc = ModelDataCollector("sgd_automated_learn_v2.pkl",
                                   identifier="inputs")
    prediction_dc = ModelDataCollector("sgd_automated_learn_v2.pkl",
                                       identifier="prediction")
Example #9
def init():
    import os
    from sklearn.externals import joblib
    from azure.storage.blob import BlockBlobService
    from azureml.datacollector import ModelDataCollector
    global inputs_dc, prediction_dc

    # Download the model
    if not os.path.exists(LOCAL_SYSTEM_DIRECTORY):
        os.makedirs(LOCAL_SYSTEM_DIRECTORY)

    if os.path.isfile(localFilePath):
        os.remove(localFilePath)

    # the AZURE_* constants and localFilePath are expected to be
    # defined at module level
    az_blob_service = BlockBlobService(account_name=AZURE_STORAGE_ACCOUNT_NAME,
                                       account_key=AZURE_STORAGE_ACCOUNT_KEY)
    az_blob_service.get_blob_to_path(AZURE_STORAGE_CONTAINER_NAME,
                                     AZURE_STORAGE_BLOB_NAME, localFilePath)

    # load the model file
    global model
    model = joblib.load(localFilePath)

    inputs_dc = ModelDataCollector(localFilePath, identifier="inputs")
    prediction_dc = ModelDataCollector(localFilePath, identifier="prediction")

def upload_schema(localfile):
    from azure.storage.blob import BlockBlobService
    from azure.storage.blob import ContentSettings

    az_blob_service = BlockBlobService(account_name=AZURE_STORAGE_ACCOUNT_NAME,
                                       account_key=AZURE_STORAGE_ACCOUNT_KEY)
    az_blob_service.create_blob_from_path(
        AZURE_STORAGE_CONTAINER_NAME,
        AZURE_STORAGE_BLOB_NAME_SCHEMA,
        localfile,
        content_settings=ContentSettings(content_type='application/json'))
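
upload_schema() expects the schema JSON to already exist on disk. With the classic azureml SDK that file was typically produced by generate_schema; a minimal sketch, assuming the run() defined below and a representative pandas frame sample_df (the frame and output path are assumptions):

def create_and_upload_schema(sample_df):
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema

    # describe the expected input from a representative sample
    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, sample_df)}
    generate_schema(run_func=run, inputs=inputs,
                    filepath='service_schema.json')
    upload_schema('service_schema.json')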

def run(input_df):
    import json