def __init__(self):
    """Initialize serving state by loading a trained SKLearn model from HDFS."""
    # Location of the serialized classifier inside the project's model registry.
    self.model_path = "Models/IrisFlowerClassifier/1/iris_knn.pkl"
    print("Copying SKLearn model from HDFS to local directory")
    # Stage the pickled model file into the current working directory.
    hdfs.copy_to_local(self.model_path)
    print("Reading local SkLearn model for serving")
    # Deserialize the scikit-learn estimator from the local copy.
    local_copy = "./iris_knn.pkl"
    self.model = joblib.load(local_copy)
    print("Initialization Complete")
Example no. 2
0
 def __init__(self):
     """Initialize serving state by loading a trained XGBoost model from HDFS.

     Copies the pickled model from the project's model registry in HDFS to
     the local working directory, then deserializes it for serving.
     """
     # Location of the serialized model inside the project's model registry.
     self.model_path = "Models/XGBoost_Churn_Classifier/1/xgb_reg.pkl"
     print("Copying model from HDFS to local directory")
     hdfs.copy_to_local(self.model_path)
     print("Reading local model for serving")
     # Fix: the original leaked the file handle via a bare open() and bound a
     # redundant `xgb_model_loaded` alias; a context manager closes the file.
     # NOTE(security): pickle.load executes arbitrary code on load — only
     # deserialize models from a trusted HDFS location.
     with open("xgb_reg.pkl", "rb") as model_file:
         self.model = pickle.load(model_file)
     print("Initialization Complete")
Example no. 3
0
def load(hdfs_filename, **kwds):
    """Read a file from HDFS into a NumPy array.

     Args:
       :hdfs_filename: Either a full HDFS pathname or a path relative to
           the project's root directory in HDFS.
       :**kwds: Extra keyword arguments forwarded to numpy.load(...)

     Returns:
      A numpy array

     Raises:
      IOError: If the file does not exist
    """
    # Resolve the (possibly project-relative) HDFS path, stage the file on
    # the local filesystem, then let NumPy parse the staged copy.
    staged_path = hdfs.copy_to_local(hdfs._expand_path(hdfs_filename))
    return np.load(staged_path, **kwds)
Example no. 4
0
def _copyHdfsToLocalOverwrite(hdfs_filename):
    # Expand a possibly project-relative HDFS path, stage the file on the
    # local filesystem, and return the local path of the copy.
    return hdfs.copy_to_local(hdfs._expand_path(hdfs_filename))