def load(cls, savedModelDir):
  """ Load saved model.

  @param savedModelDir (string)
         Directory of where the experiment is to be or was saved
  @returns (Model) The loaded model instance
  """
  logger = opfutils.initLogger(cls)
  logger.debug("Loading model from local checkpoint at %r...", savedModelDir)

  # Load the model
  modelPickleFilePath = Model._getModelPickleFilePath(savedModelDir)

  # BUG FIX: pickle data is binary; the file must be opened in binary mode
  # ('rb', as the other load() implementation in this file already does).
  # Text mode ('r') corrupts binary pickle streams on some platforms and
  # fails outright for pickle protocol >= 1.
  with open(modelPickleFilePath, 'rb') as modelPickleFile:
    logger.debug("Unpickling Model instance...")

    model = pickle.load(modelPickleFile)

    logger.debug("Finished unpickling Model instance")

  # Tell the model to load extra data, if any, that was too big for pickling
  model._deSerializeExtraData(
      extraDataDir=Model._getModelExtraDataDir(savedModelDir))

  logger.debug("Finished Loading model from local checkpoint")

  return model
def __init__(self,
             inferenceType=InferenceType.TemporalNextStep,
             fieldNames=None,
             fieldTypes=None,
             predictedField=None,
             predictionSteps=None):
  """ PVM constructor.

  inferenceType: An opfutils.InferenceType value that specifies what type of
      inference (i.e. TemporalNextStep, TemporalMultiStep, etc.)
  fieldNames: a list of field names (defaults to an empty list)
  fieldTypes: a list of the types for the fields mentioned in fieldNames
      (defaults to an empty list)
  predictedField: the field from fieldNames which is to be predicted
  predictionSteps: a list of steps for which a prediction is made. This is
      only needed in the case of multi step predictions (defaults to an
      empty list)
  """
  super(PreviousValueModel, self).__init__(inferenceType)

  self._logger = opfutils.initLogger(self)
  self._predictedField = predictedField
  # BUG FIX: the previous defaults were mutable lists ([]) shared across all
  # calls; use None sentinels so each instance gets its own fresh list.
  self._fieldNames = [] if fieldNames is None else fieldNames
  self._fieldTypes = [] if fieldTypes is None else fieldTypes

  # only implement multistep and temporalnextstep
  if inferenceType == InferenceType.TemporalNextStep:
    self._predictionSteps = [1]
  elif inferenceType == InferenceType.TemporalMultiStep:
    self._predictionSteps = [] if predictionSteps is None else predictionSteps
  else:
    # NOTE(review): `assert` is stripped under `python -O`; a ValueError
    # would be more robust, but the exception type is kept so existing
    # callers that expect AssertionError are not broken.
    assert False, "Previous Value Model only works for next step or multi-step."
def __getLogger(cls):
  """ Get the logger for this object.

  @returns (Logger) A Logger object.
  """
  # Lazily create the class-level logger on first access, then reuse it.
  logger = cls.__logger
  if logger is None:
    logger = opfutils.initLogger(cls)
    cls.__logger = logger
  return logger
def load(cls, savedModelDir):
  """ Load saved model

  Parameters:
  -----------------------------------------------------------------------
  savedModelDir:
                directory of where the experiment is to be or was saved

  Returns: the loaded model instance
  """
  log = opfutils.initLogger(cls)
  log.info("Loading model from local checkpoint at %r...", savedModelDir)

  # Unpickle the model object itself.
  picklePath = Model._getModelPickleFilePath(savedModelDir)
  with open(picklePath, 'rb') as pickleFile:
    log.info("Unpickling Model instance...")
    model = pickle.load(pickleFile)
    log.info("Finished unpickling Model instance")

  # Restore any extra data that was too big to go through pickle.
  extraDataDir = Model._getModelExtraDataDir(savedModelDir)
  model._deSerializeExtraData(extraDataDir=extraDataDir)

  log.info("Finished Loading model from local checkpoint")
  return model
def __getLogger(cls):
  """ Get the logger for this object.

  @returns (Logger) A Logger object.
  """
  # Fast path: logger already built for this class.
  if cls.__logger is not None:
    return cls.__logger
  # Slow path: first access — build and cache the logger.
  cls.__logger = opfutils.initLogger(cls)
  return cls.__logger
def __init__(self,
             inferenceType=InferenceType.TemporalNextStep,
             fieldNames=None,
             fieldTypes=None,
             predictedField=None,
             predictionSteps=None):
  """ PVM constructor.

  inferenceType: An opfutils.InferenceType value that specifies what type of
      inference (i.e. TemporalNextStep, TemporalMultiStep, etc.)
  fieldNames: a list of field names (defaults to an empty list)
  fieldTypes: a list of the types for the fields mentioned in fieldNames
      (defaults to an empty list)
  predictedField: the field from fieldNames which is to be predicted
  predictionSteps: a list of steps for which a prediction is made. This is
      only needed in the case of multi step predictions (defaults to an
      empty list)
  """
  super(PreviousValueModel, self).__init__(inferenceType)

  self._logger = opfutils.initLogger(self)
  self._predictedField = predictedField
  # BUG FIX: replaced mutable default arguments ([]) — which are shared
  # between every call of the constructor — with None sentinels expanded to
  # fresh per-instance lists here.
  self._fieldNames = fieldNames if fieldNames is not None else []
  self._fieldTypes = fieldTypes if fieldTypes is not None else []

  # only implement multistep and temporalnextstep
  if inferenceType == InferenceType.TemporalNextStep:
    self._predictionSteps = [1]
  elif inferenceType == InferenceType.TemporalMultiStep:
    self._predictionSteps = (predictionSteps
                             if predictionSteps is not None else [])
  else:
    # NOTE(review): `assert` disappears under `python -O`; kept (rather than
    # raising ValueError) to preserve the exception type callers may expect.
    assert False, "Previous Value Model only works for next step or multi-step."
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             encoderParams=()):
  """ Two-gram model constructor.

  inferenceType: An opfutils.InferenceType value that specifies what type of
      inference (i.e. TemporalNextStep, Classification, etc.)
  encoderParams: Sequence of encoder params dictionaries.
  """
  super(TwoGramModel, self).__init__(inferenceType)

  self._logger = opfutils.initLogger(self)
  self._reset = False
  self._hashToValueDict = dict()
  self._learningEnabled = True

  # One two-gram table and one previous-value slot per scalar field.
  self._encoder = encoders.MultiEncoder(encoderParams)
  self._fieldNames = self._encoder.getScalarNames()
  numFields = len(self._fieldNames)
  self._prevValues = [None] * numFields
  self._twoGramDicts = [dict() for _ in xrange(numFields)]
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             encoderParams=()):
  """ Construct a two-gram model.

  inferenceType: An opfutils.InferenceType value that specifies what type of
      inference (i.e. TemporalNextStep, Classification, etc.)
  encoderParams: Sequence of encoder params dictionaries.
  """
  super(TwoGramModel, self).__init__(inferenceType)

  self._logger = opfutils.initLogger(self)

  # Runtime state flags.
  self._reset = False
  self._learningEnabled = True
  self._hashToValueDict = dict()

  # Build the encoder and allocate per-field bookkeeping structures.
  self._encoder = encoders.MultiEncoder(encoderParams)
  self._fieldNames = self._encoder.getScalarNames()
  fieldCount = len(self._fieldNames)
  self._prevValues = [None] * fieldCount
  self._twoGramDicts = [dict() for _ in xrange(fieldCount)]
def __setstate__(self, state=None):
  """ Restore instance state after unpickling.

  BUG FIX: the pickle protocol calls obj.__setstate__(state) with one
  positional argument; the previous signature __setstate__(self) raised
  TypeError during unpickling (unless __getstate__ returned a falsy state —
  TODO confirm against this class's __getstate__). The argument now defaults
  to None so any existing no-argument callers keep working.

  state: optional dict of pickled instance attributes; applied to __dict__
         before the (non-picklable) logger is rebuilt.
  """
  if state is not None:
    self.__dict__.update(state)
  # The logger is recreated rather than pickled.
  self._logger = opfutils.initLogger(self)
def __setstate__(self, state=None):
  """ Restore instance state after unpickling.

  BUG FIX: pickle invokes __setstate__(state) with the saved state as a
  positional argument, so the old zero-argument signature broke unpickling
  with TypeError (unless __getstate__ returned a falsy state — TODO confirm).
  Defaulting the parameter to None keeps any direct no-argument calls valid.

  state: optional dict of pickled attributes, merged into __dict__ before
         the logger (which cannot be pickled) is recreated.
  """
  if state is not None:
    self.__dict__.update(state)
  # Rebuild the logger; it is excluded from serialization.
  self._logger = opfutils.initLogger(self)
def __getLogger(cls):
  """ Return the lazily-initialized logger shared by this class. """
  cached = cls.__logger
  if cached is None:
    # First call: build the logger and remember it on the class.
    cached = cls.__logger = opfutils.initLogger(cls)
  return cached
def __getLogger(cls):
  """ Get the logger for this class, creating it on first use. """
  if cls.__logger is not None:
    return cls.__logger
  newLogger = opfutils.initLogger(cls)
  cls.__logger = newLogger
  return newLogger