def load(cls, savedModelDir):
  """ Load saved model.

  :param savedModelDir: (string) Directory of where the experiment is to be
         or was saved
  :returns: (:class:`Model`) The loaded model instance
  """
  log = opf_utils.initLogger(cls)
  log.debug("Loading model from local checkpoint at %r...", savedModelDir)

  # Unpickle the model object itself.
  picklePath = Model._getModelPickleFilePath(savedModelDir)
  modelPickleFile = open(picklePath, 'rb')
  try:
    log.debug("Unpickling Model instance...")
    loadedModel = pickle.load(modelPickleFile)
    log.debug("Finished unpickling Model instance")
  finally:
    modelPickleFile.close()

  # Restore any extra data that was too big for pickling.
  loadedModel._deSerializeExtraData(
      extraDataDir=Model._getModelExtraDataDir(savedModelDir))
  log.debug("Finished Loading model from local checkpoint")
  return loadedModel
def load(cls, savedModelDir):
  """ Load saved model.

  :param savedModelDir: (string) Directory of where the experiment is to be
         or was saved
  :returns: (:class:`Model`) The loaded model instance
  """
  logger = opf_utils.initLogger(cls)
  logger.debug("Loading model from local checkpoint at %r...", savedModelDir)

  # Load the model
  modelPickleFilePath = Model._getModelPickleFilePath(savedModelDir)

  # BUG FIX: pickle data is binary; opening in text mode ('r') corrupts the
  # stream on Windows under Python 2 and fails outright under Python 3, so
  # open with 'rb' (as the sibling implementation of this method does).
  with open(modelPickleFilePath, 'rb') as modelPickleFile:
    logger.debug("Unpickling Model instance...")
    model = pickle.load(modelPickleFile)
    logger.debug("Finished unpickling Model instance")

  # Tell the model to load extra data, if any, that was too big for pickling
  model._deSerializeExtraData(
      extraDataDir=Model._getModelExtraDataDir(savedModelDir))

  logger.debug("Finished Loading model from local checkpoint")
  return model
def __getLogger(cls):
  """ Get the logger for this object.

  :returns: (Logger) A Logger object.
  """
  # Lazily create the class-level logger on first use, then cache it.
  logger = cls.__logger
  if logger is None:
    logger = opf_utils.initLogger(cls)
    cls.__logger = logger
  return logger
def read(cls, proto):
  """ Deserialize a TwoGramModel via capnp.

  :param proto: capnp TwoGramModelProto message reader
  :returns: new TwoGramModel instance populated from ``proto``
  """
  instance = object.__new__(cls)
  super(TwoGramModel, instance).__init__(proto=proto.modelBase)
  instance._logger = opf_utils.initLogger(instance)

  instance._reset = proto.reset
  instance._hashToValueDict = dict((item.hash, item.value)
                                   for item in proto.hashToValueDict)
  instance._learningEnabled = proto.learningEnabled

  instance._encoder = encoders.MultiEncoder.read(proto.encoder)
  instance._fieldNames = instance._encoder.getScalarNames()
  instance._prevValues = list(proto.prevValues)

  # Rebuild the per-field two-gram tables: {prevValue: {bucketIndex: count}}.
  # A serialized value of -1 encodes a previous value of None.
  instance._twoGramDicts = []
  for field in proto.twoGramDicts:
    fieldTable = dict()
    for entry in field:
      prev = entry.value if entry.value != -1 else None
      counts = collections.defaultdict(int)
      for bucket in entry.buckets:
        counts[bucket.index] = bucket.count
      fieldTable[prev] = counts
    instance._twoGramDicts.append(fieldTable)

  return instance
def read(cls, proto):
  """ Deserialize via capnp.

  :param proto: capnp TwoGramModelProto message reader
  :returns: TwoGramModel instance reconstructed from ``proto``
  """
  instance = object.__new__(cls)
  super(TwoGramModel, instance).__init__(proto=proto.modelBase)
  instance._logger = opf_utils.initLogger(instance)

  instance._reset = proto.reset
  instance._hashToValueDict = {}
  for item in proto.hashToValueDict:
    instance._hashToValueDict[item.hash] = item.value
  instance._learningEnabled = proto.learningEnabled

  instance._encoder = encoders.MultiEncoder.read(proto.encoder)
  instance._fieldNames = instance._encoder.getScalarNames()
  instance._prevValues = list(proto.prevValues)

  # One {prevValue: {bucketIndex: count}} mapping per serialized field;
  # a serialized value of -1 stands for a previous value of None.
  instance._twoGramDicts = [dict() for _ in proto.twoGramDicts]
  for fieldTable, field in zip(instance._twoGramDicts, proto.twoGramDicts):
    for entry in field:
      prev = None if entry.value == -1 else entry.value
      fieldTable[prev] = collections.defaultdict(int)
      for bucket in entry.buckets:
        fieldTable[prev][bucket.index] = bucket.count

  return instance
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             fieldNames=None, fieldTypes=None, predictedField=None,
             predictionSteps=None):
  """ PVM constructor.

  :param inferenceType: An opf_utils.InferenceType value that specifies what
         type of inference (i.e. TemporalNextStep, TemporalMultiStep, etc.)
  :param fieldNames: a list of field names (defaults to an empty list)
  :param fieldTypes: a list of the types for the fields mentioned in
         fieldNames (defaults to an empty list)
  :param predictedField: the field from fieldNames which is to be predicted
  :param predictionSteps: a list of steps for which a prediction is made.
         This is only needed in the case of multi step predictions
         (defaults to an empty list)
  """
  super(PreviousValueModel, self).__init__(inferenceType)

  self._logger = opf_utils.initLogger(self)
  self._predictedField = predictedField
  # None sentinels instead of mutable ``[]`` defaults: a literal list default
  # is created once and shared by every instance that omits the argument.
  self._fieldNames = [] if fieldNames is None else fieldNames
  self._fieldTypes = [] if fieldTypes is None else fieldTypes

  # only implement multistep and temporalnextstep
  if inferenceType == InferenceType.TemporalNextStep:
    self._predictionSteps = [1]
  elif inferenceType == InferenceType.TemporalMultiStep:
    self._predictionSteps = [] if predictionSteps is None else predictionSteps
  else:
    assert False, "Previous Value Model only works for next step or multi-step."
def __getLogger(cls):
  """ Get the logger for this object.

  :returns: (Logger) A Logger object.
  """
  # First access creates and caches the class-level logger.
  if cls.__logger is None:
    cls.__logger = opf_utils.initLogger(cls)
  cachedLogger = cls.__logger
  return cachedLogger
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             encoderParams=()):
  """ Two-gram model constructor.

  :param inferenceType: An opf_utils.InferenceType value that specifies what
         type of inference (i.e. TemporalNextStep, Classification, etc.)
  :param encoderParams: Sequence of encoder params dictionaries.
  """
  super(TwoGramModel, self).__init__(inferenceType)

  self._logger = opf_utils.initLogger(self)
  self._reset = False
  self._hashToValueDict = dict()
  self._learningEnabled = True
  self._encoder = encoders.MultiEncoder(encoderParams)
  self._fieldNames = self._encoder.getScalarNames()
  # BUG FIX: the original used ``[] * len(...)``, which is always the empty
  # list regardless of the field count. One None placeholder per field is
  # required, matching the sibling constructor's ``[None] * len(...)``.
  self._prevValues = [None] * len(self._fieldNames)
  self._twoGramDicts = [dict() for _ in xrange(len(self._fieldNames))]
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             encoderParams=()):
  """ Two-gram model constructor.

  :param inferenceType: An opf_utils.InferenceType value that specifies what
         type of inference (i.e. TemporalNextStep, Classification, etc.)
  :param encoderParams: Sequence of encoder params dictionaries.
  """
  super(TwoGramModel, self).__init__(inferenceType)

  self._logger = opf_utils.initLogger(self)
  self._reset = False
  self._hashToValueDict = {}
  self._learningEnabled = True

  self._encoder = encoders.MultiEncoder(encoderParams)
  self._fieldNames = self._encoder.getScalarNames()
  # One "previous value" slot and one two-gram table per scalar field.
  self._prevValues = [None for _ in self._fieldNames]
  self._twoGramDicts = [{} for _ in self._fieldNames]
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             fieldNames=None, fieldTypes=None, predictedField=None,
             predictionSteps=None):
  """ Previous Value Model constructor.

  :param inferenceType: An opf_utils.InferenceType value; only
         TemporalNextStep and TemporalMultiStep are supported.
  :param fieldNames: a list of field names (defaults to an empty list)
  :param fieldTypes: a list of the types for the fields in fieldNames
         (defaults to an empty list)
  :param predictedField: the field from fieldNames which is to be predicted
  :param predictionSteps: a list of steps for which a prediction is made;
         only used for multi step predictions (defaults to an empty list)
  """
  super(PreviousValueModel, self).__init__(inferenceType)

  self._logger = opf_utils.initLogger(self)
  self._predictedField = predictedField
  # None sentinels replace the original mutable ``[]`` defaults, which are
  # evaluated once at definition time and shared across all instances.
  self._fieldNames = [] if fieldNames is None else fieldNames
  self._fieldTypes = [] if fieldTypes is None else fieldTypes

  # only implement multistep and temporalnextstep
  if inferenceType == InferenceType.TemporalNextStep:
    self._predictionSteps = [1]
  elif inferenceType == InferenceType.TemporalMultiStep:
    self._predictionSteps = [] if predictionSteps is None else predictionSteps
  else:
    assert False, "Previous Value Model only works for next step or multi-step."
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
             fieldNames=None, fieldTypes=None, predictedField=None,
             predictionSteps=None):
  """ Previous Value Model constructor.

  :param inferenceType: An opf_utils.InferenceType value; only
         TemporalNextStep and TemporalMultiStep are supported.
  :param fieldNames: a list of field names (defaults to an empty list)
  :param fieldTypes: a list of the types for the fields in fieldNames
         (defaults to an empty list)
  :param predictedField: the field from fieldNames which is to be predicted
  :param predictionSteps: a list of steps for which a prediction is made;
         only used for multi step predictions (defaults to an empty list)
  """
  super(PreviousValueModel, self).__init__(inferenceType)

  self._logger = opf_utils.initLogger(self)
  self._predictedField = predictedField
  # None sentinels replace the original mutable ``[]`` defaults, which are
  # evaluated once at definition time and shared across all instances.
  self._fieldNames = [] if fieldNames is None else fieldNames
  self._fieldTypes = [] if fieldTypes is None else fieldTypes

  # only implement multistep and temporalnextstep
  if inferenceType == InferenceType.TemporalNextStep:
    self._predictionSteps = [1]
  elif inferenceType == InferenceType.TemporalMultiStep:
    self._predictionSteps = [] if predictionSteps is None else predictionSteps
  else:
    assert False, "Previous Value Model only works for next step or multi-step."
def read(cls, proto):
  """ Deserialize via capnp.

  :param proto: capnp PreviousValueModelProto message reader
  :returns: new instance of PreviousValueModel deserialized from the given
            proto
  """
  newInstance = object.__new__(cls)
  super(PreviousValueModel, newInstance).__init__(proto=proto.modelBase)

  newInstance._logger = opf_utils.initLogger(newInstance)
  newInstance._predictedField = proto.predictedField
  # Materialize the capnp list readers as plain Python lists.
  for attrName, reader in (("_fieldNames", proto.fieldNames),
                           ("_fieldTypes", proto.fieldTypes),
                           ("_predictionSteps", proto.predictionSteps)):
    setattr(newInstance, attrName, list(reader))
  return newInstance
def __setstate__(self, state=None):
  """ Restore instance state after unpickling and recreate the logger.

  :param state: state mapping produced during pickling. Pickle always calls
         ``__setstate__`` with exactly one argument, so the original
         no-argument signature made unpickling raise TypeError. ``None`` is
         accepted for backward compatibility with any direct caller of the
         old zero-argument form.
  """
  if state is not None:
    # NOTE(review): assumes the (unseen) __getstate__ returns the instance
    # __dict__ — confirm against the pickling counterpart.
    self.__dict__.update(state)
  # Loggers are not picklable, so rebuild one on restore.
  self._logger = opf_utils.initLogger(self)
def __setstate__(self, state=None):
  """ Restore instance state after unpickling and recreate the logger.

  :param state: state mapping produced during pickling. Pickle always calls
         ``__setstate__`` with exactly one argument, so the original
         no-argument signature made unpickling raise TypeError. ``None`` is
         accepted for backward compatibility with any direct caller of the
         old zero-argument form.
  """
  if state is not None:
    # NOTE(review): assumes the (unseen) __getstate__ returns the instance
    # __dict__ — confirm against the pickling counterpart.
    self.__dict__.update(state)
  # Loggers are not picklable, so rebuild one on restore.
  self._logger = opf_utils.initLogger(self)