def __init__(self, input_size, hidden_size, CD=None, CDk=1, *args, **kwargs):
    """Build an RBM with zero-initialized parameters.

    Parameters
    ----------
    input_size : int
        Number of visible units (columns of W).
    hidden_size : int
        Number of hidden units (rows of W).
    CD : object, optional
        Contrastive-divergence trainer. A fresh ``ContrastiveDivergence()``
        is created when omitted.
    CDk : int
        Number of CD steps (CD-k).
    *args, **kwargs
        Forwarded to ``Model.__init__``.
    """
    Model.__init__(self, *args, **kwargs)
    # Bug fix: the original signature used ``CD=ContrastiveDivergence()``,
    # a default evaluated once at definition time and therefore shared by
    # every instance constructed without an explicit CD. Use a None
    # sentinel so each instance gets its own trainer.
    self.CD = ContrastiveDivergence() if CD is None else CD
    self.CDk = CDk
    self.input_size = input_size
    self.hidden_size = hidden_size
    # Parameters start at zero; real initialization presumably happens in
    # self.setup() -- TODO confirm.
    self.W = theano.shared(
        value=np.zeros((self.hidden_size, self.input_size),
                       dtype=theano.config.floatX),
        name='W')
    self.b = theano.shared(
        value=np.zeros(self.hidden_size, dtype=theano.config.floatX),
        name='b')
    self.c = theano.shared(
        value=np.zeros(self.input_size, dtype=theano.config.floatX),
        name='c')
    self.parameters = [self.W, self.b, self.c]
    self.setup()
def __setstate__(self, state):
    """Restore the model from a pickled ``state`` dict.

    Rebuilds each Theano shared variable (e.g. W, b, c) from the raw
    arrays stored under ``state['parameters']`` and reattaches the
    hyperparameters saved by ``__getstate__``.
    """
    Model.__setstate__(self, state)
    # Recreate every shared variable and expose it both as an attribute
    # (self.W, self.b, ...) and through the self.parameters list.
    self.parameters = []
    for param_name, param_value in state['parameters'].items():
        shared_var = theano.shared(param_value, name=param_name)
        setattr(self, param_name, shared_var)
        self.parameters.append(shared_var)
    # Hyperparameters.
    self.input_size = state['input_size']
    self.hidden_size = state['hidden_size']
    self.CD = state['CD']
    self.CDk = state['CDk']
def __init__(self, input_size, hidden_size, CD=None, CDk=1, *args, **kwargs):
    """Build an RBM with zero-initialized parameters.

    Parameters
    ----------
    input_size : int
        Number of visible units (columns of W).
    hidden_size : int
        Number of hidden units (rows of W).
    CD : object, optional
        Contrastive-divergence trainer. A fresh ``ContrastiveDivergence()``
        is created when omitted.
    CDk : int
        Number of CD steps (CD-k).
    *args, **kwargs
        Forwarded to ``Model.__init__``.
    """
    Model.__init__(self, *args, **kwargs)
    # Bug fix: the original signature used ``CD=ContrastiveDivergence()``,
    # a default evaluated once at definition time and therefore shared by
    # every instance constructed without an explicit CD. Use a None
    # sentinel so each instance gets its own trainer.
    self.CD = ContrastiveDivergence() if CD is None else CD
    self.CDk = CDk
    self.input_size = input_size
    self.hidden_size = hidden_size
    # Parameters start at zero; real initialization presumably happens in
    # self.setup() -- TODO confirm.
    self.W = theano.shared(
        value=np.zeros((self.hidden_size, self.input_size),
                       dtype=theano.config.floatX),
        name='W')
    self.b = theano.shared(
        value=np.zeros(self.hidden_size, dtype=theano.config.floatX),
        name='b')
    self.c = theano.shared(
        value=np.zeros(self.input_size, dtype=theano.config.floatX),
        name='c')
    self.parameters = [self.W, self.b, self.c]
    self.setup()
def __getstate__(self):
    """Serialize the model into a plain dict suitable for pickling.

    Shared variables are stored as raw numpy arrays keyed by their
    Theano name so that ``__setstate__`` can rebuild them.
    """
    state = dict(Model.__getstate__(self))
    state['RBM_version'] = 1
    # Extract the raw values out of the Theano shared variables.
    state['parameters'] = {param.name: param.get_value()
                           for param in self.parameters}
    # Hyperparameters.
    state['input_size'] = self.input_size
    state['hidden_size'] = self.hidden_size
    state['CD'] = self.CD
    state['CDk'] = self.CDk
    return state