def __init__(self, volumes):
    CostFunction.__init__(self)
    self.m = share(np.max(volumes))  # largest per-class volume
    self.n = share(np.min(volumes))  # smallest per-class volume
    # Prepend a zero entry so the volume table has an entry at index 0.
    volumes = np.concatenate([np.array([0]), volumes])
    self.volumes = share(volumes)

def test(self):
    # Toy 2x3 batch: two samples, three classes, one-hot targets.
    pred_batch = share(np.reshape(np.array([0, 0.2, 0.8, 0, 0.6, 0.4]), (2, 3)))
    tg_batch = share(np.reshape(np.array([0, 0, 1, 0, 0, 1]), (2, 3)))
    a = T.argmax(pred_batch, axis=1)  # predicted class per sample
    b = T.argmax(tg_batch, axis=1)    # target class per sample
    # The weight grows with the ratio of predicted-class volume to
    # target-class volume, scaled by the min/max volume ratio.
    weights = 1 + 10 * (self.volumes[a] / self.volumes[b]) * (self.n / self.m)
    return -T.mean(weights * T.log(T.sum(pred_batch * tg_batch, axis=1)))
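For reference, a minimal plain-NumPy sketch of what this weighting does on the toy batch from test() above; the volume values here are made up, and share/Theano are replaced with ordinary arrays:

import numpy as np

volumes = np.array([50.0, 200.0, 1000.0])           # hypothetical per-class volumes
m, n = volumes.max(), volumes.min()
volumes = np.concatenate([np.array([0]), volumes])  # index-0 entry, as in __init__

pred = np.array([[0, 0.2, 0.8], [0, 0.6, 0.4]])
tg = np.array([[0, 0, 1], [0, 0, 1]])

a = pred.argmax(axis=1)   # predicted classes: [2, 1]
b = tg.argmax(axis=1)     # target classes:    [2, 2]
weights = 1 + 10 * (volumes[a] / volumes[b]) * (n / m)
cost = -np.mean(weights * np.log((pred * tg).sum(axis=1)))
print(weights, cost)      # [1.5, 1.125], ~0.683

The second sample is misclassified (class 1 instead of 2) but predicts a smaller-volume class, so its weight stays close to 1; predicting a larger-volume class would inflate the penalty.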
def init_parameters(self, w_shape, b_shape):
    w_bound = self.compute_bound_parameters_virtual()
    # initialize weights with random weights
    self.w = share(np.asarray(
        np.random.uniform(low=-w_bound, high=w_bound, size=w_shape),
        dtype=theano.config.floatX), "w")
    # the bias is a 1D tensor -- one bias per output feature map;
    # slightly positive for ReLU units
    b_values = 0.1 + np.zeros(b_shape, dtype=theano.config.floatX)
    self.b = share(b_values, "b")
    self.update_params()
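compute_bound_parameters_virtual() is left to the subclasses. One plausible choice, sketched below purely as an assumption, is the Glorot/Bengio uniform bound sqrt(6 / (fan_in + fan_out)); the actual subclasses in this code may compute the bound differently:

import numpy as np

def glorot_uniform_bound(fan_in, fan_out):
    # Keeps the variance of activations roughly constant across layers.
    return np.sqrt(6.0 / (fan_in + fan_out))

w_shape = (784, 256)                     # hypothetical (n_inputs, n_outputs)
w_bound = glorot_uniform_bound(*w_shape)
w = np.random.uniform(low=-w_bound, high=w_bound, size=w_shape)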
def outputs_shared(self):
    if not self.shared_outputs_created:
        # First call: create the shared variable once.
        self._outputs_shared = share(self.outputs)
        self.shared_outputs_created = True
    else:
        # Later calls: reuse the existing buffer; borrow=True avoids a copy.
        self._outputs_shared.set_value(self.outputs, borrow=True)
    return self._outputs_shared
def compute_updates(self, params, grads):
    updates = OrderedDict()
    for param_i, grad_i in zip(params, grads):
        # One velocity buffer per parameter, initialized to zero.
        diff = share(np.zeros(param_i.get_value().shape,
                              dtype=theano.config.floatX), "diff")
        # Classical momentum: v <- mu * v - lr * grad; p <- p + v
        update_diff = self.momentum * diff - self.learning_rate * grad_i
        updates[param_i] = param_i + update_diff
        updates[diff] = update_diff
    return updates
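To see the recurrence that compute_updates() builds symbolically, here is a minimal plain-NumPy simulation of classical momentum on the 1-D quadratic f(p) = p**2; the function and constants are illustrative, not taken from this code:

learning_rate, momentum = 0.1, 0.9
param, diff = 5.0, 0.0                              # parameter and its velocity buffer
for _ in range(200):
    grad = 2.0 * param                              # gradient of f(p) = p ** 2
    diff = momentum * diff - learning_rate * grad   # updates[diff]
    param = param + diff                            # updates[param_i]
print(param)                                        # close to the minimum at 0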
def __init__(self, vec):
    LayerBlock.__init__(self)
    self.vec = share(vec)
def __init__(self, learning_rate, momentum):
    LearningUpdate.__init__(self)
    # Validate before creating shared variables, so a bad value fails fast.
    if momentum < 0 or momentum > 1:
        raise Exception("Momentum value should be between 0 and 1.")
    self.learning_rate = share(learning_rate, "learning_rate")
    self.momentum = share(momentum, "momentum")
def __init__(self, learning_rate):
    LearningUpdate.__init__(self)
    self.learning_rate = share(learning_rate, "learning_rate")