def test_repeat():
    """Pin down the `repeat` contract: broadcast, pad, and truncate."""
    cases = [
        # scalars (and None) are broadcast to the requested length
        (None, 4, [None, None, None, None]),
        (4, 4, [4, 4, 4, 4]),
        ("check", 3, ["check", "check", "check"]),
        # empty sequences stay empty
        ([], 4, []),
        ((), 4, []),
        # short sequences are padded by repeating the last element;
        # long sequences are truncated to `length`
        ([2, 3, 4], 4, [2, 3, 4, 4]),
        ([1, 2, 3, 4], 4, [1, 2, 3, 4]),
        ([1, 2, 3, 4, 5], 4, [1, 2, 3, 4]),
    ]
    for value, length, expected in cases:
        assert repeat(value, length=length) == expected
def build(self, hids):
    """Create the per-layer weight matrices and their momentum buffers.

    Layer dimensions chain ``num_attrs -> hids... -> num_classes``; for
    each transition one trainable weight ``Parameter`` and one
    same-shaped zero "velocity" tensor are allocated, then
    ``self._initialize()`` is invoked.

    Parameters
    ----------
    hids : int or iterable of int
        Hidden layer size(s); normalized to a list via ``gf.repeat``
        (see the companion ``test_repeat`` contract).
    """
    hids = gf.repeat(hids)
    weights, w_velocities = [], []
    pre_hid = self.num_attrs
    for hid in hids + [self.num_classes]:
        shape = (pre_hid, hid)
        # Allocate directly on the target device instead of creating on
        # CPU and copying with `.to(...)` — avoids a redundant transfer.
        w = Parameter(torch.zeros(shape, device=self.device))
        w_velocity = torch.zeros(shape, device=self.device)
        weights.append(w)
        w_velocities.append(w_velocity)
        pre_hid = hid
    self.weights, self.w_velocities = weights, w_velocities
    self._initialize()
def build(self, hids):
    """Create zero-initialized layer weights plus the attack state.

    Layer dimensions chain ``num_attrs -> hids... -> num_classes``. Also
    allocates running-gradient accumulators for the adjacency and
    feature tensors and an Adam optimizer.
    """
    init = zeros()
    params = []
    fan_in = self.num_attrs
    for fan_out in gf.repeat(hids) + [self.num_classes]:
        # zeros initializer is used temporarily to save time
        var = tf.Variable(init(shape=(fan_in, fan_out), dtype=self.floatx))
        params.append(var)
        fan_in = fan_out
    self.weights = params

    self.adj_grad_sum = tf.Variable(tf.zeros_like(self.adj_tensor))
    self.x_grad_sum = tf.Variable(tf.zeros_like(self.x_tensor))
    self.optimizer = Adam(self.lr, epsilon=1e-8)
def build(self, hids):
    """Create zero-initialized weights and matching momentum buffers.

    Layer dimensions chain ``num_attrs -> hids... -> num_classes``; each
    transition gets one weight ``tf.Variable`` and one same-shaped
    "velocity" ``tf.Variable`` for momentum updates.
    """
    init = zeros()
    ws, velocities = [], []
    fan_in = self.num_attrs
    for fan_out in gf.repeat(hids) + [self.num_classes]:
        shape = (fan_in, fan_out)
        # zeros initializer is used temporarily to save time
        ws.append(tf.Variable(init(shape=shape, dtype=self.floatx)))
        velocities.append(tf.Variable(init(shape=shape, dtype=self.floatx)))
        fan_in = fan_out
    self.weights, self.w_velocities = ws, velocities