Example No. 1
    def test_repeat_elements(self):
        reps = 3
        for ndims in [1, 2, 3]:
            shape = np.arange(2, 2 + ndims)
            arr = np.arange(np.prod(shape)).reshape(shape)
            arr_th = KTH.variable(arr)
            arr_tf = KTF.variable(arr)

            for rep_axis in range(ndims):
                np_rep = np.repeat(arr, reps, axis=rep_axis)
                th_z = KTH.repeat_elements(arr_th, reps, axis=rep_axis)
                th_rep = KTH.eval(th_z)
                tf_rep = KTF.eval(
                    KTF.repeat_elements(arr_tf, reps, axis=rep_axis))

                assert th_rep.shape == np_rep.shape
                assert tf_rep.shape == np_rep.shape
                assert_allclose(np_rep, th_rep, atol=1e-05)
                assert_allclose(np_rep, tf_rep, atol=1e-05)
                if hasattr(th_z, '_keras_shape'):
                    assert th_z._keras_shape == th_rep.shape

                # test theano shape inference when
                # input shape has None entries
                if K.backend() == 'theano':
                    shape = list(shape)
                    shape[rep_axis] = None
                    x = K.placeholder(shape=shape)
                    y = K.repeat_elements(x, reps, axis=rep_axis)
                    assert y._keras_shape == tuple(shape)
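For reference, K.repeat_elements mirrors np.repeat along a single axis. A standalone sketch of that equivalence, assuming only numpy and the Keras backend module (this snippet is not part of the test above):

    import numpy as np
    from keras import backend as K

    arr = np.arange(24).reshape((2, 3, 4)).astype('float32')
    x = K.variable(arr)

    # repeat every element 3 times along axis 1: (2, 3, 4) -> (2, 9, 4)
    y = K.eval(K.repeat_elements(x, 3, axis=1))

    assert y.shape == (2, 9, 4)
    assert np.allclose(y, np.repeat(arr, 3, axis=1))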
Example No. 3
    def social_attention(self, x):
        self.user_embedding = Embedding(input_dim=self.max_user + 1,
                                        output_dim=self.H,
                                        trainable=True,
                                        mask_zero=True)
        all_u_i = self.user_embedding(x)

        w_v = Dense(1, use_bias=False)
        w_x = Dense(self.H, use_bias=False)

        u_i = Lambda(lambda xin: xin[:, 0, :])(all_u_i)

        e = []
        w_all_u_i = []
        u_i = w_x(u_i)
        for j in range(self.walk_length):
            # bind j as a default argument so each Lambda slices a fixed index
            u_j = Lambda(lambda xin, j=j: xin[:, j, :])(all_u_i)
            u_j = w_x(u_j)
            e_i_j = LeakyReLU(alpha=0.3)(w_v(concatenate([u_i, u_j])))
            e.append(e_i_j)
            w_all_u_i.append(u_j)
        e = concatenate(e, axis=-1)
        w_all_u_i = Reshape((self.walk_length, self.H))(concatenate(w_all_u_i,
                                                                    axis=-1))
        alpha = Lambda(lambda xin: K.repeat_elements(
            K.expand_dims(K.softmax(xin), -1), rep=self.H, axis=-1))(e)
        u_f_i = Lambda(lambda xin: K.sum(xin, axis=1))(multiply(
            [alpha, w_all_u_i]))
        return u_f_i
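The pattern above (softmax, then expand_dims and repeat_elements, then multiply and sum) simply broadcasts per-step attention weights over the hidden dimension. A minimal sketch of the same pattern on raw backend tensors, assuming an illustrative walk length L and hidden size H (these names and sizes are chosen here for illustration, not taken from the source):

    import numpy as np
    from keras import backend as K

    L, H = 5, 8                                        # illustrative sizes only
    scores = K.variable(np.random.rand(2, L))          # (batch, L) raw attention scores
    states = K.variable(np.random.rand(2, L, H))       # (batch, L, H) per-step vectors

    alpha = K.softmax(scores)                          # (batch, L), weights sum to 1
    alpha = K.expand_dims(alpha, -1)                   # (batch, L, 1)
    alpha = K.repeat_elements(alpha, rep=H, axis=-1)   # (batch, L, H), matches states

    context = K.sum(alpha * states, axis=1)            # (batch, H) weighted sum
    print(K.eval(context).shape)                       # (2, 8)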
Example No. 4
    def personalized_attention(self, u_s, h_i):
        dim = h_i.shape[-1]
        u_i = Dense(self.H, activation="tanh", use_bias=True)(h_i)
        dot_i_s = dot([u_i, u_s], axes=(2, 1))
        alpha = Lambda(lambda xin: K.repeat_elements(
            K.expand_dims(K.softmax(xin), -1), rep=dim, axis=-1))(dot_i_s)
        s = Lambda(lambda xin: K.sum(xin, axis=1))(multiply([alpha, h_i]))
        return s
    def social_attention(self, x):
        self.user_embedding = Embedding(
            input_dim=self.max_user + 1,
            output_dim=self.H,
            trainable=True,
            # embeddings_initializer='random_normal',
            mask_zero=True)
        all_u_i = self.user_embedding(x)

        w_v = Dense(1, use_bias=False)
        w_1 = Dense(self.H, use_bias=False)
        w_2 = Dense(self.H, use_bias=False)
        w_p = Dense(self.H, use_bias=False)
        w_x = Dense(self.H, use_bias=False)

        u_i = Lambda(lambda xin: xin[:, 0, :])(all_u_i)

        if self.use_social:
            e = []
            w_all_u_i = []
            u_i = w_x(u_i)
            for j in range(self.walk_length):
                # bind j as a default argument so each Lambda slices a fixed index
                u_j = Lambda(lambda xin, j=j: xin[:, j, :])(all_u_i)
                u_j = w_x(u_j)
                e_i_j = LeakyReLU(alpha=0.3)(w_v(concatenate([u_i, u_j])))
                e.append(e_i_j)
                w_all_u_i.append(u_j)
            e = concatenate(e, axis=-1)
            w_all_u_i = Reshape((self.walk_length, self.H))(concatenate(w_all_u_i, axis=-1))
            alpha = Lambda(lambda xin: K.repeat_elements(K.expand_dims(
                K.softmax(xin), -1), rep=self.H, axis=-1))(e)
            u_f_i = Lambda(lambda xin: K.sum(xin, axis=1))(multiply([alpha, w_all_u_i]))
        else:
            u_f_i = w_x(u_i)
        """
        if self.use_social:
            u_i = Lambda(lambda xin: xin[:, 0, :])(u_i_i)
            f_i_j = Lambda(lambda xin: xin[:, 1:])(u_i_i)
            e_0 = w_v(Activation("tanh")(add([w_1(u_i), w_2(u_i)])))
            e = [e_0]
            num_max_friends = self.walk_length - 1
            for j in range(num_max_friends):
                f_i = Lambda(lambda xin: xin[:, j, :])(f_i_j)
                e_j = w_v(Activation("tanh")(add([w_1(u_i), w_2(f_i)])))
                e.append(e_j)
            e_i_j = concatenate(e, axis=-1)
            alpha = Lambda(lambda xin: K.repeat_elements(K.expand_dims(
                K.softmax(xin), -1), rep=self.H, axis=-1))(e_i_j)

            u_f_i = Lambda(lambda xin: K.sum(xin, axis=1))(multiply([alpha, u_i_i]))
        """
        # u_f_i = Activation("tanh")(add([w_p(c_i), w_x(u_i)]))

        return u_f_i
Example No. 6
    def test_repeat_elements(self):
        reps = 3
        for ndims in [1, 2, 3]:
            shape = np.arange(2, 2 + ndims)
            arr = np.arange(np.prod(shape)).reshape(shape)
            arr_th = KTH.variable(arr)
            arr_tf = KTF.variable(arr)

            for rep_axis in range(ndims):
                np_rep = np.repeat(arr, reps, axis=rep_axis)
                th_rep = KTH.eval(KTH.repeat_elements(arr_th, reps, axis=rep_axis))
                tf_rep = KTF.eval(KTF.repeat_elements(arr_tf, reps, axis=rep_axis))

                assert th_rep.shape == np_rep.shape
                assert tf_rep.shape == np_rep.shape
                assert_allclose(np_rep, th_rep, atol=1e-05)
                assert_allclose(np_rep, tf_rep, atol=1e-05)
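The Theano-specific branch in Example No. 1 checks static shape inference when an input dimension is None. A small sketch of the same idea, assuming the TensorFlow backend and a repeated axis whose size is statically known:

    from keras import backend as K

    x = K.placeholder(shape=(None, 4))      # batch size unknown
    y = K.repeat_elements(x, 2, axis=1)     # repeat along the known axis
    print(K.int_shape(y))                   # (None, 8)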