コード例 #1
0
class DotLayer(lasagne.layers.Layer):
    """Dense layer whose weight matrix is parameterised on a fixed-rank manifold.

    The weight ``W`` is sampled from the manifold's own ``rand()`` and the
    forward product is delegated to ``DotOp``, which presumably knows how to
    multiply with the manifold-structured parameter (TODO confirm against
    DotOp's implementation).
    """

    def __init__(self, incoming, num_units, W=lasagne.init.GlorotUniform(), **kwargs):
        # NOTE(review): the ``W`` initializer argument is accepted for API
        # compatibility with lasagne.layers.DenseLayer but is never used --
        # the weight is always drawn from the manifold.
        super(DotLayer, self).__init__(incoming, **kwargs)

        # Flatten all trailing input dimensions into a single feature axis.
        num_inputs = int(np.prod(self.input_shape[1:]))
        self.manifold = FixedRankEmbeeded(num_inputs, num_units, min(num_inputs, num_units))

        self.num_units = num_units
        # Not regularizable: the manifold structure is maintained elsewhere.
        self.W = self.add_param(self.manifold.rand(), (num_inputs, num_units), name="W", regularizable=False)

        self.op = DotOp(self.manifold)

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        return self.op(input, self.W)
コード例 #2
0
class LowRankLayer(lasagne.layers.Layer):
    """Dense layer with an explicit rank-``rank`` factorization W = U.S.V.

    The (num_inputs x num_units) weight matrix is never materialised; it is
    stored as the three factors U (num_inputs x r), S (r x r) and
    V (r x num_units).
    """

    def __init__(self, incoming, num_units, rank, **kwargs):
        super(LowRankLayer, self).__init__(incoming, **kwargs)

        # Flatten all trailing input dimensions into a single feature axis.
        num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_inputs = num_inputs
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.r = rank

        self.manifold = FixedRankEmbeeded(*self.shape, k=self.r)
        U, S, V = self.manifold.rand_np()
        # Register the factors under stable names; only the small (r x r)
        # core S is regularized.
        self.U = self.add_param(U, (self.num_inputs, self.r), name="U", regularizable=False)
        self.S = self.add_param(S, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(V, (self.r, self.num_units), name="V", regularizable=False)

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        if input.ndim > 2:
            # if the input has more than two dimensions, flatten it into a
            # batch of feature vectors.
            input = input.flatten(2)
        # x @ U @ S @ V -- applies the factors left to right without ever
        # forming the full weight matrix.
        return input.dot(self.U).dot(self.S).dot(self.V)
コード例 #3
0
class OldKronLayer(lasagne.layers.Layer):
    """Dense layer whose weight is a Kronecker product of two factor shapes,
    with the Kronecker coefficients kept in fixed-rank (U, S, V) form."""

    def __init__(self, incoming, num_units, shape2, param_density=1.0, **kwargs):
        super(OldKronLayer, self).__init__(incoming, **kwargs)

        self.num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.shape2 = shape2

        # shape2 must tile the full (num_inputs x num_units) weight exactly.
        rows_ok = self.shape[0] % self.shape2[0] == 0
        cols_ok = self.shape[1] % self.shape2[1] == 0
        if not (rows_ok and cols_ok):
            raise ValueError('shape must divide exactly by shape2, but they have {}, {}'.format(self.shape, shape2))

        self.shape1 = (self.shape[0] // self.shape2[0],
                       self.shape[1] // self.shape2[1])
        self.kron_shape = (int(np.prod(self.shape1)), int(np.prod(self.shape2)))
        # Rank derived from the requested parameter density, at least 1.
        self.r = max(1, int(param_density * min(self.kron_shape)))

        self.manifold = FixedRankEmbeeded(*self.kron_shape, k=self.r)
        u0, s0, v0 = self.manifold.rand_np()

        # Register the three factors; only the small (r x r) core is regularized.
        self.U = self.add_param(u0, (self.kron_shape[0], self.r), name="U", regularizable=False)
        self.S = self.add_param(s0, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(v0, (self.r, self.kron_shape[1]), name="V", regularizable=False)
        n_params = np.prod(u0.shape) + np.prod(s0.shape) + np.prod(v0.shape)
        print('number_of_params for {}: {}'.format(self.name, n_params))
        self.op = KronStep(self.manifold, self.shape1, self.shape2)

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        return self.op(input, self.U, self.S, self.V)
コード例 #4
0
    def __init__(self, incoming, num_units, shape2, param_density=1.0, rank=None, use_rank=True, **kwargs):
        """Initialise a Kronecker-factorized dense layer.

        The flattened (num_inputs x num_units) weight is viewed as a
        Kronecker product of a ``shape1`` and a ``shape2`` factor; the
        Kronecker coefficients are parameterised in fixed-rank U.S.V form.
        """
        super(KronLayer, self).__init__(incoming, **kwargs)

        # Default to rank 1 when no explicit rank is requested.
        if rank is None:
            rank = 1

        self.num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.shape2 = shape2

        # shape2 must tile the full weight shape exactly.
        divisible = (self.shape[0] % self.shape2[0] == 0
                     and self.shape[1] % self.shape2[1] == 0)
        if not divisible:
            raise ValueError('shape must divide exactly by shape2, but they have {}, {}'.format(self.shape, shape2))

        self.shape1 = (self.shape[0] // self.shape2[0],
                       self.shape[1] // self.shape2[1])
        self.kron_shape = (int(np.prod(self.shape1)), int(np.prod(self.shape2)))
        # Either the caller-supplied rank or one derived from param_density.
        if use_rank:
            self.r = rank
        else:
            self.r = max(1, int(param_density * min(self.kron_shape)))

        self.manifold = FixedRankEmbeeded(*self.kron_shape, k=self.r)

        u0, s0, v0 = self.manifold.rand_np()

        # Register the three factors; only the small (r x r) core is regularized.
        self.U = self.add_param(u0, (self.kron_shape[0], self.r), name="U", regularizable=False)
        self.S = self.add_param(s0, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(v0, (self.r, self.kron_shape[1]), name="V", regularizable=False)
        print('number_of_params for {}: {}'.format(self.name, np.prod(u0.shape) + np.prod(s0.shape) + np.prod(v0.shape)))
コード例 #5
0
class LowRankLayer(lasagne.layers.Layer):
    """Dense layer with an explicit rank-``rank`` factorization W = U.S.V.

    The (num_inputs x num_units) weight matrix is never materialised; it is
    stored as the three factors U (num_inputs x r), S (r x r) and
    V (r x num_units).
    """

    def __init__(self, incoming, num_units, rank, **kwargs):
        super(LowRankLayer, self).__init__(incoming, **kwargs)

        # Flatten all trailing input dimensions into a single feature axis.
        num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_inputs = num_inputs
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.r = rank

        self.manifold = FixedRankEmbeeded(*self.shape, k=self.r)
        U, S, V = self.manifold.rand_np()
        # Register the factors under stable names; only the small (r x r)
        # core S is regularized.
        self.U = self.add_param(U, (self.num_inputs, self.r),
                                name="U",
                                regularizable=False)
        self.S = self.add_param(S, (self.r, self.r),
                                name="S",
                                regularizable=True)
        self.V = self.add_param(V, (self.r, self.num_units),
                                name="V",
                                regularizable=False)

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        if input.ndim > 2:
            # if the input has more than two dimensions, flatten it into a
            # batch of feature vectors.
            input = input.flatten(2)
        # x @ U @ S @ V -- applies the factors left to right without ever
        # forming the full weight matrix.
        return input.dot(self.U).dot(self.S).dot(self.V)
コード例 #6
0
    def __init__(self,
                 incoming,
                 num_units,
                 shape2,
                 param_density=1.0,
                 rank=None,
                 use_rank=True,
                 **kwargs):
        """Initialise a Kronecker-factorized dense layer.

        The flattened weight is a Kronecker product of a ``shape1`` and a
        ``shape2`` factor; coefficients are kept in fixed-rank U.S.V form.
        """
        super(KronLayer, self).__init__(incoming, **kwargs)

        # Default to rank 1 when no explicit rank is requested.
        if rank is None:
            rank = 1

        self.num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.shape2 = shape2

        # shape2 must tile the full weight shape exactly.
        rows_ok = self.shape[0] % self.shape2[0] == 0
        cols_ok = self.shape[1] % self.shape2[1] == 0
        if not (rows_ok and cols_ok):
            raise ValueError('shape must divide exactly by shape2, but they have {}, {}'.format(self.shape, shape2))

        self.shape1 = (self.shape[0] // self.shape2[0],
                       self.shape[1] // self.shape2[1])
        self.kron_shape = (int(np.prod(self.shape1)), int(np.prod(self.shape2)))
        # Either the caller-supplied rank or one derived from param_density.
        self.r = rank if use_rank else max(1, int(param_density * min(self.kron_shape)))

        self.manifold = FixedRankEmbeeded(*self.kron_shape, k=self.r)

        u0, s0, v0 = self.manifold.rand_np()

        # Register the three factors; only the small (r x r) core is regularized.
        self.U = self.add_param(u0, (self.kron_shape[0], self.r), name="U", regularizable=False)
        self.S = self.add_param(s0, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(v0, (self.r, self.kron_shape[1]), name="V", regularizable=False)
        n_params = np.prod(u0.shape) + np.prod(s0.shape) + np.prod(v0.shape)
        print('number_of_params for {}: {}'.format(self.name, n_params))
コード例 #7
0
    def __init__(self, incoming, num_units, W=lasagne.init.GlorotUniform(), **kwargs):
        """Initialise the layer with a manifold-drawn weight matrix.

        NOTE(review): the ``W`` initializer argument is accepted but never
        used -- the weight is always sampled from the manifold.
        """
        super(DotLayer2, self).__init__(incoming, **kwargs)

        # Flatten all trailing input dimensions into a single feature axis.
        num_inputs = int(np.prod(self.input_shape[1:]))

        self.manifold = FixedRankEmbeeded(num_inputs, num_units, min(num_inputs, num_units))
        self.num_units = num_units
        # presumably rand(name="USV") returns a named manifold sample in
        # (U, S, V) form -- verify against FixedRankEmbeeded.rand.
        self.W = self.add_param(self.manifold.rand(name="USV"), (num_inputs, num_units), name="W")
        #self.W = self.manifold._normalize_columns(self.W)
        self.op = DotStep(self.manifold)
コード例 #8
0
    def __init__(self, incoming, num_units, rank, **kwargs):
        """Set up a rank-``rank`` factorization W = U.S.V of a dense layer."""
        super(LowRankLayer, self).__init__(incoming, **kwargs)

        # Flatten all trailing input dimensions into a single feature axis.
        n_in = int(np.prod(self.input_shape[1:]))
        self.num_inputs = n_in
        self.num_units = num_units
        self.shape = (n_in, num_units)
        self.r = rank

        self.manifold = FixedRankEmbeeded(*self.shape, k=self.r)
        u0, s0, v0 = self.manifold.rand_np()
        # Register the factors under stable names; only the small (r x r)
        # core is regularized.
        self.U = self.add_param(u0, (n_in, self.r), name="U", regularizable=False)
        self.S = self.add_param(s0, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(v0, (self.r, num_units), name="V", regularizable=False)
コード例 #9
0
class OldKronLayer(lasagne.layers.Layer):
    """Dense layer whose weight is a Kronecker product of two factor shapes,
    with the Kronecker coefficients kept in fixed-rank (U, S, V) form."""

    def __init__(self, incoming, num_units, shape2, param_density=1.0, **kwargs):
        super(OldKronLayer, self).__init__(incoming, **kwargs)

        self.num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.shape2 = shape2

        # shape2 must tile the full (num_inputs x num_units) weight exactly.
        rows_ok = self.shape[0] % self.shape2[0] == 0
        cols_ok = self.shape[1] % self.shape2[1] == 0
        if not (rows_ok and cols_ok):
            raise ValueError('shape must divide exactly by shape2, but they have {}, {}'.format(self.shape, shape2))

        self.shape1 = (self.shape[0] // self.shape2[0],
                       self.shape[1] // self.shape2[1])
        self.kron_shape = (int(np.prod(self.shape1)), int(np.prod(self.shape2)))
        # Rank derived from the requested parameter density, at least 1.
        self.r = max(1, int(param_density * min(self.kron_shape)))

        self.manifold = FixedRankEmbeeded(*self.kron_shape, k=self.r)
        u0, s0, v0 = self.manifold.rand_np()

        # Register the three factors; only the small (r x r) core is regularized.
        self.U = self.add_param(u0, (self.kron_shape[0], self.r), name="U", regularizable=False)
        self.S = self.add_param(s0, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(v0, (self.r, self.kron_shape[1]), name="V", regularizable=False)
        n_params = np.prod(u0.shape) + np.prod(s0.shape) + np.prod(v0.shape)
        print('number_of_params for {}: {}'.format(self.name, n_params))
        self.op = KronStep(self.manifold, self.shape1, self.shape2)

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        return self.op(input, self.U, self.S, self.V)
コード例 #10
0
    def __init__(self, incoming, num_units, rank, **kwargs):
        """Set up a rank-``rank`` factorization W = U.S.V of a dense layer."""
        super(LowRankLayer, self).__init__(incoming, **kwargs)

        # Flatten all trailing input dimensions into a single feature axis.
        n_in = int(np.prod(self.input_shape[1:]))
        self.num_inputs = n_in
        self.num_units = num_units
        self.shape = (n_in, num_units)
        self.r = rank

        self.manifold = FixedRankEmbeeded(*self.shape, k=self.r)
        u0, s0, v0 = self.manifold.rand_np()
        # Register the factors; only the small (r x r) core is regularized.
        self.U = self.add_param(u0, (n_in, self.r), name="U", regularizable=False)
        self.S = self.add_param(s0, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(v0, (self.r, num_units), name="V", regularizable=False)
コード例 #11
0
class KronLayer(lasagne.layers.Layer):
    """Dense layer whose weight matrix is a sum of Kronecker products.

    The (num_inputs x num_units) weight is viewed as a Kronecker product of a
    ``shape1`` and a ``shape2`` factor; the Kronecker coefficients are kept
    in fixed-rank form U * S * V of rank ``self.r`` and the product is
    applied term by term via ``apply_mat_to_kron``.
    """

    def __init__(self, incoming, num_units, shape2, param_density=1.0, rank=None, use_rank=True, **kwargs):
        super(KronLayer, self).__init__(incoming, **kwargs)

        # Default to rank 1 when no explicit rank is requested.
        rank = 1 if rank is None else rank

        self.num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.shape2 = shape2
        # shape2 must tile the full weight shape exactly.
        if self.shape[0] % self.shape2[0] != 0 or self.shape[1] % self.shape2[1] != 0:
            raise ValueError('shape must divide exactly by shape2, but they have {}, {}'.format(self.shape, shape2))

        self.shape1 = self.shape[0] // self.shape2[0], self.shape[1] // self.shape2[1]
        self.kron_shape = (int(np.prod(self.shape1)), int(np.prod(self.shape2)))
        # Either the caller-supplied rank or one derived from param_density.
        self.r = rank if use_rank else max(1, int(param_density * min(self.kron_shape)))

        self.manifold = FixedRankEmbeeded(*self.kron_shape, k=self.r)

        U, S, V = self.manifold.rand_np()

        # Register the three factors; only the small (r x r) core is regularized.
        self.U = self.add_param(U, (self.kron_shape[0], self.r), name="U", regularizable=False)
        self.S = self.add_param(S, (self.r, self.r), name="S", regularizable=True)
        self.V = self.add_param(V, (self.r, self.kron_shape[1]), name="V", regularizable=False)
        print('number_of_params for {}: {}'.format(self.name, np.prod(U.shape) + np.prod(S.shape) + np.prod(V.shape)))

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        if input.ndim > 2:
            # if the input has more than two dimensions, flatten it into a
            # batch of feature vectors.
            input = input.flatten(2)
        # shape1[1] * shape2[1] == num_units by construction.
        activation = T.zeros((input.shape[0], self.shape1[1] * self.shape2[1]))
        # Split S symmetrically so each rank-1 term becomes
        # (u_i * sqrt(s_i)) kron (sqrt(s_i) * v_i).
        s = T.diag(T.sqrt(T.diag(self.S)))
        u = self.U.dot(s)
        w = s.dot(self.V)
        for i in range(self.manifold._k):
            activation += apply_mat_to_kron(input,
                                            u[:, i].reshape((self.shape1[::-1])).T,
                                            w[i, :].reshape((self.shape2[::-1])).T)
        return activation
コード例 #12
0
class KronLayer(lasagne.layers.Layer):
    """Dense layer whose weight matrix is a sum of Kronecker products.

    The (num_inputs x num_units) weight is viewed as a Kronecker product of a
    ``shape1`` and a ``shape2`` factor; the Kronecker coefficients are kept
    in fixed-rank form U * S * V of rank ``self.r`` and the product is
    applied term by term via ``apply_mat_to_kron``.
    """

    def __init__(self,
                 incoming,
                 num_units,
                 shape2,
                 param_density=1.0,
                 rank=None,
                 use_rank=True,
                 **kwargs):
        super(KronLayer, self).__init__(incoming, **kwargs)

        # Default to rank 1 when no explicit rank is requested.
        rank = 1 if rank is None else rank

        self.num_inputs = int(np.prod(self.input_shape[1:]))
        self.num_units = num_units
        self.shape = (self.num_inputs, self.num_units)
        self.shape2 = shape2
        # shape2 must tile the full weight shape exactly.
        if self.shape[0] % self.shape2[0] != 0 or self.shape[1] % self.shape2[
                1] != 0:
            raise ValueError(
                'shape must divide exactly by shape2, but they have {}, {}'.
                format(self.shape, shape2))

        self.shape1 = self.shape[0] // self.shape2[0], self.shape[
            1] // self.shape2[1]
        self.kron_shape = (int(np.prod(self.shape1)),
                           int(np.prod(self.shape2)))
        # Either the caller-supplied rank or one derived from param_density.
        self.r = rank if use_rank else max(
            1, int(param_density * min(self.kron_shape)))

        self.manifold = FixedRankEmbeeded(*self.kron_shape, k=self.r)

        U, S, V = self.manifold.rand_np()

        # Register the three factors; only the small (r x r) core is regularized.
        self.U = self.add_param(U, (self.kron_shape[0], self.r),
                                name="U",
                                regularizable=False)
        self.S = self.add_param(S, (self.r, self.r),
                                name="S",
                                regularizable=True)
        self.V = self.add_param(V, (self.r, self.kron_shape[1]),
                                name="V",
                                regularizable=False)
        print('number_of_params for {}: {}'.format(
            self.name,
            np.prod(U.shape) + np.prod(S.shape) + np.prod(V.shape)))

    def get_output_shape_for(self, input_shape):
        # (batch_size, num_units)
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, **kwargs):
        if input.ndim > 2:
            # if the input has more than two dimensions, flatten it into a
            # batch of feature vectors.
            input = input.flatten(2)
        # shape1[1] * shape2[1] == num_units by construction.
        activation = T.zeros((input.shape[0], self.shape1[1] * self.shape2[1]))
        # Split S symmetrically so each rank-1 term becomes
        # (u_i * sqrt(s_i)) kron (sqrt(s_i) * v_i).
        s = T.diag(T.sqrt(T.diag(self.S)))
        u = self.U.dot(s)
        w = s.dot(self.V)
        for i in range(self.manifold._k):
            activation += apply_mat_to_kron(
                input, u[:, i].reshape((self.shape1[::-1])).T, w[i, :].reshape(
                    (self.shape2[::-1])).T)
        return activation