Example #1
    def setup(self):
        """
        All code that creates parameters should go into the 'setup' method.
        """
        self.output_dim = 10
        self.encoder = Chain(self.input_dim).stack(Dense(self.internal_layer_size, "tanh"))
        self.decoder = Chain(self.internal_layer_size).stack(Dense(self.input_dim))
        self.classifier = Chain(self.internal_layer_size).stack(
            Dense(50, "tanh"), Dense(self.output_dim), Softmax())

        self.register_inner_layers(self.encoder, self.decoder, self.classifier)

        self.target_input = T.ivector("target")
        self.register_external_inputs(self.target_input)
Example #2
class MyJointTrainingModel(NeuralLayer):
    """
    A customized model that trains an auto-encoder and an MLP classifier simultaneously.
    """
    def __init__(self, internal_layer_size=100):
        super(MyJointTrainingModel, self).__init__("my joint-training model")
        self.internal_layer_size = internal_layer_size

    def prepare(self):
        """
        All code that creates parameters should go into the 'prepare' method.
        """
        self.output_dim = 10
        self.encoder = Chain(self.input_dim).stack(
            Dense(self.internal_layer_size, 'tanh'))
        self.decoder = Chain(self.internal_layer_size).stack(
            Dense(self.input_dim))
        self.classifier = Chain(self.internal_layer_size).stack(
            Dense(50, 'tanh'), Dense(self.output_dim), Softmax())

        self.register_inner_layers(self.encoder, self.decoder, self.classifier)

        self.target_input = T.ivector('target')
        self.register_external_inputs(self.target_input)

    def compute_tensor(self, x):
        """
        Build the computation graph here.
        """
        internal_variable = self.encoder.compute_tensor(x)

        decoding_output = self.decoder.compute_tensor(internal_variable)

        classification_output = self.classifier.compute_tensor(
            internal_variable)

        auto_encoder_cost = AutoEncoderCost(decoding_output, x).get()

        classification_cost = CrossEntropyCost(classification_output,
                                               self.target_input).get()

        final_cost = 0.01 * auto_encoder_cost + classification_cost

        error_rate = ErrorRateCost(classification_output,
                                   self.target_input).get()

        self.register_monitors(("err", error_rate),
                               ("encoder_cost", auto_encoder_cost),
                               ("classify_cost", classification_cost))

        return final_cost
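
These layer classes appear to come from the deepy library. For context, a cost-producing layer like this still needs a network wrapper and a trainer; the sketch below follows the MNIST recipe in deepy's README (MnistDataset, MiniBatches, and MomentumTrainer are deepy classes), while wrapping the model in NeuralNetwork and stacking MyJointTrainingModel directly is an assumption for illustration, not code from the original project.

# Hypothetical wiring, adapted from deepy's README MNIST example.
# Stacking a cost-returning layer on a bare NeuralNetwork is an assumption.
from deepy.dataset import MnistDataset, MiniBatches
from deepy.networks import NeuralNetwork
from deepy.trainers import MomentumTrainer

model = NeuralNetwork(input_dim=28 * 28)
model.stack(MyJointTrainingModel())  # compute_tensor returns the training cost

trainer = MomentumTrainer(model)
mnist = MiniBatches(MnistDataset(), batch_size=20)
trainer.run(mnist)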
Example #3
class ClassOutputLayer(NeuralLayer):
    def __init__(self, output_size, class_size):
        super(ClassOutputLayer, self).__init__("class_output")
        self.output_size = output_size
        self.class_size = class_size

    def prepare(self):
        # Output layers
        self.output_layer = Chain(self.input_dim).stack(
            Dense(self.output_size * self.class_size))
        self.softmax_layer = Softmax().initialize(input_dim=self.output_size)

        self.class_layer = Chain(self.input_dim).stack(Dense(self.class_size),
                                                       Softmax3D())
        self.register_inner_layers(self.class_layer, self.output_layer)
        # Target tensor
        self.target_tensor = T.imatrix('target')
        self.register_external_targets(self.target_tensor)
        # Cached index vector for row selection; assumes batch * time <= 10 * 64
        self.arange_cache = theano.shared(np.arange(10 * 64),
                                          name="arange_cache")

    def compute_tensor(self, x):
        """
        :param x: (batch, time, vec)
        """
        # Class id of each target token
        class_matrix = self.target_tensor // self.output_size
        class_vector = class_matrix.reshape((-1,))
        # Within-class index of each target token
        target_matrix = self.target_tensor % self.output_size
        target_vector = target_matrix.reshape((-1,))
        # Output matrix
        output_tensor3d = self.output_layer.compute_tensor(x)
        output_matrix = output_tensor3d.reshape(
            (-1, self.class_size, self.output_size))
        arange_vec = self.arange_cache[:output_matrix.shape[0]]
        sub_output_matrix = output_matrix[arange_vec, class_vector]
        # Softmax
        softmax_output_matrix = self.softmax_layer.compute_tensor(
            sub_output_matrix)
        # Class prediction
        class_output_matrix = self.class_layer.compute_tensor(x)
        # Costs
        output_cost = LMCost(softmax_output_matrix, target_vector).get()
        class_cost = LMCost(class_output_matrix, class_matrix).get()
        final_cost = output_cost + class_cost

        return final_cost
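
The integer arithmetic above implements a class-factorized softmax: each target id w is split into a class id w // output_size and a within-class index w % output_size, so the model evaluates one softmax over class_size classes plus one over output_size entries instead of a full-vocabulary softmax. A small self-contained check of that decomposition (plain NumPy, illustrative sizes):

import numpy as np

output_size = 5                     # illustrative: 4 classes * 5 entries = 20 ids
targets = np.array([[3, 7, 19]])    # (batch, time) target ids

class_ids = targets // output_size  # which class's softmax to use -> [[0 1 3]]
within = targets % output_size      # index inside that class      -> [[3 2 4]]
assert (class_ids * output_size + within == targets).all()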
Example #4
class ClassOutputLayer(NeuralLayer):

    def __init__(self, output_size, class_size):
        super(ClassOutputLayer, self).__init__("class_output")
        self.output_size = output_size
        self.class_size = class_size

    def setup(self):
        # Output layers
        self.output_layer = Chain(self.input_dim).stack(Dense(self.output_size * self.class_size))
        self.softmax_layer = Softmax().connect(input_dim=self.output_size)

        self.class_layer = Chain(self.input_dim).stack(Dense(self.class_size),
                                                       Softmax3D())
        self.register_inner_layers(self.class_layer, self.output_layer)
        # Target tensor
        self.target_tensor = T.imatrix('target')
        self.register_external_targets(self.target_tensor)
        # Cached index vector for row selection; assumes batch * time <= 10 * 64
        self.arange_cache = theano.shared(np.arange(10 * 64), name="arange_cache")

    def output(self, x):
        """
        :param x: (batch, time, vec)
        """
        # Class id of each target token
        class_matrix = self.target_tensor // self.output_size
        class_vector = class_matrix.reshape((-1,))
        # Within-class index of each target token
        target_matrix = self.target_tensor % self.output_size
        target_vector = target_matrix.reshape((-1,))
        # Output matrix
        output_tensor3d = self.output_layer.output(x)
        output_matrix = output_tensor3d.reshape((-1, self.class_size, self.output_size))
        arange_vec = self.arange_cache[:output_matrix.shape[0]]
        sub_output_matrix = output_matrix[arange_vec, class_vector]
        # Softmax
        softmax_output_matrix = self.softmax_layer.output(sub_output_matrix)
        # Class prediction
        class_output_matrix = self.class_layer.output(x)
        # Costs
        output_cost = LMCost(softmax_output_matrix, target_vector).get()
        class_cost = LMCost(class_output_matrix, class_matrix).get()
        final_cost = output_cost + class_cost

        return final_cost
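
Note the older deepy naming in this version of ClassOutputLayer: setup, output, and connect play the roles that prepare, compute_tensor, and initialize play in Example #3; the layer logic is otherwise identical.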
Example #5
class FullOutputLayer(NeuralLayer):
    def __init__(self, vocab_size):
        super(FullOutputLayer, self).__init__("full_output")
        self.vocab_size = vocab_size

    def prepare(self):
        self.core = Chain(self.input_dim).stack(Dense(self.vocab_size), Softmax3D())
        self.register_inner_layers(self.core)

    def compute_tensor(self, x):
        return self.core.compute_tensor(x)
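
FullOutputLayer is the unfactorized counterpart of ClassOutputLayer: a single Dense projection to the full vocabulary followed by a time-distributed softmax. Softmax3D presumably normalizes over the last axis of a (batch, time, vocab) tensor; the NumPy sketch below illustrates that behavior (illustrative code, not deepy's implementation):

import numpy as np

def softmax3d(t):
    # Normalize over the last axis of a (batch, time, vocab) tensor
    e = np.exp(t - t.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

x = np.random.randn(2, 3, 5)
p = softmax3d(x)
assert np.allclose(p.sum(axis=-1), 1.0)  # each (batch, time) row sums to 1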
Example #6
    def setup(self):
        self.core = Chain(self.input_dim).stack(Dense(self.vocab_size),
                                                Softmax3D())
        self.register_inner_layers(self.core)