Example #1
    def get_params(self):
        """Collect this layer's parameters, regularizers, constraints and
        updates, padding missing constraints with identity() so that every
        parameter ends up paired with exactly one constraint."""
        consts = []
        updates = []

        if hasattr(self, 'params'):
            params = self.params
        else:
            params = []

        if hasattr(self, 'regularizers'):
            regularizers = self.regularizers
        else:
            regularizers = []

        if hasattr(self, 'constraints') and len(self.constraints) == len(params):
            for c in self.constraints:
                if c:
                    consts.append(c)
                else:
                    consts.append(constraints.identity())
        elif hasattr(self, 'constraint') and self.constraint:
            consts += [self.constraint for _ in range(len(params))]
        else:
            consts += [constraints.identity() for _ in range(len(params))]

        if hasattr(self, 'updates') and self.updates:
            updates += self.updates

        return params, regularizers, consts, updates
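For context, the point of the fallback logic above is that the four returned lists stay aligned: every parameter ends up paired with exactly one constraint, so an optimizer can zip them safely. Below is a minimal self-contained sketch of that contract; ToyLayer and the local Identity class are illustrative stand-ins, not the Keras API.

class Identity:
    """Local stand-in for keras.constraints.identity: a no-op."""
    def __call__(self, p):
        return p

class ToyLayer:
    def __init__(self):
        self.params = ['W', 'b']   # placeholder parameter names
        # note: no `constraints` attribute at all

def paired_constraints(layer):
    params = getattr(layer, 'params', [])
    consts = getattr(layer, 'constraints', None)
    if not consts or len(consts) != len(params):
        # same fallback as get_params(): pad with no-op constraints
        consts = [Identity() for _ in params]
    return list(zip(params, consts))

print(paired_constraints(ToyLayer()))   # [('W', <Identity>), ('b', <Identity>)]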
Example #2
    def set_constraints(self, constraints=None):
        self.constraints = []
        for name in self.params_dict.keys():
            if constraints and name in constraints:
                # one independent (deep) copy of the constraint per parameter
                self.constraints += [deepcopy(constraints[name])
                                     for param in self.params_dict[name]]
            else:
                # no constraint given for this group: pad with the no-op
                self.constraints += [identity() for param in self.params_dict[name]]
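A hedged toy example of why the deep copy matters: with a stateful constraint, each parameter must get its own copy rather than a shared object. CountingConstraint below is illustrative, not part of any library.

from copy import deepcopy

class CountingConstraint:
    """Illustrative stateful constraint: counts how often it is applied."""
    def __init__(self):
        self.calls = 0
    def __call__(self, p):
        self.calls += 1
        return p

params = ['W0', 'W1', 'b0']
shared = CountingConstraint()

# deep-copied: each parameter gets an independent constraint object
copied = [deepcopy(shared) for _ in params]
for c, p in zip(copied, params):
    c(p)
print([c.calls for c in copied])   # [1, 1, 1] -- state is not shared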
Example #3
    def test_identity(self):
        from keras.constraints import identity

        identity_instance = identity()

        normed = identity_instance(self.example_array)
        assert (np.all(normed == self.example_array))
Example #4
    def test_identity(self):
        from keras.constraints import identity

        identity_instance = identity()

        normed = identity_instance(self.example_array)
        assert (np.all(normed == self.example_array))
Example #5
def test_identity_oddballs():
    """
    test the identity constraint on some more exotic input.
    this does not need to pass for the desired real life behaviour,
    but it should in the current implementation.
    """
    identity_instance = constraints.identity()
    oddball_examples = ["Hello", [1], -1, None]
    assert(oddball_examples == identity_instance(oddball_examples))
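Both tests pass because the identity constraint simply hands back its argument untouched, whatever its type. The implementation is sketched below from the behaviour the tests assert, not copied from the Keras source:

class Constraint(object):
    def __call__(self, p):
        # the base class is already a no-op; real constraints override this
        return p

class identity(Constraint):
    """Returns its input unchanged, so arrays, strings, lists,
    numbers and None all survive the 'oddball' test above."""
    pass

assert identity()("Hello") == "Hello"
assert identity()(None) is None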
Example #6
    def __init__(self):
        rng = numpy.random.RandomState(23455)

        self.X1 = T.tensor4('X1', dtype='float32')
        self.X2 = T.tensor4('X2', dtype='float32')
        self.Y = T.ivector('Y')

        self.layer0 = Layer.ConvMaxPool2Layer(rng,
                                              input1=self.X1,
                                              input2=self.X2,
                                              filter_shape=[25, 3, 5, 5],
                                              poolsize=[2, 2])

        self.layer1 = Layer.ConvMaxPool2Layer(rng,
                                              input1=self.layer0.output1,
                                              input2=self.layer0.output2,
                                              filter_shape=[25, 25, 3, 3],
                                              poolsize=[2, 2])

        self.layer2 = Layer.SecretLayer(rng,
                                        input1=self.layer1.output1,
                                        input2=self.layer1.output2,
                                        filter_shape=[25, 25, 5, 5])

        # self.layer3 = Layer.MultiConvMaxPoolLayer(
        #     rng,
        #     input=self.layer2.results,
        #     filter_shape=[25, 25, 3, 3],
        #     poolsize=(2, 2)
        # )

        self.layer3 = Layer.LocalCovLayerDropout(rng,
                                                 input=self.layer2.results,
                                                 n_in=18 * 9 * 25,
                                                 n_out=200)

        self.layer4 = Layer.HiddenLayerDropout(
            rng,
            train_input=self.layer3.train_output,
            test_input=self.layer3.test_output,
            # n_in=25*24*3,
            n_in=800,
            n_out=200)
        # self.layer2 = Layer.ConvMaxPoolLayer(
        #     rng,
        #     input=T.abs_(self.layer1.output1 - self.layer1.output2),
        #     filter_shape=[25, 25, 3, 3],
        #     poolsize=[2, 2]
        # )
        #
        # self.layer3 = Layer.HiddenLayer(
        #     rng,
        #     input=self.layer2.output,
        #     n_in=25*18*5,
        #     n_out=500
        # )

        # self.layer5 = Layer.LogisticRegression(self.layer4.output, 500, 2)
        # self.cost = self.layer5.negative_log_likelihood(self.Y)

        self.layer5 = Layer.LogisticRegressionDropout(
            train_input=self.layer4.train_output,
            test_input=self.layer4.test_output,
            n_in=200,
            n_out=2)
        self.cost = self.layer5.negative_log_likelihood_train(self.Y)

        self.params = (self.layer5.params + self.layer4.params +
                       self.layer3.params + self.layer2.params +
                       self.layer1.params + self.layer0.params)
        self.grads = T.grad(self.cost, self.params)

        # learning_rate = numpy.float32(0.01)
        # updates = [
        #     (param_i, param_i - learning_rate * grad_i)
        #     for param_i, grad_i in zip(params, grads)
        # ]

        # one no-op constraint per parameter; the optimizer pairs each
        # parameter with the constraint at the same position
        constraints_list = [identity() for _ in self.params]

        rms = RMSprop()
        self.updates = rms.get_updates(self.params, constraints_list,
                                       self.cost)
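The constraints_list exists because the optimizer applies each constraint to its parameter after the gradient step; with identity() this is a no-op. Below is a self-contained NumPy sketch of that RMSprop pattern. Hedged: it mirrors the old Keras 0.x get_updates loop in spirit, not the exact Theano code used here.

import numpy as np

def rmsprop_step(params, grads, accums, constraints,
                 lr=0.001, rho=0.9, eps=1e-6):
    """One RMSprop update; each new parameter value is passed
    through its constraint before being stored."""
    new_params = []
    for p, g, a, c in zip(params, grads, accums, constraints):
        a[:] = rho * a + (1. - rho) * g ** 2   # running average of grad^2
        new_params.append(c(p - lr * g / np.sqrt(a + eps)))
    return new_params

def identity():
    return lambda p: p                          # no-op constraint factory

w, g, a = [np.ones(3)], [np.full(3, 0.5)], [np.zeros(3)]
print(rmsprop_step(w, g, a, [identity()]))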
Example #7
def test_identity():
    identity_instance = constraints.identity()
    normed = identity_instance(example_array)
    assert(np.all(normed == example_array))
Example #8
    def __init__(self):
        rng = numpy.random.RandomState(23455)

        self.X1 = T.tensor4('X1', dtype='float32')
        self.X2 = T.tensor4('X2', dtype='float32')
        self.Y = T.ivector('Y')

        self.layer0 = Layer.ConvMaxPool2Layer(
            rng,
            input1=self.X1,
            input2=self.X2,
            filter_shape=[25, 3, 5, 5],
            poolsize=[2, 2]
        )

        self.layer1 = Layer.ConvMaxPool2Layer(
            rng,
            input1=self.layer0.output1,
            input2=self.layer0.output2,
            filter_shape=[25, 25, 3, 3],
            poolsize=[2, 2]
        )

        self.layer2 = Layer.SecretLayer(
            rng,
            input1=self.layer1.output1,
            input2=self.layer1.output2,
            filter_shape=[25, 25, 5, 5]
        )

        # self.layer3 = Layer.MultiConvMaxPoolLayer(
        #     rng,
        #     input=self.layer2.results,
        #     filter_shape=[25, 25, 3, 3],
        #     poolsize=(2, 2)
        # )

        self.layer3 = Layer.LocalCovLayerDropout(
            rng,
            input=self.layer2.results,
            n_in=18*9*25,
            n_out=200
        )

        self.layer4 = Layer.HiddenLayerDropout(
            rng,
            train_input=self.layer3.train_output,
            test_input=self.layer3.test_output,
            # n_in=25*24*3,
            n_in=800,
            n_out=200
        )
        # self.layer2 = Layer.ConvMaxPoolLayer(
        #     rng,
        #     input=T.abs_(self.layer1.output1 - self.layer1.output2),
        #     filter_shape=[25, 25, 3, 3],
        #     poolsize=[2, 2]
        # )
        #
        # self.layer3 = Layer.HiddenLayer(
        #     rng,
        #     input=self.layer2.output,
        #     n_in=25*18*5,
        #     n_out=500
        # )

        # self.layer5 = Layer.LogisticRegression(self.layer4.output, 500, 2)
        # self.cost = self.layer5.negative_log_likelihood(self.Y)

        self.layer5 = Layer.LogisticRegressionDropout(
            train_input=self.layer4.train_output,
            test_input=self.layer4.test_output,
            n_in=200,
            n_out=2
        )
        self.cost = self.layer5.negative_log_likelihood_train(self.Y)

        self.params = (self.layer5.params + self.layer4.params +
                       self.layer3.params + self.layer2.params +
                       self.layer1.params + self.layer0.params)
        self.grads = T.grad(self.cost, self.params)

        # learning_rate = numpy.float32(0.01)
        # updates = [
        #     (param_i, param_i - learning_rate * grad_i)
        #     for param_i, grad_i in zip(params, grads)
        # ]

        # one no-op constraint per parameter; the optimizer pairs each
        # parameter with the constraint at the same position
        constraints_list = [identity() for _ in self.params]

        rms = RMSprop()
        self.updates = rms.get_updates(self.params, constraints_list, self.cost)
Example #9
layer4 = Layer.LogisticRegression(layer3.output, 500, 2)
cost = layer4.negative_log_likelihood(Y)

params = (layer4.params + layer3.params + layer2.params +
          layer1.params + layer0.params)
grads = T.grad(cost, params)

# learning_rate = numpy.float32(0.01)
# updates = [
#     (param_i, param_i - learning_rate * grad_i)
#     for param_i, grad_i in zip(params, grads)
# ]

# one no-op constraint per parameter; the optimizer pairs each
# parameter with the constraint at the same position
constraints_list = [identity() for _ in params]

rms = RMSprop()
updates = rms.get_updates(params, constraints_list, cost)

def read_image(address):
    img = Image.open(address)
    img = numpy.asarray(img, dtype='float32') / 256.  # scale pixels to [0, 1)
    # put image in a 4D tensor of shape (1, 3, height, width)
    img = img.transpose(2, 0, 1).reshape(1, 3, 128, 48)
    return img
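A hedged usage sketch that round-trips a synthetic 128x48 RGB image through read_image above. The temporary path is illustrative, and Image.fromarray expects a height x width x channels array:

from PIL import Image
import numpy

# write a dummy 48-wide, 128-tall RGB image, then load it as a 4D batch
dummy = Image.fromarray(numpy.zeros((128, 48, 3), dtype='uint8'))
dummy.save('/tmp/dummy.bmp')
batch = read_image('/tmp/dummy.bmp')
print(batch.shape)  # (1, 3, 128, 48): one CHW image in a batch of one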


# img1 = read_image('/home/austin/Documents/Datasets/VIPeR/cam_a/001_45.bmp')
# img2 = read_image('/home/austin/Documents/Datasets/VIPeR/cam_b/091_90.bmp')
# f = theano.function([X1, X2, Y], [cost, layer2.similarity])