Example #1
0
                           crop_size=(32, 32),
                           name='valid')

# Normalize all splits using statistics computed from the TRAINING set
# only: global contrast normalization (mean/std) followed by ZCA
# whitening.  Applying the train statistics to test/valid keeps the
# preprocessing consistent and avoids test-set leakage.
train_global_mean, train_global_std = train_gen.gcn()  # returns train mean/std
train_pc_matrix = train_gen.zca()  # ZCA principal-component (whitening) matrix
test_gen.gcn(train_global_mean, train_global_std)
test_gen.zca(train_pc_matrix)
valid_gen.gcn(train_global_mean, train_global_std)
valid_gen.zca(train_pc_matrix)

#================Build graph================#

# Symbolic Theano inputs: a 4-D float tensor of images (presumably
# (batch, channel, height, width) — TODO confirm against the generator)
# and an int vector of class labels.
x = T.ftensor4('X')
y = T.ivector('y')

# CNN for 32x32 3-channel inputs, minibatch size 128.  Every convolution
# uses 'half' (same) padding so the spatial size stays 32x32; biases are
# dropped wherever a batch-normalization layer follows, since the norm
# supplies the shift.  Trailing numeric comments are layer indices.
graph = SimpleGraph(experiment_name, 128)
graph.add_layer(Convolution3DLayer((3, 32, 32), (16, 32, 32), (3, 3),
                                   'half',
                                   use_bias=False),
                is_start=True)  # 0
graph.add_layer(BatchNormalization3DLayer((16, 32, 32), 0.95))  # 1 (0.95 presumably the running-stat momentum — TODO confirm)
graph.add_layer(ReLU())  # 2

graph.add_layer(
    Convolution3DLayer((16, 32, 32), (128, 32, 32), (3, 3),
                       'half',
                       use_bias=False))  # 3
graph.add_layer(BatchNormalization3DLayer((128, 32, 32), 0.95))  # 4
graph.add_layer(ReLU())  # 5
# This conv keeps its bias: no batch norm follows it in the visible code.
graph.add_layer(
    Convolution3DLayer((128, 32, 32), (128, 32, 32), (3, 3), 'half'))  # 6
Example #2
0
#================Prepare Data================#

cifar10 = CIFAR10(base_datapath, 'tensor')
train_data = cifar10.train_data
# Rescale pixels to [-1, 1] (matches a tanh-output generator convention);
# assumes train_data is in [0, 1] — TODO confirm.
train_data = train_data * 2 - 1
train_gen = SimpleGenerator([train_data], batch_size, 'train')

#================Build Graph================#

z = T.fmatrix('Z')  # (batch_size, 100) latent noise
x = T.ftensor4('X')  # (batch_size, 3, 28, 28) real images

# You can use either upscaling + conv / transposed conv

# DCGAN-style generator: project the 100-D latent code to 8192 units,
# reshape to (512, 4, 4) feature maps, then upsample with strided
# transposed convolutions.  Biases are dropped where batch normalization
# follows; 0.9 is presumably the batch-norm momentum — TODO confirm.
generator = SimpleGraph(experiment_name + '_gen', batch_size)
generator.add_layer(DenseLayer((100, ), (8192, ), use_bias=False),
                    is_start=True)
generator.add_layer(BatchNormalization1DLayer((8192, ), 0.9))
generator.add_layer(ReLU())
generator.add_layer(ReshapeLayer((8192, ), (512, 4, 4)))  # 8192 = 512*4*4
#generator.add_layer(Upscaling3DLayer((512,4,4), (512,8,8), (2,2)))
#generator.add_layer(Convolution3DLayer((512,8,8), (256,8,8), (3,3), 'half', use_bias=False))
generator.add_layer(
    TransposedConvolution3DLayer((512, 4, 4), (256, 8, 8), (3, 3),
                                 'half', (2, 2),
                                 use_bias=False))  # stride (2, 2): 4x4 -> 8x8
generator.add_layer(BatchNormalization3DLayer((256, 8, 8), 0.9))
generator.add_layer(ReLU())
#generator.add_layer(Upscaling3DLayer((256,8,8), (256,16,16), (2,2)))
#generator.add_layer(Convolution3DLayer((256,16,16), (128,16,16), (3,3), 'half', use_bias=False))
Example #3
0
# MNIST multilayer perceptron: 784 -> 1024 -> 1024 -> 1024 -> 10 with
# batch-normalized ReLU blocks and a softmax cross-entropy objective.

mnist = MNIST(base_datapath, 'flat')
mnist.split_train_valid(50000)
train_data, train_label = mnist.get_fullbatch_train()
test_data, test_label = mnist.get_fullbatch_test()
valid_data, valid_label = mnist.get_fullbatch_valid()

# One minibatch generator per split, all with batch size 250.
train_gen = SimpleGenerator([train_data, train_label], 250, 'train')
test_gen = SimpleGenerator([test_data, test_label], 250, 'test')
valid_gen = SimpleGenerator([valid_data, valid_label], 250, 'valid')

#================Build graph================#

x = T.fmatrix('X')
y = T.ivector('y')

graph = SimpleGraph(experiment_name, 250)

# Input block (layers 0-2), two hidden blocks (3-5 and 6-8), then the
# classifier head (9-10).  Dense layers feeding a batch norm drop their
# bias, since the normalization supplies the shift.
stack = [
    DenseLayer((784, ), (1024, ), use_bias=False),
    BatchNormalization1DLayer((1024, )),
    ReLU(),
]
for _ in range(2):
    stack.append(DenseLayer((1024, ), (1024, ), use_bias=False))
    stack.append(BatchNormalization1DLayer((1024, )))
    stack.append(ReLU())
stack.append(DenseLayer((1024, ), (10, )))
stack.append(Softmax())

graph.add_layer(stack[0], is_start=True)
for block in stack[1:]:
    graph.add_layer(block)

graph_output, graph_layers = graph.get_output({0: [x]}, -1, 0)

loss = CategoricalCrossentropy().get_output(graph_output, y)
accuracy = CategoricalAccuracy().get_output(graph_output, y)
Example #4
0
import theano
import theano.tensor as T

from lemontree.layers.dense import DenseLayer
from lemontree.layers.activation import ReLU, Sigmoid
from lemontree.layers.objectives import CategoricalCrossentropy

from lemontree.graphs.graph import SimpleGraph
from lemontree.utils.param_utils import print_tags_in_params

# Demonstrates SimpleGraph's connection bookkeeping: eight identical
# 10 -> 10 dense layers wired through explicit `get_from` specs (mostly
# relative negative indices, one absolute index, one empty source),
# finished with a cross-entropy head.
graph = SimpleGraph('test', 100)

wiring = ([], [-1], [-1], [1], [-1], [-3], [-1], [-3])
for sources in wiring:
    graph.add_layer(DenseLayer((10,), (10,)), get_from=list(sources))
graph.add_layer(CategoricalCrossentropy(), get_from=[-1])

print(graph.connections)

input_ = T.fmatrix('X')
label_ = T.ivector('L')

# Feed the data into the first layer (-8) and labels into the objective
# (-1); pull the output from the last layer, starting at layer -8.
out_, list_ = graph.get_output({-8: [input_], -1: [label_]}, -1, -8)

param_ = graph.get_params(list_)
print_tags_in_params(param_)
Example #5
0
# MNIST as flat 784-D vectors; carve a 90/10 train/valid split.
mnist = MNIST(base_datapath, 'flat')
train_data = mnist.train_data
train_data, valid_data = split_data(train_data, 0.9)
test_data = mnist.test_data
train_gen = SimpleGenerator([train_data], 250, 'train')
valid_gen = SimpleGenerator([valid_data], 250, 'valid')
test_gen = SimpleGenerator([test_data], 250, 'test')

# MRG random stream, presumably for sampling in the latent layer — TODO confirm.
rng = MRG(9999)

#================Build graph================#

x = T.fmatrix('X')  # flat input images
z = T.fmatrix('Z')  # latent codes — role visible only downstream of this excerpt

# Autoencoder with a latent bottleneck (VAE-style): encoder
# 784 -> 1024 -> 1024 -> 256 -> Latent1DLayer(256 -> 128), then the
# decoder mirrors back up (it continues past this excerpt).  Biases are
# dropped where batch normalization follows; ReLU(0.1) is presumably a
# leaky ReLU with slope 0.1 — TODO confirm the layer signature.
graph = SimpleGraph(experiment_name, 250)
graph.add_layer(DenseLayer((784, ), (1024, ), use_bias=False),
                get_from=[])  # 0
graph.add_layer(BatchNormalization1DLayer((1024, )))  # 1
graph.add_layer(ReLU(0.1))  # 2
graph.add_layer(DenseLayer((1024, ), (1024, ), use_bias=False))  # 3
graph.add_layer(BatchNormalization1DLayer((1024, )))  # 4
graph.add_layer(ReLU(0.1))  # 5
graph.add_layer(DenseLayer((1024, ), (256, )))  # 6
graph.add_layer(Latent1DLayer((256, ), (128, )))  # 7: bottleneck

# Decoder half begins here.
graph.add_layer(DenseLayer((128, ), (1024, ), use_bias=False))  # 8
graph.add_layer(BatchNormalization1DLayer((1024, )))  # 9
graph.add_layer(ReLU(0.1))  # 10
graph.add_layer(DenseLayer((1024, ), (1024, ), use_bias=False))  # 11
graph.add_layer(BatchNormalization1DLayer((1024, )))  # 12
Example #6
0
#    for index in range(train_gen.max_index):
#        # run minibatch
#        for trainset in train_gen.get_minibatch(index):  # data, mask, label, reset
#            print(i, index)

#================Build graph================#

# Symbolic inputs for variable-length sequences of 300-D word vectors
# plus per-timestep masks and per-sequence reset flags.  The *_ext set
# is a second input stream whose role appears later in the script —
# TODO confirm downstream.
x = T.ftensor3('X')  # (batch_size, sequence_length, 300)
m = T.wmatrix('M')  # (batch_size, sequence_length)
r = T.wvector('r')  # (batch_size,)
x_ext = T.ftensor3('X_ext')
m_ext = T.wmatrix('M_ext')
y_ext = T.imatrix('Y_ext')
r_ext = T.wvector('r_ext')

# Single-layer LSTM encoder (300 -> 512) with peepholes and forget-gate
# bias initialized to one.  output_return_index=[-1] keeps only the last
# timestep; save_state_index presumably carries state across truncated
# strides (stride_length) — TODO confirm; also_return_cell=True makes
# the layer return the cell state alongside the hidden state.
encoder = SimpleGraph(experiment_name + '_enc', batch_size)
encoder.add_layer(LSTMRecurrentLayer(input_shape=(300, ),
                                     output_shape=(512, ),
                                     forget_bias_one=True,
                                     peephole=True,
                                     output_return_index=[-1],
                                     save_state_index=stride_length - 1,
                                     also_return_cell=True,
                                     precompute=False,
                                     unroll=False,
                                     backward=False),
                  is_start=True)

# Layer 0 receives (data, mask, reset).  Per the variable names the two
# outputs are cell then hidden state; squeeze drops the singleton
# timestep dimension left by output_return_index=[-1].
encoder_output, encoder_layers = encoder.get_output({0: [x, m, r]}, -1, 0)
encoder_output_cell = T.squeeze(encoder_output[0])
encoder_output_hidden = T.squeeze(encoder_output[1])
Example #7
0
#for i in range(len(train_gens)):
#    train_gen = train_gens[i]
#    for index in range(train_gen.max_index):
#        # run minibatch
#        for trainset in train_gen.get_minibatch(index):  # data, mask, label, reset
#            print(i, index)

#================Build graph================#

# Symbolic inputs: 300-D embedded tokens, per-timestep mask, target
# token ids, and per-sequence reset flags for truncated BPTT.
x = T.ftensor3('X')  # (batch_size, sequence_length, 300)
m = T.wmatrix('M')  # (batch_size, sequence_length)
y = T.imatrix('Y')  # (batch_size, sequence_length)
r = T.wvector('r')  # (batch_size,)

# Language-model-style network: LSTM (300 -> 1024) returning every
# timestep (output_return_index=None), a 1024 -> 512 projection, then a
# time-distributed output layer over the GloVe vocabulary.  The "SCP"
# layer presumably fuses softmax and cross-entropy, producing the loss
# directly — TODO confirm against its implementation.
graph = SimpleGraph(experiment_name, batch_size)
graph.add_layer(LSTMRecurrentLayer(input_shape=(300,),
                                   output_shape=(1024,),
                                   forget_bias_one=True,
                                   peephole=True,
                                   output_return_index=None,
                                   save_state_index=stride_length-1,
                                   precompute=False,
                                   unroll=False,
                                   backward=False), is_start=True)
# graph.add_layer(TimeDistributedDenseLayer((1024,), (512,)))  # not much time difference, and less memory
graph.add_layer(DenseLayer((1024,), (512,)))
graph.add_layer(TimeDistributedDenseLayerSCP((512,), (glove.vocabulary,)))

# Layer 0 receives (data, mask, reset); the last layer also receives
# (labels, mask).  The first element of the graph output is the loss.
graph_output, graph_layers = graph.get_output({0:[x,m,r], -1:[y,m]}, -1, 0)
loss = graph_output[0]