Example #1
 def __init__(self, dict_size: int, embedding_size: int, hidden_units: int):
     self.layers = [layer.Embedding(dict_size, embedding_size, 'l0'),
                    layer.Linear(embedding_size, hidden_units, 'l1-0'),
                    layer.Linear(embedding_size, hidden_units, 'l1-1'),
                    layer.Linear(embedding_size, hidden_units, 'l1-2'),
                    layer.Linear(hidden_units, dict_size, 'l2'),
                    layer.Softmax(dict_size, 'l3')]
     self.h: list = None
     return
Example #2
 def __init__(self, dict_size: int, embedding_size: int, hidden_units: int):
     self.layers = [layer.Embedding(dict_size, embedding_size, 'l0'),     # layer 0
                    layer.Linear(embedding_size, hidden_units, 'l1-0'),   # layer 1
                    layer.Linear(embedding_size, hidden_units, 'l1-1'),   # layer 2
                    layer.Linear(embedding_size, hidden_units, 'l1-2'),   # layer 3
                    layer.Tanh(hidden_units, 'l2'),                       # layer 4
                    layer.Linear(hidden_units, dict_size, 'l3'),          # layer 5
                    layer.Softmax(dict_size, 'l4')]                       # layer 6
     self.h: list = None
     return
Example #3
 def __init__(self, dict_size: int, embedding_size: int, hidden_units: int):
     self.layers = [layer.Embedding(dict_size, embedding_size, 'l0-Embedding'),
                    layer.Recursive(embedding_size, hidden_units, 'l1-R'),
                    layer.Tanh(hidden_units, 'l2-Tanh'),
                    layer.Linear(hidden_units, dict_size, 'l3-L'),
                    layer.Softmax(dict_size, 'l4-Softmax')]
     self.h: list = None
     return
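The constructors in Examples #1 through #3 only build the layer list; these excerpts do not show how it is consumed. Below is a minimal sketch of the usual pattern, with hypothetical stand-in classes rather than the actual layer module used above: each layer exposes a forward() method, and the output of one layer feeds the next.

import numpy as np

# Hypothetical stand-ins for the layer classes; only the chaining pattern matters here.
class Linear:
    def __init__(self, n_in, n_out, name=''):
        self.name = name
        self.W = np.random.randn(n_in, n_out) * 0.01
        self.b = np.zeros(n_out)

    def forward(self, x):
        return x @ self.W + self.b

class Softmax:
    def __init__(self, n, name=''):
        self.name = name

    def forward(self, x):
        e = np.exp(x - x.max(axis=-1, keepdims=True))
        return e / e.sum(axis=-1, keepdims=True)

layers = [Linear(784, 100, 'l0'), Linear(100, 10, 'l1'), Softmax(10, 'l2')]
out = np.random.randn(2, 784)         # dummy batch of two flattened inputs
for l in layers:
    out = l.forward(out)              # feed each layer's output into the next
print(out.shape, out.sum(axis=1))     # (2, 10); each row sums to 1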
Example #4
utf.cross_train(mynn, data_store_train, data_store_valid, train_settings)
"""
#############################
print('Settings 3')
train_settings = TrainSettings(learning_rate=0.005,
                               batch_size=16,
                               momentum=0.0,
                               plot_callback=cb.plot_callback,
                               loss_callback=cb.loss_callback,
                               filename='script-1-1-3',
                               epoch=200,
                               prefix='e16')

layers = [
    layer.Linear(784, 100),
    #layer.BN(100, 100),
    layer.Sigmoid(100),
    layer.Linear(100, 10),
    layer.Softmax(10)
]

mynn = network.MLP(layers)

utf.cross_train(mynn, data_store_train, data_store_valid, train_settings)

print('Settings 4')
train_settings = TrainSettings(learning_rate=0.01,
                               batch_size=16,
                               momentum=0.0,
                               plot_callback=cb.plot_callback,
Example #5
print('start initializing...')
network.init_nn(random_seed=1099)

learning_rates = [0.001]
momentums = [0.9]

regularizers = [0.0001]
x_train, y_train = load_data.load_from_path(data_train_filepath)
x_valid, y_valid = load_data.load_from_path(data_valid_filepath)

withBN = True
if not withBN:
    for i2 in range(len(regularizers)):
        for i3 in range(len(momentums)):
            for i4 in range(len(learning_rates)):
                layers = [layer.Linear(784, 100),
                          layer.Sigmoid(100, 100),
                          layer.Linear(100, 25),
                          layer.Sigmoid(25, 25),
                          layer.Linear(25, 10),
                          layer.Softmax(10, 10)]
                name = 'network2' + '-' + str(i2) + '-' + str(i3) + '-' + str(i4) + '.dump'
                myNN = NN(layers, learning_rate=learning_rates[i4], regularizer=regularizers[i2], momentum=momentums[i3])
                myNN.train(x_train, y_train, x_valid, y_valid, epoch=300, batch_size=32)

if withBN:
    for i2 in range(len(regularizers)):
        for i3 in range(len(momentums)):
            for i4 in range(len(learning_rates)):
                layers = [layer.Linear(784, 100),
                          layer.BN(100, 100),
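Example #5 above and Example #6 below both sweep hyper-parameters with nested index loops. The same grid can be written as one flat loop with itertools.product; this is a minimal sketch reusing the single-element lists from Example #5, with a print standing in for the training call since the NN class itself is not shown in these excerpts.

from itertools import product

learning_rates = [0.001]
momentums = [0.9]
regularizers = [0.0001]

# Equivalent to the nested i2/i3/i4 loops above, written as one flat loop.
for (i2, reg), (i3, mom), (i4, lr) in product(enumerate(regularizers),
                                              enumerate(momentums),
                                              enumerate(learning_rates)):
    name = 'network2-{}-{}-{}.dump'.format(i2, i3, i4)
    print(name, 'lr =', lr, 'momentum =', mom, 'regularizer =', reg)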
Example #6
myNN.train_dump(x_train, y_train, x_valid, y_valid, epoch=200, dump_file=os.path.join(path, '../temp/network-3.dump'))

'''

hidden_units = [20, 100, 200, 500]
regularizers = [0.0, 0.0001, 0.001]
momentums = [0.0, 0.5, 0.9]
learning_rates = [0.1, 0.01, 0.2, 0.5]

for i1 in range(len(hidden_units)):
    for i2 in range(len(regularizers)):
        for i3 in range(len(momentums)):
            for i4 in range(len(learning_rates)):
                layers = [
                    layer.Linear(784, 100),
                    layer.BN(100, 100),
                    layer.ReLU(100, 100),
                    layer.BN(100, 100),
                    layer.SoftmaxLayer(100, 10)
                ]
                name = 'network-' + str(i1) + '-' + str(i2) + '-' + str(i3) + '-' + str(i4) + '.dump'
                myNN = NN(layers,
                          learning_rate=learning_rates[i4],
                          regularizer=regularizers[i2],
                          momentum=momentums[i3])
                myNN.train(x_train,
                           y_train,
                           x_valid,
                           y_valid,