Example #1
            "reflection": [0, 0, 0]
        },  # Bernoulli p
        interp_order=1),
    DefaultNormalizer(tags=["bcolzall:3d"])
]

#####################
#     training      #
#####################

"This is the train dataloader. We will train until this one stops loading data."
"You can set the number of epochs, the datasets and if you want it multiprocessed"
n_epochs = 1000
training_data = BcolzAllDataLoader(sets=TRAINING,
                                   epochs=n_epochs,
                                   preprocessors=preprocessors,
                                   multiprocess=False,
                                   crash_on_exception=True)
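
"A minimal consumption sketch (not from the original config). It assumes the loader is a plain Python iterable that yields dict batches and stops after n_epochs passes; both the batch keys and train_on_batch are hypothetical stand-ins."
def train_on_batch(inputs, targets):
    # Placeholder for the actual compiled training step.
    pass

for batch in training_data:
    train_on_batch(batch["input"], batch["target"])  # hypothetical batch keys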

"Schedule the reducing of the learning rate. On indexing with the number of epochs, it should return a value for the learning rate."
lr = 0.01
lr_decay = 0.5
# Start at lr and halve it at every power-of-two epoch (1, 2, 4, ..., 512).
learning_rate_schedule = {0.: lr}
for i in range(10):
    learning_rate_schedule[float(2**i)] = lr * (lr_decay**(i + 1))

print(learning_rate_schedule)
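
"The dict only has entries at epochs 0, 1, 2, 4, 8, ..., so indexing it directly with an arbitrary epoch would fail. A hypothetical lookup helper (not in the original config) that returns the most recently scheduled rate:"
def current_lr(epoch, schedule=learning_rate_schedule):
    # Fall back to the largest scheduled epoch <= the current one.
    return schedule[max(k for k in schedule if k <= epoch)]

assert current_lr(3.) == lr * lr_decay**2  # epoch 3 still uses the rate set at epoch 2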

"The function to build updates."
build_updates = lasagne.updates.adam
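
"For context: lasagne.updates.adam expects a loss (or gradients), the trainable parameters, and a learning rate. A minimal sketch (not from the original config) of wiring up build_updates; the one-layer network below is a hypothetical stand-in for the real model."
import lasagne
import theano
import theano.tensor as T

input_var = T.matrix("inputs")
target_var = T.matrix("targets")
# Hypothetical stand-in network; the real config builds its model elsewhere.
network = lasagne.layers.DenseLayer(
    lasagne.layers.InputLayer((None, 10), input_var=input_var),
    num_units=1, nonlinearity=None)

prediction = lasagne.layers.get_output(network)
loss = lasagne.objectives.squared_error(prediction, target_var).mean()
params = lasagne.layers.get_all_params(network, trainable=True)

# A shared variable lets the training loop lower the rate in place
# whenever the current epoch appears in learning_rate_schedule.
lr_var = theano.shared(lasagne.utils.floatX(lr))
updates = build_updates(loss, params, learning_rate=lr_var)
train_fn = theano.function([input_var, target_var], loss, updates=updates)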
Example #2
        norm_patch_shape=norm_patch_shape,
        interp_order=1),
    DefaultNormalizer(tags=["bcolzall:3d"])
]


#####################
#     training      #
#####################

"This is the train dataloader. We will train until this one stops loading data."
"You can set the number of epochs, the datasets and if you want it multiprocessed"
n_epochs = 100
training_data = BcolzAllDataLoader(
    sets=TRAINING,
    epochs=n_epochs,
    preprocessors=preprocessors,
    multiprocess=multiprocessing_on,
    crash_on_exception=True)

"Schedule the reducing of the learning rate. On indexing with the number of epochs, it should return a value for the learning rate." 
lr = 0.00001 * batch_size  # batch_size is defined earlier in this config
lr_min = lr / 1000.
lr_decay = 0.9
# Exponential decay per epoch; stop scheduling once the rate falls below lr_min.
learning_rate_schedule = {}
for i in range(n_epochs):
    lr_ = lr * lr_decay**i
    if lr_ < lr_min:
        break
    learning_rate_schedule[i] = lr_

print(learning_rate_schedule)
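
"This schedule, unlike the one in Example #1, has one entry per epoch and simply stops once the rate would fall below lr_min. A hypothetical consumer (not in the original config) can reuse the last scheduled value as a floor for later epochs:"
def current_lr(epoch, schedule=learning_rate_schedule):
    # Past the decay horizon, keep the smallest (last) scheduled rate.
    return schedule.get(epoch, schedule[max(schedule)])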