Example no. 1
def run_training(train_suffix, model_name, data_loader, epoch, n_epochs):

    train_name = '%s_%s' % (model_name, train_suffix)
    log_name = "%s.log" % train_name
    # remove any stale log from a previous run so the CSV logger starts clean
    if os.path.isfile(log_name):
        close_file(log_name)
        os.remove(log_name)
    csv_log = CSVLogger(log_name, append=True)

    # save intermediate snapshots of the model every 12 hours of training
    time_checkpoint = TimeCheckpoint(12 * 60 * 60, train_name)
    callbacks = [time_checkpoint, csv_log]
    # `model` is the Keras model defined in the enclosing scope
    fit_hist = model.fit_generator(
        data_loader.generator(True),
        validation_data=data_loader.generator(False),
        steps_per_epoch=data_loader.steps_per_epoch,
        validation_steps=data_loader.validation_steps,
        callbacks=callbacks,
        epochs=n_epochs,
        initial_epoch=epoch,
        verbose=0)

    # save the final model under the shared lock so no other thread reads
    # the HDF5 file while it is being written
    read_hdf_lock.acquire()
    model.save("%s_final.hdf5" % train_name)
    read_hdf_lock.release()
    return fit_hist
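
The function above relies on names defined elsewhere in the module, most notably the compiled Keras model (`model`), the shared `read_hdf_lock` that serializes HDF5 access between threads, the `close_file` helper, and the `TimeCheckpoint` callback shown in the later examples. A minimal sketch of the assumed lock definition, plus a purely hypothetical invocation (the argument values are made up):

import threading

# Assumption: read_hdf_lock is a plain threading.Lock shared by every thread
# that reads or writes the HDF5 files.
read_hdf_lock = threading.Lock()

# Hypothetical call, assuming `model` is already built and `data_loader`
# exposes generator(), steps_per_epoch and validation_steps:
# hist = run_training('step1', 'my_model', data_loader, epoch=0, n_epochs=10)
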
Example no. 2
def on_batch_end(self, batch, logs=None):
    # only look at the clock every 100 batches to keep the overhead low
    if self.time_interval is None or batch % 100 != 0:
        return
    current_time = time.time()
    delta_t = current_time - self.last_check_time
    if delta_t >= self.time_interval:
        # elapsed wall-clock time since the start of training, in hours
        abs_delta_t_h = (current_time - self.initial_time) / 60. / 60.
        # save under the shared lock so readers never see a half-written file
        read_hdf_lock.acquire()
        self.model.save('{}_historic_b{}_{:.1f}h.h5'.format(self.file_name_prefix, batch, abs_delta_t_h))
        read_hdf_lock.release()
        self.last_check_time = current_time
Example no. 3
def on_epoch_end(self, epoch, logs=None):
    # save a per-epoch snapshot; the shared lock prevents concurrent HDF5 reads
    read_hdf_lock.acquire()
    self.model.save('{}_e{}.h5'.format(self.file_name_prefix, epoch))
    read_hdf_lock.release()
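
The two callbacks above read attributes (`time_interval`, `file_name_prefix`, `initial_time`, `last_check_time`) that are presumably set in the callback's constructor; Example no. 1 builds it as `TimeCheckpoint(12 * 60 * 60, train_name)`. A sketch of what that constructor might look like, assuming the class derives from `keras.callbacks.Callback` (the real implementation may differ):

import time
from keras.callbacks import Callback

class TimeCheckpoint(Callback):
    # Assumed constructor; the attribute names mirror those used in the
    # on_batch_end / on_epoch_end methods shown above.
    def __init__(self, time_interval, file_name_prefix):
        super(TimeCheckpoint, self).__init__()
        self.time_interval = time_interval        # seconds between timed saves
        self.file_name_prefix = file_name_prefix  # prefix for the saved .h5 files
        self.initial_time = time.time()           # wall-clock time at construction
        self.last_check_time = self.initial_time  # time of the last timed save
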
Example no. 4
def on_epoch_end(self, epoch, logs=None):
    # delegate to the parent class's on_epoch_end while holding the shared lock
    read_hdf_lock.acquire()
    super(SafeModelCheckpoint, self).on_epoch_end(epoch, logs)
    read_hdf_lock.release()
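
Judging by its name and the `super()` call, `SafeModelCheckpoint` appears to subclass Keras's `ModelCheckpoint`, adding only the lock around the inherited epoch-end save. A hedged sketch of the assumed class header and a hypothetical way to use it alongside the other callbacks:

from keras.callbacks import ModelCheckpoint

class SafeModelCheckpoint(ModelCheckpoint):
    # Only on_epoch_end is overridden (see the method above in Example no. 4);
    # everything else is inherited from ModelCheckpoint.
    ...

# Hypothetical usage:
# checkpoint = SafeModelCheckpoint('%s_best.hdf5' % train_name, save_best_only=True)
# callbacks = [time_checkpoint, checkpoint, csv_log]
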
Example no. 5
            else:
                # accumulate the per-input gradients element-wise across batches
                for i in range(len(x_grads)):
                    x_grads[i] += x_grads_[i]
            # same accumulation for the gradients w.r.t. the concatenated inputs
            if concat_grads is None:
                concat_grads = concat_grads_
            else:
                for i in range(len(concat_grads)):
                    concat_grads[i] += concat_grads_[i]
            num_inputs += df.shape[0]
            # append this chunk of predictions to the output HDF5 store; the
            # shared lock keeps other threads from reading the file mid-write
            read_hdf_lock.acquire()
            df.to_hdf(pred_output,
                      args.tree,
                      append=True,
                      complevel=1,
                      complib='zlib')
            read_hdf_lock.release()
            pbar.update(df.shape[0])
            gc.collect()
            del df
            # stop once enough inputs have been accumulated for the averages
            if num_inputs > 10000:
                break
    file_index += 1

# average the accumulated gradients over the number of processed inputs
for i in range(len(x_grads)):
    x_grads[i] /= float(num_inputs)
for i in range(len(concat_grads)):
    concat_grads[i] /= float(num_inputs)

import matplotlib
matplotlib.use("Agg")  # pick the non-interactive backend before pyplot is imported
import matplotlib.pyplot as plt
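
The trailing imports select matplotlib's non-interactive Agg backend, which must be chosen before `pyplot` is imported so the gradient plots can be rendered on a machine without a display. A small illustration of headless plotting under that backend (assuming the averaged gradients are 1-D arrays; the file name is made up):

plt.figure()
plt.plot(x_grads[0])          # hypothetical: first averaged input gradient
plt.savefig('x_grads_0.png')  # Agg renders straight to a file; no window is opened
plt.close()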