# Example #1
# 0
def run_training(train_suffix, model_name, data_loader, epoch, n_epochs):
    """Train the module-level `model` over `data_loader`, logging to CSV
    and checkpointing on a 12-hour wall-clock timer.

    Parameters
    ----------
    train_suffix : str
        Suffix appended to `model_name` to form the run name.
    model_name : str
        Base name used for the log file and checkpoint files.
    data_loader : object
        Project type providing `generator(is_training)`, `steps_per_epoch`
        and `validation_steps` — assumed Keras-generator compatible
        (TODO confirm against caller).
    epoch : int
        Epoch index to resume from (`initial_epoch` for Keras).
    n_epochs : int
        Final epoch index to train up to.

    Returns
    -------
    The Keras `History` object returned by `model.fit_generator`.
    """
    train_name = '%s_%s' % (model_name, train_suffix)
    log_name = "%s.log" % train_name
    # Remove any stale log so CSVLogger(append=True) starts clean for this run.
    if os.path.isfile(log_name):
        close_file(log_name)
        os.remove(log_name)
    csv_log = CSVLogger(log_name, append=True)

    # Save an intermediate checkpoint every 12 hours of wall-clock time.
    time_checkpoint = TimeCheckpoint(12 * 60 * 60, train_name)
    callbacks = [time_checkpoint, csv_log]
    fit_hist = model.fit_generator(
        data_loader.generator(True),
        validation_data=data_loader.generator(False),
        steps_per_epoch=data_loader.steps_per_epoch,
        validation_steps=data_loader.validation_steps,
        callbacks=callbacks,
        epochs=n_epochs,
        initial_epoch=epoch,
        verbose=0)

    # Use the lock as a context manager so it is released even if save()
    # raises — the original acquire()/release() pair leaked it on error.
    with read_hdf_lock:
        model.save("%s_final.hdf5" % train_name)
    return fit_hist
 def on_batch_end(self, batch, logs=None):
     """Save a timed model checkpoint if `time_interval` seconds have
     elapsed since the last save.

     Called by Keras after every batch; `logs` is unused.
     """
     # Only consult the clock every 100 batches to keep the callback cheap.
     if self.time_interval is None or batch % 100 != 0: return
     current_time = time.time()
     delta_t = current_time - self.last_check_time
     if delta_t >= self.time_interval:
         # Hours elapsed since training start, embedded in the file name.
         abs_delta_t_h = (current_time - self.initial_time) / 60. / 60.
         # `with` releases the lock even if save() raises — the original
         # acquire()/release() pair leaked it on error.
         with read_hdf_lock:
             self.model.save('{}_historic_b{}_{:.1f}h.h5'.format(self.file_name_prefix, batch, abs_delta_t_h))
         self.last_check_time = current_time
# Example #3
# 0
 def on_epoch_end(self, epoch, logs=None):
     """Save an end-of-epoch snapshot named '<prefix>_e<epoch>.h5'.

     `logs` is accepted for Keras callback compatibility but unused.
     """
     # `with` guarantees the lock is released even if save() raises —
     # the original acquire()/release() pair leaked it on error.
     with read_hdf_lock:
         self.model.save('{}_e{}.h5'.format(self.file_name_prefix, epoch))
 def on_epoch_end(self, epoch, logs=None):
     """Run the parent ModelCheckpoint epoch-end hook while holding
     `read_hdf_lock` (presumably serializing HDF file access against
     concurrent readers — confirm against the lock's other users).
     """
     # `with` guarantees the lock is released even if the parent hook
     # raises — the original acquire()/release() pair leaked it on error.
     with read_hdf_lock:
         super(SafeModelCheckpoint, self).on_epoch_end(epoch, logs)
# Example #5
# 0
    with tqdm(total=loader.data_size, unit='taus') as pbar:
        # Iterate prediction batches: accumulate input/concat gradients
        # and append per-batch predictions to the output HDF store.
        for inputs in loader.generator(return_truth=False,
                                       return_weights=False):
            df, x_grads_, concat_grads_ = predictor.Predict(sess, inputs)
            # `is None` is the correct sentinel test — `== None` misfires
            # on objects that overload equality (e.g. numpy arrays).
            if x_grads is None:
                x_grads = x_grads_
            else:
                for i in range(len(x_grads)):
                    x_grads[i] += x_grads_[i]
            if concat_grads is None:
                concat_grads = concat_grads_
            else:
                for i in range(len(concat_grads)):
                    concat_grads[i] += concat_grads_[i]
            num_inputs += df.shape[0]
            # `with` releases the lock even if to_hdf raises — the
            # original acquire()/release() pair leaked it on error.
            with read_hdf_lock:
                df.to_hdf(pred_output,
                          args.tree,
                          append=True,
                          complevel=1,
                          complib='zlib')
            pbar.update(df.shape[0])
            gc.collect()
            del df
            # NOTE(review): hard cap on processed rows — presumably a
            # debugging/sampling limit; confirm before production use.
            if num_inputs > 10000:
                break
    file_index += 1

# Turn the accumulated gradient sums into per-input averages in place.
for idx, _ in enumerate(x_grads):
    x_grads[idx] /= float(num_inputs)