Example #1
def save(name):
    # self, fname_format and attrs are captured from the enclosing
    # scope; the closure saves one named model to a formatted file path.
    model = self.__dict__[name]
    fname = fname_format.format(name=name)
    os.makedirs(os.path.dirname(fname), exist_ok=True)
    save_model(model, fname, overwrite=False, attrs=attrs)
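The snippet is a closure: self, fname_format and attrs come from an enclosing scope that stores models as attributes, much like the SaveModels callback used in Example #2 below. A minimal sketch of such a context, assuming a hypothetical ModelRegistry container and a stubbed save_model helper (the attrs keyword is not part of stock Keras, so the stub simply drops it):

import os


def save_model(model, fname, overwrite=False, attrs=None):
    # Stand-in for the project's helper: stock Keras Model.save takes no
    # attrs argument, so the extra metadata is ignored in this stub.
    model.save(fname, overwrite=overwrite)


class ModelRegistry:
    # Hypothetical container that stores models as plain attributes.

    def save_all(self, fname_format, attrs=None):
        def save(name):
            model = self.__dict__[name]
            fname = fname_format.format(name=name)
            os.makedirs(os.path.dirname(fname), exist_ok=True)
            save_model(model, fname, overwrite=False, attrs=attrs)

        for name in self.__dict__:
            save(name)

A registry would then be used as reg = ModelRegistry(); reg.generator = g; reg.save_all('models/{name}.hdf5').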
Example #2
import json
import os

import numpy as np
import scipy.misc
from keras.layers import Input
from keras.models import Model
from keras.optimizers import Nadam

# DistributionHDF5Dataset, tag3d_network_dense, AutomaticLearningRateScheduler,
# HistoryPerBatch, SaveModels, save_model and zip_tile are project-specific
# helpers assumed to be importable from the surrounding code base.


def run(output_dir, force, tags_3d_hdf5_fname, nb_units, depth, nb_epoch,
        filter_size, project_factor, nb_dense):
    # Note: filter_size and project_factor are accepted but unused below.
    batch_size = 64
    basename = "network_tags3d_n{}_d{}_e{}".format(nb_units, depth, nb_epoch)
    output_basename = os.path.join(output_dir, basename)

    tag_dataset = DistributionHDF5Dataset(tags_3d_hdf5_fname)
    # The dataset already exists on disk; flip the wrapper's private flag so
    # it is treated as created rather than rebuilt.
    tag_dataset._dataset_created = True
    print("Got {} images from the 3d model".format(tag_dataset.nb_samples))
    weights_fname = output_basename + ".hdf5"
    if os.path.exists(weights_fname) and not force:
        raise OSError("File {} already exists. Use --force to override it"
                      .format(weights_fname))
    elif os.path.exists(weights_fname) and force:
        os.remove(weights_fname)
    os.makedirs(output_dir, exist_ok=True)

    def generator(batch_size):
        # Yield (labels, [tag3d, depth_map]) batches; the structured label
        # array is flattened into one float matrix for the network input.
        for batch in tag_dataset.iter(batch_size):
            labels = []
            for name in batch['labels'].dtype.names:
                labels.append(batch['labels'][name])

            assert not np.isnan(batch['tag3d']).any()
            assert not np.isnan(batch['depth_map']).any()
            labels = np.concatenate(labels, axis=-1)
            yield labels, [batch['tag3d'], batch['depth_map']]

    labels = next(generator(batch_size))[0]
    print("labels.shape ", labels.shape)
    print("labels.dtype ", labels.dtype)
    nb_input = labels.shape[1]

    x = Input(shape=(nb_input, ))
    tag3d, depth_map = tag3d_network_dense(x,
                                           nb_units=nb_units,
                                           depth=depth,
                                           nb_dense_units=nb_dense)
    g = Model(x, [tag3d, depth_map])
    # optimizer = SGD(momentum=0.8, nesterov=True)
    optimizer = Nadam()

    g.compile(optimizer, loss=['mse', 'mse'], loss_weights=[1, 1 / 3.])

    scheduler = AutomaticLearningRateScheduler(optimizer,
                                               'loss',
                                               epoch_patience=5,
                                               min_improvement=0.0002)
    history = HistoryPerBatch()
    save = SaveModels({basename + '_snapshot_{epoch:03d}.hdf5': g},
                      output_dir=output_dir,
                      hdf5_attrs=tag_dataset.get_distribution_hdf5_attrs())
    history_plot = history.plot_callback(fname=output_basename + "_loss.png",
                                         every_nth_epoch=10)
    g.fit_generator(generator(batch_size),
                    samples_per_epoch=800 * batch_size,
                    nb_epoch=nb_epoch,
                    verbose=1,
                    callbacks=[scheduler, save, history, history_plot])

    nb_visualize = 18**2
    vis_labels, (tags_3d, depth_map) = next(generator(nb_visualize))
    predict_tags_3d, predict_depth_map = g.predict(vis_labels)

    def zip_and_save(fname, *args):
        # Clip each batch to [0, 1], take the first channel, interleave the
        # batches into a single tiled image and write it to disk.
        clipped = [np.clip(x, 0, 1)[:, 0] for x in args]
        print(clipped[0].shape)
        tiled = zip_tile(*clipped)
        print(tiled.shape)
        scipy.misc.imsave(fname, tiled)

    zip_and_save(output_basename + "_predict_tags.png", tags_3d,
                 predict_tags_3d)
    zip_and_save(output_basename + "_predict_depth_map.png", depth_map,
                 predict_depth_map)

    save_model(g,
               weights_fname,
               attrs=tag_dataset.get_distribution_hdf5_attrs())
    with open(output_basename + '.json', 'w+') as f:
        f.write(g.to_json())

    with open(output_basename + '_loss_history.json', 'w+') as f:
        json.dump(history.history, f)

    fig, _ = history.plot()
    fig.savefig(output_basename + "_loss.png")
    print("Saved weights to: {}".format(weights_fname))