def test_mask_generator():
    shape = (15,)
    input = Input(shape=shape)
    output = tag3d_network_dense([input])
    model = Model(input, output)
    model.compile('adam', 'mse')
    bs = (64,)
    x = np.random.sample(bs + shape)
    # the network predicts a 64x64 tag mask and a 16x16 depth map
    y_depth_map = np.random.sample(bs + (1, 16, 16))
    y_mask = np.random.sample(bs + (1, 64, 64))
    model.train_on_batch(x, [y_mask, y_depth_map])
def test_render_gan_builder_gan_train_on_batch():
    builder = RenderGAN(lambda x: tag3d_network_dense(x, nb_units=4),
                        generator_units=4, discriminator_units=4,
                        labels_shape=(27,))
    bs = 19
    z, z_offset, labels = data(builder, bs)
    builder.gan.train_on_batch(
        g_inputs={'z': z},
        d_inputs={'real': np.random.uniform(-1, 1, (bs,) + builder.data_shape)},
    )
def test_render_gan_builder_generator_train_on_batch():
    builder = RenderGAN(lambda x: tag3d_network_dense(x, nb_units=4),
                        generator_units=4, discriminator_units=4,
                        labels_shape=(27,))
    bs = 19
    z, z_offset, labels = data(builder, bs)
    real = np.zeros((bs,) + builder.data_shape)
    builder.generator_given_z.compile('adam', 'mse')
    builder.generator_given_z.train_on_batch(z, real)
    builder.generator_given_z_and_labels.compile('adam', 'mse')
    builder.generator_given_z_and_labels.train_on_batch([z_offset, labels], real)
def test_render_gan_builder_generate():
    builder = RenderGAN(lambda x: tag3d_network_dense(x, nb_units=4),
                        generator_units=4, discriminator_units=4,
                        labels_shape=(27,))
    bs = 19
    z, z_offset, labels = data(builder, bs)
    fakes = builder.generator_given_z_and_labels.predict([z_offset, labels])
    assert fakes.shape == (bs,) + builder.data_shape
    fakes = builder.generator_given_z.predict(z)
    assert fakes.shape == (bs,) + builder.data_shape
    outs = builder.sample_generator_given_z.predict(z)
    assert len(outs) == len(builder.sample_generator_given_z_output_names)
def test_render_gan_builder_generator_extended():
    labels_shape = (27,)
    z_dim_offset = 50
    builder = RenderGAN(lambda x: tag3d_network_dense(x, nb_units=4),
                        generator_units=4, discriminator_units=4,
                        z_dim_offset=z_dim_offset, labels_shape=labels_shape)
    bs = 19
    z, z_offset, labels = data(builder, bs)
    real = np.zeros((bs,) + builder.data_shape)
    labels_input = Input(shape=labels_shape)
    z_input = Input(shape=(z_dim_offset,))
    fake = builder.generator_given_z_and_labels([z_input, labels_input])
    m = Model([z_input, labels_input], [fake])
    m.compile('adam', 'mse')
    m.train_on_batch([z_offset, labels], real)
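# The tests above call a `data` helper defined elsewhere in the test module.
# The sketch below is a hypothetical reconstruction, assuming the builder
# exposes `z_dim` (assumed attribute name), `z_dim_offset`, and
# `labels_shape` as attributes; the real helper may sample labels from the
# tag distribution instead of uniformly.
def data(builder, bs):
    # sample random generator inputs with the shapes the builder expects
    z = np.random.uniform(-1, 1, (bs, builder.z_dim))
    z_offset = np.random.uniform(-1, 1, (bs, builder.z_dim_offset))
    labels = np.random.uniform(-1, 1, (bs,) + builder.labels_shape)
    return z, z_offset, labels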
def run(output_dir, force, tags_3d_hdf5_fname, nb_units, depth, nb_epoch,
        filter_size, project_factor, nb_dense):
    batch_size = 64
    basename = "network_tags3d_n{}_d{}_e{}".format(nb_units, depth, nb_epoch)
    output_basename = os.path.join(output_dir, basename)
    tag_dataset = DistributionHDF5Dataset(tags_3d_hdf5_fname)
    tag_dataset._dataset_created = True
    print("Got {} images from the 3d model".format(tag_dataset.nb_samples))
    weights_fname = output_basename + ".hdf5"
    if os.path.exists(weights_fname) and not force:
        raise OSError("File {} already exists. Use --force to override it."
                      .format(weights_fname))
    elif os.path.exists(weights_fname) and force:
        os.remove(weights_fname)
    os.makedirs(output_dir, exist_ok=True)

    def generator(batch_size):
        # flatten the structured label array into one float matrix and
        # yield (labels, [tag3d, depth_map]) batches for training
        for batch in tag_dataset.iter(batch_size):
            labels = []
            for name in batch['labels'].dtype.names:
                labels.append(batch['labels'][name])
            assert not np.isnan(batch['tag3d']).any()
            assert not np.isnan(batch['depth_map']).any()
            labels = np.concatenate(labels, axis=-1)
            yield labels, [batch['tag3d'], batch['depth_map']]

    labels = next(generator(batch_size))[0]
    print("labels.shape ", labels.shape)
    print("labels.dtype ", labels.dtype)
    nb_input = labels.shape[1]
    x = Input(shape=(nb_input,))
    tag3d, depth_map = tag3d_network_dense(x, nb_units=nb_units, depth=depth,
                                           nb_dense_units=nb_dense)
    g = Model(x, [tag3d, depth_map])
    # optimizer = SGD(momentum=0.8, nesterov=True)
    optimizer = Nadam()
    g.compile(optimizer, loss=['mse', 'mse'], loss_weights=[1, 1 / 3.])
    scheduler = AutomaticLearningRateScheduler(
        optimizer, 'loss', epoch_patience=5, min_improvement=0.0002)
    history = HistoryPerBatch()
    save = SaveModels({basename + '_snapshot_{epoch:03d}.hdf5': g},
                      output_dir=output_dir,
                      hdf5_attrs=tag_dataset.get_distribution_hdf5_attrs())
    history_plot = history.plot_callback(fname=output_basename + "_loss.png",
                                         every_nth_epoch=10)
    g.fit_generator(generator(batch_size), samples_per_epoch=800 * batch_size,
                    nb_epoch=nb_epoch, verbose=1,
                    callbacks=[scheduler, save, history, history_plot])

    nb_visualize = 18**2
    vis_labels, (tags_3d, depth_map) = next(generator(nb_visualize))
    predict_tags_3d, predict_depth_map = g.predict(vis_labels)

    def zip_and_save(fname, *args):
        # clip to [0, 1], drop the channel axis, and tile ground truth
        # next to the corresponding prediction in a single image
        clipped = list(map(lambda x: np.clip(x, 0, 1)[:, 0], args))
        print(clipped[0].shape)
        tiled = zip_tile(*clipped)
        print(tiled.shape)
        scipy.misc.imsave(fname, tiled)

    zip_and_save(output_basename + "_predict_tags.png",
                 tags_3d, predict_tags_3d)
    zip_and_save(output_basename + "_predict_depth_map.png",
                 depth_map, predict_depth_map)
    save_model(g, weights_fname,
               attrs=tag_dataset.get_distribution_hdf5_attrs())
    with open(output_basename + '.json', 'w+') as f:
        f.write(g.to_json())
    with open(output_basename + '_loss_history.json', 'w+') as f:
        json.dump(history.history, f)
    fig, _ = history.plot()
    fig.savefig(output_basename + "_loss.png")
    print("Saved weights to: {}".format(weights_fname))
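# Illustrative direct invocation of `run` (the project's actual entry point,
# e.g. a CLI wrapper, lives elsewhere; every value below is an example
# choice, not a project default):
if __name__ == '__main__':
    run(output_dir='models/tag3d', force=False,
        tags_3d_hdf5_fname='tags_3d.hdf5',
        nb_units=16, depth=2, nb_epoch=100,
        filter_size=3, project_factor=2, nb_dense=256)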