def test_set_gan_arrow():
    """Smoke-test set_gan_arrow on tiny arrows: build the GAN arrow
    from abs/add primitives and run it once on concrete inputs."""
    forward = AbsArrow()
    generator = AddArrow()
    discriminator = AbsArrow()
    gan_arr = set_gan_arrow(forward, generator, discriminator, 1)

    # One real sample, one latent draw, and a permutation over the two.
    sample_x = np.array([1.0])
    latent_z = np.array([0.5])
    perm = np.array([1, 0])
    # TODO(review): no assertion on the result — this only checks that
    # apply() runs without raising.
    result = apply(gan_arr, [sample_x, latent_z, perm])
def gan_shopping_arrow_pi(nitems: int, options) -> CompositeArrow:
    """Gan on shopping basket.

    Builds a GAN arrow whose conditional generator is derived from the
    parametric inverse of ``SumNArrow(nitems)``.

    Args:
        nitems: Number of basket items (inputs of SumNArrow).
        options: Dict with at least 'n_fake_samples' and 'batch_size'.

    Returns:
        The CompositeArrow produced by set_gan_arrow.
    """
    n_fake_samples = options['n_fake_samples']
    n_samples = n_fake_samples + 1  # fake samples plus one real sample
    batch_size = options['batch_size']
    fwd = SumNArrow(nitems)
    inv = invert(fwd)
    # NOTE(review): `info` is unused below; kept in case propagate() has
    # side effects on `inv` — verify before removing.
    info = propagate(inv)

    def gen_func(args, reuse=False):
        """Generator: produce one tensor per parameter port of `inv`."""
        with tf.variable_scope("generator", reuse=reuse):
            inp = tf.concat(args, axis=1)
            inp = fully_connected(inp, inv.num_param_ports(), activation='elu')
            inps = tf.split(inp, axis=1,
                            num_or_size_splits=inv.num_param_ports())
            return inps

    def disc_func(args, reuse=False):
        """Discriminator: concat sample sets and score each of n_samples."""
        with tf.variable_scope("discriminator", reuse=reuse):
            inp = tf.concat(args, axis=2)
            inp = fully_connected(inp, 10, activation='elu')
            inp = fully_connected(inp, n_samples, activation='sigmoid')
            return [inp]

    # Make a conditional generator from the inverse.
    # Fix: the original computed num_non_param_in_ports and then ignored it,
    # repeating the expression inline below; reuse the local instead.
    num_non_param_in_ports = inv.num_in_ports() - inv.num_param_ports()
    g_theta = TfLambdaArrow(num_non_param_in_ports + 1,
                            inv.num_param_ports(),
                            func=gen_func,
                            name="g_theta")
    cond_gen = g_from_g_theta(inv, g_theta)
    disc = TfLambdaArrow(nitems, 1, func=disc_func, name="disc")
    gan_arr = set_gan_arrow(fwd, cond_gen, disc, n_fake_samples, 2,
                            x_shapes=[(batch_size, 1) for i in range(nitems)],
                            z_shape=(batch_size, 1))
    return gan_arr
def set_gan_nn_arrow(options):
    """Test Gan on fwd function f(x) = x.

    Args:
        options: Dict with at least 'n_fake_samples' and 'batch_size'.

    Returns:
        The CompositeArrow produced by set_gan_arrow over IdentityArrow.
    """
    fwd_arr = IdentityArrow()
    n_fake_samples = options['n_fake_samples']
    n_samples = n_fake_samples + 1  # fake samples plus one real sample
    batch_size = options['batch_size']

    def gen_func(args):
        """Generator function"""
        with tf.variable_scope("generator", reuse=False):
            inp = args[0]
            inp = fully_connected(inp, 10, activation='elu')
            inp = fully_connected(inp, 1, activation='sigmoid')
            return [inp]

    def disc_func(args):
        """Discriminator function"""
        with tf.variable_scope("discriminator", reuse=False):
            assert len(args) == 1
            inp = args[0]
            inp = fully_connected(inp, 5, activation='elu')
            inp = fully_connected(inp, 5, activation='elu')
            # Bug fix: the original reassigned `inp = args[0]` here, which
            # discarded the two hidden layers above (their variables were
            # still created under 'discriminator' but received no gradient).
            # Feed the hidden representation into the output layer instead.
            inp = fully_connected(inp, n_samples, activation='sigmoid')
            return [inp]

    cond_gen = TfLambdaArrow(2, 1, func=gen_func)
    disc = TfLambdaArrow(1, 1, func=disc_func)
    gan_arr = set_gan_arrow(fwd_arr, cond_gen, disc, n_fake_samples, 2,
                            x_shape=(batch_size, 1),
                            z_shape=(batch_size, 1))
    return gan_arr
def gan_shopping_arrow_compare(nitems: int, options) -> CompositeArrow:
    """Comparison for Gan, g is straight neural network"""
    n_fake = options['n_fake_samples']
    total_samples = n_fake + 1  # fakes plus the one real sample
    batch_size = options['batch_size']
    fwd = SumNArrow(nitems)

    def gen_func(args, reuse=False):
        """Generator function"""
        with tf.variable_scope("generator", reuse=reuse):
            hidden = tf.concat(args, axis=1)
            hidden = fully_connected(hidden, nitems, activation='elu')
            return tf.split(hidden, axis=1, num_or_size_splits=nitems)

    def disc_func(args, reuse=False):
        """Discriminator function """
        with tf.variable_scope("discriminator", reuse=reuse):
            score = tf.concat(args, axis=2)
            score = fully_connected(score, 10, activation='elu')
            score = fully_connected(score, total_samples, activation='sigmoid')
            return [score]

    cond_gen = TfLambdaArrow(2, nitems, func=gen_func, name="cond_gen")
    disc = TfLambdaArrow(nitems, 1, func=disc_func, name="disc")
    x_shapes = [(batch_size, 1) for _ in range(nitems)]
    return set_gan_arrow(fwd, cond_gen, disc, n_fake, 2,
                         x_shapes=x_shapes,
                         z_shape=(batch_size, 1))
def test_set_gan_nn_arrow(options):
    """Build a tiny identity-function GAN arrow, compile it to a TF graph
    and run the training loop (smoke test).

    Args:
        options: accepted for interface compatibility; shadowed internally
            by the optimizer options dict below.
    """
    fwd = IdentityArrow()
    n_fake_samples = 1
    n_samples = n_fake_samples + 1  # one fake plus one real sample

    def gen_func(args):
        """Generator function"""
        with tf.variable_scope("generator", reuse=False):
            inp = tf.concat(args, axis=1)
            inp = fully_connected(inp, 1, activation='elu')
            inp = batch_normalization(inp)
            inp = fully_connected(inp, 1, activation='elu')
            inp = batch_normalization(inp)
            return [inp]

    def disc_func(args):
        """Discriminator function"""
        with tf.variable_scope("discriminator", reuse=False):
            assert len(args) == 1
            inp = args[0]
            l1 = fully_connected(inp, n_samples, activation='sigmoid')
            return [l1]

    cond_gen = TfLambdaArrow(2, 1, func=gen_func)
    disc = TfLambdaArrow(1, 1, func=disc_func)
    gan_arr = set_gan_arrow(fwd, cond_gen, disc, n_fake_samples, 2)

    # Fix port shapes/dtypes: x, z are (batch, 1); the permutation input is
    # a flat int vector over the n_samples sample slots.
    batch_size = 64
    set_port_shape(gan_arr.in_port(0), (batch_size, 1))
    set_port_shape(gan_arr.in_port(1), (batch_size, 1))
    set_port_shape(gan_arr.in_port(2), (n_samples,))
    set_port_dtype(gan_arr.in_port(2), 'int32')

    with tf.name_scope(gan_arr.name):
        input_tensors = gen_input_tensors(gan_arr, param_port_as_var=False)
        output_tensors = arrow_to_graph(gan_arr, input_tensors)
    x_ten, z_ten, perm_ten = input_tensors
    d_loss, g_loss, fake_x_1 = output_tensors
    # Negate and average: the graph emits per-example scores to maximize.
    d_loss = tf.reduce_mean(-d_loss)
    g_loss = tf.reduce_mean(-g_loss)
    fetch = {'d_loss': d_loss, 'g_loss': g_loss}
    sess = tf.Session()

    losses = {'d_loss': d_loss, 'g_loss': g_loss}
    options = {'learning_rate': 0.01, 'update': 'adam'}
    d_vars = get_variables('discriminator')
    g_vars = get_variables('generator')
    loss_updates = [updates(d_loss, d_vars, options=options)[1],
                    updates(g_loss, g_vars, options=options)[1]]
    fetch['check'] = tf.add_check_numerics_ops()
    loss_ratios = [1, 1]

    def train_gen():
        """Yield feed dicts of fresh random x, z and a shuffled permutation."""
        while True:
            x = np.random.rand(batch_size, 1)
            z = np.random.rand(batch_size, 1)
            perm = np.arange(n_samples)
            np.random.shuffle(perm)
            yield {x_ten: x, z_ten: z, perm_ten: perm}

    # Summaries
    summaries = variable_summaries(losses)
    writers = setup_file_writers('summaries', sess)
    options['writers'] = writers
    callbacks = [every_n(summary_writes, 25)]
    sess.run(tf.initialize_all_variables())
    # Bug fix: the original call passed the `callbacks` keyword twice
    # (`callbacks=callbacks, callbacks=None`), which is a SyntaxError.
    # Unused locals (stale x/z/perm arrays for a commented-out sess.run)
    # were also removed.
    train_loop(sess,
               loss_updates,
               fetch,
               train_generators=[train_gen()],
               test_generators=None,
               num_iterations=100000,
               loss_ratios=loss_ratios,
               callbacks=callbacks)
def gan_renderer_arrow(options):
    """Gan on renderer.

    Builds a GAN arrow around an invertible voxel renderer: the conditional
    generator is derived (via g_from_g_theta) from the renderer's inverse.

    Args:
        options: Dict with at least 'n_fake_samples', 'batch_size', 'res',
            'width' and 'height'.

    Returns:
        The CompositeArrow produced by set_gan_arrow.
    """
    n_fake_samples = options['n_fake_samples']
    n_samples = n_fake_samples + 1  # fake samples plus one real sample
    batch_size = options['batch_size']
    res = options['res']
    width = options['width']
    height = options['height']
    nvoxels = res * res * res
    npixels = width * height  # NOTE(review): unused in this function
    # Function-level imports: voxel_render/propagate are only needed here.
    from voxel_render import test_invert_render_graph
    fwd, inv = test_invert_render_graph(options)
    from arrows.apply.propagate import propagate
    info = propagate(inv)

    def gen_func(args):
        """Generator function"""
        with tf.variable_scope("generator", reuse=False):
            # inp = tf.concat(args, axis=1)
            # One output tensor per parameter port of the inverse arrow,
            # sized from the propagated port shapes (shape[1] assumes rank-2
            # (batch, features) ports — TODO confirm).
            shapes = [info[port]['shape'] for port in inv.param_ports()]
            inp = args[0]
            inp = fully_connected(inp, 2, activation='elu')
            return [fully_connected(inp, shape[1], activation='elu')
                    for shape in shapes]

    def disc_func(args):
        """Discriminator function"""
        with tf.variable_scope("discriminator", reuse=False):
            assert len(args) == 1
            inp = args[0]
            inp = fully_connected(inp, n_samples, activation='sigmoid')
            return [inp]

    # Make a conditional generator from the inverse
    g_theta = TfLambdaArrow(inv.num_in_ports() - inv.num_param_ports() + 1,
                            inv.num_param_ports(),
                            func=gen_func)
    cond_gen = g_from_g_theta(inv, g_theta)
    disc = TfLambdaArrow(1, 1, func=disc_func)

    def train_gen():
        """Generator for x, z and permutation"""
        # NOTE(review): this generator is never used in this function, and
        # x_ten/z_ten/perm_ten are not defined in this scope — calling it
        # would raise NameError. Presumably leftover from a training script
        # (cf. test_set_gan_nn_arrow); verify before relying on it.
        from wacacore.util.generators import infinite_batches
        from voxel_helpers import model_net_40
        voxel_data = model_net_40()
        x_gen = infinite_batches(voxel_data, batch_size=batch_size)
        while True:
            x = next(x_gen)
            x = x.reshape(batch_size, -1)
            z = np.random.rand(batch_size, 1)
            perm = np.arange(n_samples)
            np.random.shuffle(perm)
            yield {x_ten: x, z_ten: z, perm_ten: perm}

    gan_arr = set_gan_arrow(fwd, cond_gen, disc, n_fake_samples, 2,
                            x_shape=(batch_size, nvoxels),
                            z_shape=(batch_size, 1))
    return gan_arr