Esempio n. 1
0
def build_critic_graph(generator, critic, batch_size=1):
    '''Builds the graph for training the critic part of the improved WGAN'''
    # Noise inputs that feed the generator during critic training,
    # one per wire view.
    noise_u = Input(shape=(38, 350, 1), name="noise_u")
    noise_v = Input(shape=(38, 350, 1), name="noise_v")
    # Real shower maps for the two wire views.
    real_u = Input(shape=(38, 350, 1), name='shower_maps_u')
    real_v = Input(shape=(38, 350, 1), name='shower_maps_v')

    fake = generator([noise_u, noise_v])
    out_critic_training_gen = critic(fake)
    out_critic_training_shower = critic([real_u, real_v])

    # One shared set of random interpolation weights so both views are
    # mixed with the same coefficient per sample.
    weights = K.random_uniform((batch_size, 1, 1, 1))
    averaged_batch = [
        RandomWeightedAverage(batch_size, weights, name='Average_u')(
            [fake[0], real_u]),
        RandomWeightedAverage(batch_size, weights, name='Average_v')(
            [fake[1], real_v]),
    ]
    averaged_batch_out = critic(averaged_batch)

    critic_model = Model(
        inputs=[noise_u, noise_v, real_u, real_v],
        outputs=[out_critic_training_gen, out_critic_training_shower,
                 averaged_batch_out])
    # averaged_batch is returned as well (presumably for the
    # gradient-penalty loss term -- confirm against the caller).
    return critic_model, averaged_batch
Esempio n. 2
0
def build_generator_graph(generator, critic):
    '''Builds the graph for training the generator part of the improved WGAN'''
    # One noise input per wire view; shapes match the generator's inputs.
    wire_u = Input(shape=(38, 350, 1), name="Wire_U")
    wire_v = Input(shape=(38, 350, 1), name="Wire_V")
    generated = generator([wire_u, wire_v])
    critic_score = critic(generated)
    return Model(inputs=[wire_u, wire_v], outputs=[critic_score])
def build_critic_graph(generator, critic, batch_size=1):
    '''Builds the graph for training the critic part of the improved WGAN'''
    noise_in = Input(shape=(38, 350, 1), name="noise")
    shower_in = Input(shape=(38, 350, 1), name='shower_maps')

    fake_shower = generator([noise_in])
    critic_on_fake = critic(fake_shower)
    critic_on_real = critic(shower_in)

    # Random interpolation between generated and real samples,
    # presumably for the WGAN-GP gradient-penalty term -- confirm at caller.
    averaged_batch = RandomWeightedAverage(batch_size, name='Average')(
        [fake_shower, shower_in])
    critic_on_avg = critic(averaged_batch)

    critic_model = Model(
        inputs=[noise_in, shower_in],
        outputs=[critic_on_fake, critic_on_real, critic_on_avg])
    return critic_model, averaged_batch
Esempio n. 4
0
def build_generator_graph(generator, critic):
    '''Builds the graph for training the generator part of the improved WGAN'''
    # Single-view variant: one noise input fed through generator then critic.
    noise_in = Input(shape=(38, 350, 1))
    generated = generator([noise_in])
    critic_score = critic(generated)
    return Model(inputs=[noise_in], outputs=[critic_score])
Esempio n. 5
0
    sys.stdout = orig_stdout
    f.close()


save_in_file()

# The generator_model is used when we want to train the generator layers.
# As such, we ensure that the discriminator layers are not trainable.
# Note that once we compile this model, updating .trainable will have no effect within it. As such, it
# won't cause problems if we later set discriminator.trainable = True for the discriminator_model, as long
# as we compile the generator_model first.
for layer in discriminator.layers:
    layer.trainable = False
discriminator.trainable = False
# Stack generator -> discriminator; the composite is trained with the
# discriminator frozen so only the generator weights update.
# NOTE(review): input is (76, 350, 1) here while the other graphs in this
# file use (38, 350, 1) -- confirm this is intentional.
generator_input = Input(shape=[76, 350, 1])
generator_layers = generator(generator_input)
discriminator_layers_for_generator = discriminator(generator_layers)
generator_model = Model(inputs=[generator_input],
                        outputs=[discriminator_layers_for_generator])
# We use the Adam parameters from Gulrajani et al. (WGAN-GP paper).
generator_model.compile(optimizer=Adam(0.0001, beta_1=0.5, beta_2=0.9),
                        loss=wasserstein_loss)

# Now that the generator_model is compiled, we can make the discriminator layers trainable.
# The next model to be compiled trains the discriminator, so the flags are
# inverted: discriminator on, generator frozen.
for layer in discriminator.layers:
    layer.trainable = True
for layer in generator.layers:
    layer.trainable = False
discriminator.trainable = True
generator.trainable = False
'''
make trainable freeze part of the model
the GAN is created

'''
def make_trainable(model, trainable):
    """Freeze (trainable=False) or unfreeze (trainable=True) a model."""
    # Flip the flag on every layer as well as on the container model,
    # mirroring how the rest of this script toggles trainability.
    for layer in model.layers:
        layer.trainable = trainable
    model.trainable = trainable


# Set up GAN by stacking the discriminator on top of the generator
print('\nGenerative Adversarial Network')
gan_input = Input(shape=[38, 350, 1])
gan_output = discriminator(generator(gan_input))
GAN = Model(gan_input, gan_output)
print(GAN.summary())
g_opt = Adam(lr=2e-4, beta_1=0.5, decay=0.0005)
make_trainable(discriminator, False)  # freezes the discriminator when training the GAN
GAN.compile(loss='binary_crossentropy', optimizer=g_opt)
# Compile saves the trainable status of the model --> After the model is compiled, updating using make_trainable will have no effect


# Fixed: was a Python 2 print statement (`print "..."`), a syntax error in
# Python 3 and inconsistent with the print() calls above.
print("Saving the structure in file")
'''
Save into file:
    -Generator
    -Discriminator
    -GAN
Esempio n. 7
0
def build_generator_predict(generator):
    '''Builds the graph for training the generator part of the improved WGAN'''
    # Inference-only graph: noise in, generated shower map out (no critic).
    noise_in = Input(shape=(38, 350, 1))
    prediction = generator([noise_in])
    return Model(inputs=[noise_in], outputs=[prediction])
    
    else:        
        
        raise ValueError("generator dimension " +str(dim) +"is not in the constrained dimensions for this structure: dimension must be in [2,4,6] for the TL,'braid_group','symmetric_group' and between (2,10) for ZxZ_group. ")


    if args.structure=='TL_algebra':
                     
        if args.delta==0:
            
            raise ValueError("delta, a parameter for TL algebra, must be nonzero.")
            
        print("training the TL algebra generator.")
        print("generator function : R^"+str(args.generator_dimension) +"-> R^"+str(args.generator_dimension) )      
        Ugen=ut.generator(input_dim=args.generator_dimension
                         ,bias= args.bias
                         ,activation_function=args.network_generator_activation)        
        M=tlnet.tl_algebra_net(Ugen,delta =args.delta  ,input_dim=dim//2)
        model_name=model_string_gen("TL_algebra_relations_trainer_use_bias=")
        model_name_U_gen=model_string_gen("TL_algebra_generator_use_bias=")     
        
        data_in=[data1,data2,data3]
        data_out=data1
        
        
        if args.mode=='training':
            
            print("choosing the training mode. ")   
            ut.train_net(M,data_in,data_out
                         , weight_folder+model_name
                         ,tlnet.tl_loss_wrapper(dim//2)