Example #1
    # Adam optimizer from Keras. Gen and Dis are assumed to be pre-defined
    # generator ("painter") and discriminator wrappers exposing .model,
    # .get_gradient() and .__current_loss__.
    from tensorflow.keras.optimizers import Adam

    Gen.top_block()
    #Gen.build_monitored([2,5],[0.5,0.5])
    Gen.build()

    # Separate Adam optimizers for the generator ("painter") and the discriminator.
    painter_optimizer = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-07)
    disc_optimizer = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-07)

    for step in range(10):
        print("Training Discriminator...")
        for dis_training_loop in range(25):
            # One discriminator update: compute gradients and apply them.
            discri_grads = Dis.get_gradient()
            disc_optimizer.apply_gradients(zip(discri_grads, Dis.model.trainable_weights))
            print('\tDiscriminator loss: ', Dis.__current_loss__)

        print("Training Generator...")
        for gen_training_loop in range(10):
            # Compute generator ("painter") gradients for one update step.
            painter_grads = Gen.get_gradient()

            # Diagnostic: sum all gradient values to verify they are non-zero.
            grads_sum = sum(g.numpy().sum() for g in painter_grads)
            print("\tGenerator gradients sum: ", grads_sum)

            painter_optimizer.apply_gradients(zip(painter_grads, Gen.model.trainable_weights))
            print('\tGenerator loss: ', Gen.__current_loss__)
        
        print(f"Loss at step {i}: Painter: {Gen.__current_loss__}; Discriminator: {Dis.__current_loss__}.")