Example #1
    def __init__(self,
                 units_full=128,
                 units_latent=20,
                 u_encoder_block=DenseEncoderDecoder(),
                 u_decoder_block=DenseEncoderDecoder(),
                 F_encoder_block=DenseEncoderDecoder(),
                 F_decoder_block=DenseEncoderDecoder(),
                 operator_initializer=keras.initializers.Identity(),
                 train_autoencoders_only=False,
                 **kwargs):
        super().__init__(**kwargs)  # handles standard args (e.g., name)

        # Store configuration as attributes
        self.l = units_latent  # latent dimension

        # u autoencoder (I_seed, defined elsewhere on this class, presumably
        # returns an identity-seeded matrix of the requested shape)
        self.u_encoder = u_encoder_block
        self.u_Reduce = tf.Variable(self.I_seed(units_full, units_latent),
                                    trainable=True)
        self.u_Expand = tf.Variable(self.I_seed(units_latent, units_full),
                                    trainable=True)
        self.u_decoder = u_decoder_block

        # F autoencoder
        self.F_encoder = F_encoder_block
        self.F_Reduce = tf.Variable(self.I_seed(units_full, units_latent),
                                    trainable=True)
        self.F_Expand = tf.Variable(self.I_seed(units_latent, units_full),
                                    trainable=True)
        self.F_decoder = F_decoder_block

        # Create the trainable operator matrix that maps the latent v to the latent f
        self.operator_initializer = operator_initializer
        op_size = (units_latent, units_latent)
        self.Operator = tf.Variable(abs(operator_initializer(op_size)),
                                    trainable=True)

        # Set the NMSE loss function used for custom losses
        self.NMSE = NMSE(name="NormalizedMSE")

        # Whether to train only the autoencoders
        self.train_autoencoders_only = train_autoencoders_only
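
# Illustration only -- a sketch of how the attributes initialized above could compose
# into a forward map u -> F through the latent operator. The class's actual call()
# method is not shown here and may differ; `model` and `u` are hypothetical stand-ins.
def _sketch_forward(model, u):
    v = tf.matmul(model.u_encoder(u), model.u_Reduce)            # encode u, reduce to latent v
    f_latent = tf.matmul(v, model.Operator)                      # learned operator: v -> f
    return model.F_decoder(tf.matmul(f_latent, model.F_Expand))  # expand and decode to F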

# Aggregate all the model options in one dictionary (the name `model_config` is assumed)
model_config = {
    'units_full': units_full,
    'units_latent': units_latent,
    'u_encoder_block': DenseEncoderDecoder(**enc_dec_config),
    'u_decoder_block': DenseEncoderDecoder(**enc_dec_config),
    'F_encoder_block': DenseEncoderDecoder(**enc_dec_config),
    'F_decoder_block': DenseEncoderDecoder(**enc_dec_config),
    'operator_initializer': return_sdg_matrix
}

# Aggregate all the training options in one dictionary
training_options = {
    'aec_only_epochs': 25,
    'init_full_epochs': 300,
    'best_model_epochs': 2500,
    'num_init_models': 20,
    'loss_fn': NMSE(),
    'optimizer': keras.optimizers.Adam,
    'optimizer_opts': {},
    'batch_size': 64
}
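
# Illustration only -- a sketch of how the option dictionaries above could be consumed.
# The project's actual training driver (with its autoencoder-only, initialization, and
# multi-restart phases) is presumably defined elsewhere; `model`, `u_train`, and
# `F_train` are hypothetical stand-ins.
def compile_and_fit(model, u_train, F_train, opts=training_options):
    optimizer = opts['optimizer'](**opts['optimizer_opts'])
    model.compile(optimizer=optimizer, loss=opts['loss_fn'])
    return model.fit(u_train, F_train,
                     batch_size=opts['batch_size'],
                     epochs=opts['best_model_epochs'])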

####################################################################
### Launch the Experiment
####################################################################

# Get a random number generator seed
random_seed = r.randint(0, 10**(10))

# Set the custom objects used in the model (for loading purposes)
custom_objs = {"NormalizedMeanSquaredError": NMSE}
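
# Illustration only: custom_objs is the mapping Keras uses to deserialize the custom
# loss when a saved model is reloaded. The file path below is a hypothetical placeholder.
# reloaded_model = keras.models.load_model("path/to/saved_model",
#                                           custom_objects=custom_objs)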