def get_default_config() -> ConfigDict:
    """Return the default ``ConfigDict`` for this model.

    Groups the hyper-parameters into component-network, gating-network and
    density-ratio-estimation (DRE) sections, then freezes the set of keys
    via ``finalize_adding`` so later typos cannot silently add new entries.
    """
    config = ConfigDict(
        num_components=1,
        train_epochs=1000,
        # --- Component network ---
        components_learning_rate=1e-3,
        components_batch_size=1000,
        components_num_epochs=10,
        components_net_reg_loss_fact=0.,
        components_net_drop_prob=0.0,
        components_net_hidden_layers=[50, 50],
        # --- Gating network ---
        gating_learning_rate=1e-3,
        gating_batch_size=1000,
        gating_num_epochs=10,
        gating_net_reg_loss_fact=0.,
        gating_net_drop_prob=0.0,
        gating_net_hidden_layers=[50, 50],
        # --- Density Ratio Estimation ---
        dre_reg_loss_fact=0.0,       # scaling factor for L2 regularization of the DRE
        dre_early_stopping=True,     # use early stopping during DRE training
        dre_drop_prob=0.0,           # if smaller than 1, dropout with keep prob = 'keep_prob' is used
        dre_num_iters=1000,          # DRE steps per iteration (max if early stopping)
        dre_batch_size=1000,         # batch size for DRE training
        dre_hidden_layers=[30, 30],  # width of DRE hidden layers
    )
    config.finalize_adding()
    return config
def get_default_config() -> ConfigDict:
    """Return the default ``ConfigDict`` for this model.

    Fix: adds the ``-> ConfigDict`` return annotation for consistency with
    the sibling ``get_default_config`` factories, which all declare it.
    Keys are frozen with ``finalize_adding`` so later lookups of unknown
    keys fail loudly instead of silently adding entries.
    """
    c = ConfigDict(
        num_components=1,
        samples_per_component=500,
        train_epochs=1000,
        initialization="random",
        # --- Component Updates ---
        component_kl_bound=0.01,
        # --- Mixture Updates ---
        weight_kl_bound=0.01,
        # --- Density Ratio Estimation ---
        dre_reg_loss_fact=0.0,       # scaling factor for L2 regularization of the DRE
        dre_early_stopping=True,     # use early stopping during DRE training
        dre_drop_prob=0.0,           # if smaller than 1, dropout with keep prob = 'keep_prob' is used
        dre_num_iters=1000,          # DRE steps per iteration (max if early stopping)
        dre_batch_size=1000,         # batch size for DRE training
        dre_hidden_layers=[30, 30],  # width of DRE hidden layers
    )
    c.finalize_adding()
    return c
def get_default_config() -> ConfigDict:
    """Return the default ``ConfigDict`` for this model.

    Covers basis/bandwidth settings, the transition network layout,
    covariance learning switches and optimizer options; the key set is
    frozen with ``finalize_adding`` before the config is returned.
    """
    cfg = ConfigDict(
        num_basis=15,
        bandwidth=3,
        # transition network
        trans_net_hidden_units=[64, 64],
        trans_net_hidden_activation="Tanh",
        # covariances
        learn_trans_covar=True,
        trans_covar=0.1,
        learn_initial_state_covar=False,
        initial_state_covar=10,
        # optimization
        learning_rate=1e-3,
        enc_out_norm='pre',
        clip_gradients=True,
        never_invalid=True,
    )
    cfg.finalize_adding()
    return cfg
def get_default_config() -> ConfigDict:
    """Return the default ``ConfigDict`` for this model.

    Variant with a separate control network (empty transition hidden
    layers, one ReLU control layer) and learned initial-state covariance;
    keys are frozen via ``finalize_adding`` before returning.
    """
    cfg = ConfigDict(
        num_basis=15,
        bandwidth=3,
        # transition / control networks
        trans_net_hidden_units=[],
        control_net_hidden_units=[60],
        trans_net_hidden_activation="Tanh",
        control_net_hidden_activation='ReLU',
        # covariances
        learn_trans_covar=True,
        trans_covar=1,
        learn_initial_state_covar=True,
        initial_state_covar=1,
        # optimization
        learning_rate=7e-3,
        enc_out_norm='post',
        clip_gradients=True,
        never_invalid=True,
    )
    cfg.finalize_adding()
    return cfg