def setUp(self):
    """Build a small MADE on a fake dataset for the tests in this case.

    Reads self.input_size, self.hidden_sizes, self.use_cond_mask,
    self.direct_input_connect and self.direct_output_connect, which are
    expected to be set on the test class.
    """
    dataset = Dataset.get_fake(self.input_size, 1)
    # Use the whole training set as a single batch.
    self.model = MADE(dataset,
                      hidden_sizes=self.hidden_sizes,
                      batch_size=dataset['train']['data'].shape[0],
                      hidden_activation=theano.tensor.nnet.sigmoid,
                      use_cond_mask=self.use_cond_mask,
                      direct_input_connect=self.direct_input_connect,
                      direct_output_connect=self.direct_output_connect)
    # Number of mask shuffles the tests will perform.
    self.nb_shuffle = 50
def _get_fake_model(self, in_size, hidden_sizes):
    """Return a MADE built on a fake dataset of width *in_size*.

    The hidden activation is the identity, so the network stays linear;
    direct input connections go straight to the output layer.
    """
    dataset = Dataset.get_fake(in_size, 1)
    return MADE(dataset,
                hidden_sizes=hidden_sizes,
                hidden_activation=lambda x: x,  # identity activation
                use_cond_mask=False,
                direct_input_connect="Output",
                direct_output_connect=False)
def _get_conditioning_mask_model(dataset, input_size, hidden_sizes):
    """Return a MADE configured to exercise the conditioning mask.

    NOTE(review): *input_size* is accepted but never used here —
    presumably kept for signature symmetry with sibling helpers.
    """
    import theano.tensor as T
    return MADE(dataset,
                hidden_sizes=hidden_sizes,
                hidden_activation=T.nnet.sigmoid,
                use_cond_mask=True,
                direct_input_connect="None",
                direct_output_connect=False,
                weights_initialization="Diagonal")
def build_model(dataset, trainingparams, hyperparams, hidden_sizes): print '\n### Initializing MADE ... ', start_time = t.time() model = MADE(dataset, learning_rate=trainingparams['learning_rate'], decrease_constant=trainingparams['decrease_constant'], hidden_sizes=hidden_sizes, random_seed=hyperparams['random_seed'], batch_size=trainingparams['batch_size'], hidden_activation=activation_functions[ hyperparams['hidden_activation']], use_cond_mask=hyperparams['use_cond_mask'], direct_input_connect=hyperparams['direct_input_connect'], direct_output_connect=hyperparams['direct_output_connect'], update_rule=trainingparams['update_rule'], dropout_rate=trainingparams['dropout_rate'], weights_initialization=hyperparams['weights_initialization'], mask_distribution=hyperparams['mask_distribution']) print utils.get_done_text(start_time), "###" return model
def setUp(self):
    """Prepare a trained MADE for the sum-of-probabilities test.

    The test checks that the probabilities of all inputs sum to 1; it
    needs float64 precision, so theano's floatX is switched here (the
    previous value is stashed in self._old_theano_config_floatX —
    presumably restored in tearDown, which is not visible here).
    """
    self._old_theano_config_floatX = theano.config.floatX
    theano.config.floatX = 'float64'

    self.nb_test = 15
    self._shuffling_type = "Full"

    dataset = Dataset.get_permutation(self.input_size)
    # One batch covering the whole (permutation) training set.
    self.model = MADE(dataset,
                      hidden_sizes=self.hidden_sizes,
                      batch_size=dataset['train']['data'].shape[0],
                      hidden_activation=theano.tensor.nnet.sigmoid,
                      use_cond_mask=self.use_cond_mask,
                      direct_input_connect=self.direct_input_connect,
                      direct_output_connect=self.direct_output_connect)

    # Train briefly (with mask shuffling) so the results are more accurate.
    for epoch in range(2 * self.input_size):
        self.model.shuffle(self._shuffling_type)
        self.model.learn(epoch, True)