def __init__(self,
             hidden_neurons=None,
             hidden_activation=nn.ReLU(False),
             output_activation=nn.Sigmoid(),
             loss=nn.MSELoss(),
             optimizer='adam',
             epochs=100,
             batch_size=32,
             dropout_rate=0.2,
             l2_regularizer=0.001,
             validation_size=0.1,
             preprocessing=True,
             verbose=1,
             random_state=None,
             threshold=1):
    """Record the LSTM-VAE hyperparameters and validate the network layout.

    Parameters mirror the usual detector configuration: the encoder/decoder
    layer sizes (``hidden_neurons``, which must read the same forwards and
    backwards so the decoder mirrors the encoder), the activation modules,
    the reconstruction ``loss``, and the training/validation settings.

    Raises
    ------
    ValueError
        If ``hidden_neurons`` is not symmetric, or ``dropout_rate`` falls
        outside ``[0, 1)``.
    """
    super(Lstm_VAE, self).__init__()

    # Fall back to the default symmetric architecture when none is given.
    if hidden_neurons is None:
        hidden_neurons = [32, 16, 16, 32]

    self.hidden_neurons = hidden_neurons
    self.hidden_activation = hidden_activation
    self.output_activation = output_activation
    self.loss = loss
    self.optimizer = optimizer
    self.epochs = epochs
    self.batch_size = batch_size
    self.dropout_rate = dropout_rate
    self.l2_regularizer = l2_regularizer
    self.validation_size = validation_size
    self.preprocessing = preprocessing
    self.verbose = verbose
    self.random_state = random_state
    self.threshold = threshold

    # The decoder must mirror the encoder: layer sizes have to be a palindrome.
    if self.hidden_neurons != self.hidden_neurons[::-1]:
        print(self.hidden_neurons)
        raise ValueError("Hidden units should be symmetric")

    self.hidden_neurons_ = self.hidden_neurons

    check_parameter(dropout_rate, 0, 1, param_name='dropout_rate',
                    include_left=True)
def __init__(self,
             hidden_neurons=None,
             hidden_activation=nn.Tanh(),
             output_activation=nn.Softmax(dim=1),
             gmm_k=4,
             lambda_energy=0.1,
             lambda_cov_diag=0.005,
             optimizer='adam',
             epochs=100,
             batch_size=32,
             dropout_rate=0.5,
             l2_regularizer=0.001,
             validation_size=0.1,
             preprocessing=True,
             verbose=1,
             random_state=None,
             threshold=1.0):
    """Record the DAGMM hyperparameters and validate the network layout.

    Besides the common autoencoder settings, this stores the GMM-specific
    knobs: the number of mixture components ``gmm_k`` and the loss weights
    ``lambda_energy`` / ``lambda_cov_diag``. ``hidden_neurons`` must read
    the same forwards and backwards so the decoder mirrors the encoder.

    Raises
    ------
    ValueError
        If ``hidden_neurons`` is not symmetric, or ``dropout_rate`` falls
        outside ``[0, 1)``.
    """
    super(Dagmm, self).__init__()

    # Fall back to the default symmetric architecture when none is given.
    if hidden_neurons is None:
        hidden_neurons = [64, 32, 10, 1, 1, 10, 32, 64]

    self.gmm_k = gmm_k
    self.lambda_energy = lambda_energy
    self.lambda_cov_diag = lambda_cov_diag
    self.hidden_neurons = hidden_neurons
    self.hidden_activation = hidden_activation
    self.output_activation = output_activation
    self.optimizer = optimizer
    self.dropout_rate = dropout_rate
    self.epochs = epochs
    self.batch_size = batch_size
    self.l2_regularizer = l2_regularizer
    self.validation_size = validation_size
    self.preprocessing = preprocessing
    self.verbose = verbose
    self.random_state = random_state
    self.threshold = threshold

    # The decoder must mirror the encoder: layer sizes have to be a palindrome.
    if self.hidden_neurons != self.hidden_neurons[::-1]:
        print(self.hidden_neurons)
        raise ValueError("Hidden units should be symmetric")

    self.hidden_neurons_ = self.hidden_neurons

    check_parameter(dropout_rate, 0, 1, param_name='dropout_rate',
                    include_left=True)
def test_check_parameter_range(self):
    """Exercise check_parameter(): type checks, bound ordering, range
    enforcement, and the include_left / include_right switches."""
    # Non-numeric values for param, low, or high raise TypeError.
    for args in ((('f'), 0, 100),
                 (1, 'f', 100),
                 (1, 0, 'f')):
        with assert_raises(TypeError):
            check_parameter(*args)
    with assert_raises(TypeError):
        check_parameter(argmaxn(value_list=[1, 2, 3], n=1), 0, 100)

    # Leaving both bounds unset is an error.
    with assert_raises(ValueError):
        check_parameter(50)

    # low must be strictly less than high (low >= high is rejected).
    with assert_raises(ValueError):
        check_parameter(50, 100, 99)
    with assert_raises(ValueError):
        check_parameter(50, 100, 100)

    # One-sided bounds: violations raise, satisfied bounds return True.
    with assert_raises(ValueError):
        check_parameter(50, low=100)
    with assert_raises(ValueError):
        check_parameter(50, high=0)
    assert_equal(True, check_parameter(50, low=10))
    assert_equal(True, check_parameter(50, high=100))

    # Values outside [low, high] are rejected.
    for bad in ((-1, 0, 100), (101, 0, 100), (0.5, 0.2, 0.3)):
        with assert_raises(ValueError):
            check_parameter(*bad)

    # Values inside the range pass.
    assert_equal(True, check_parameter(50, 0, 100))
    assert_equal(True, check_parameter(0.5, 0.1, 0.8))

    # Boundary handling via include_left / include_right.
    with assert_raises(ValueError):
        check_parameter(100, 0, 100, include_left=False,
                        include_right=False)
    assert_equal(True, check_parameter(0, 0, 100, include_left=True,
                                       include_right=False))
    assert_equal(True, check_parameter(0, 0, 100, include_left=True,
                                       include_right=True))
    assert_equal(True, check_parameter(100, 0, 100, include_left=False,
                                       include_right=True))
    assert_equal(True, check_parameter(100, 0, 100, include_left=True,
                                       include_right=True))