def __init__(self, hidden_layer_sizes=(100,), activation="relu", solver='adam',
             alpha=0.0001, batch_size='auto', learning_rate="constant",
             learning_rate_init=0.001, power_t=0.5, max_iter=200, shuffle=True,
             random_state=None, tol=1e-4, verbose=False, warm_start=False,
             momentum=0.9, nesterovs_momentum=True, early_stopping=False,
             validation_fraction=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-8):
    """Initialize the classifier wrapper.

    Mirrors the scikit-learn MLPClassifier constructor signature and default
    values, silences sklearn ConvergenceWarning, then initializes both base
    classes (the project wrapper and the underlying sklearn estimator).
    """
    # Suppress ConvergenceWarning emitted by sklearn modules during fitting.
    # NOTE(review): `module` is interpreted as a regular expression, not a
    # shell glob — 'sklearn*' only works because warning filters use prefix
    # matching (re.match); 'sklearn.*' would state the intent correctly.
    # Confirm before changing, as the current form matches in practice.
    warnings.filterwarnings(module='sklearn*', action='ignore', category=ConvergenceWarning)
    BaseWrapperClf.__init__(self)
    # Forward every hyperparameter positionally; the order below must match
    # _skMLPClassifier.__init__'s parameter order exactly — do not reorder.
    _skMLPClassifier.__init__(
        self, hidden_layer_sizes, activation, solver, alpha, batch_size,
        learning_rate, learning_rate_init, power_t, max_iter, shuffle,
        random_state, tol, verbose, warm_start, momentum, nesterovs_momentum,
        early_stopping, validation_fraction, beta_1, beta_2, epsilon)
def __init__(self, variables=None, arquitecture=None):
    """Build an individual from the given genes.

    Falls back to select_variables() / select_arquitecture() whenever the
    corresponding argument is missing (or falsy), then initializes the
    underlying MLP with the architecture encoded in the genes.
    """
    chosen_vars = variables or select_variables()
    chosen_arch = arquitecture or select_arquitecture()
    self.variables = Variable(chosen_vars)
    self.arquitecture = Arquitecture(chosen_arch)
    # Fitness is defined as classification accuracy; both start at zero
    # until the individual is evaluated.
    self.fitness = 0
    self.accuracy = 0
    self.genes = (self.variables.raw(), self.arquitecture.raw())
    # MLP Classifier init
    # http://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html
    MLPClassifier.__init__(
        self,
        hidden_layer_sizes=tuple(self.arquitecture.raw()),
        learning_rate='constant',
        learning_rate_init=0.001,
        max_iter=3000,
    )
def __init__(self, k=1000):
    """Initialize the classifier, optionally overriding the feature budget.

    When *k* is not None, stores it in the class-level ``K_features``
    attribute (shared by all instances), then initializes the underlying
    MLP with a fixed configuration (single hidden layer of 150 units,
    logistic activation, adam solver).
    """
    # Fix: compare against None with `is not` (identity), not `!=` —
    # equality can be hijacked by objects overriding __eq__ (PEP 8).
    if k is not None:
        # Class attribute: intentionally shared across all instances.
        MutableMLPClassifier.K_features = k
    # sklearn accepts a scalar hidden_layer_sizes (wrapped into a
    # one-element list at fit time), so 150 ≡ (150,).
    MLPClassifier.__init__(self, hidden_layer_sizes=150,
                           activation='logistic', solver='adam')