def generate_random_slm_bls_tie_edv_configuration(
        init_maximum_layers=5,
        maximum_iterations=100,
        maximum_learning_step=10,
        maximum_neuron_connection_weight=0.5,
        maximum_bias_connection_weight=1.0,
        mutation_maximum_new_neurons_per_layer=3):
    """Build a random SLM-BLS configuration whose stopping criterion is
    picked at random between EDV and TIE.

    Delegates the bulk of the sampling to
    ``generate_random_slm_bls_configuration`` and then overrides the
    stopping criterion and the population size.
    """
    cfg = generate_random_slm_bls_configuration(
        init_maximum_layers=init_maximum_layers,
        maximum_iterations=maximum_iterations,
        maximum_learning_step=maximum_learning_step,
        maximum_neuron_connection_weight=maximum_neuron_connection_weight,
        maximum_bias_connection_weight=maximum_bias_connection_weight,
        mutation_maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer)

    # Coin flip between the two stopping criteria: 1 -> EDV, 2 -> TIE.
    if random.randint(1, 2) == 1:
        criterion = ErrorDeviationVariationCriterion(
            maximum_iterations=maximum_iterations)
    else:
        criterion = TrainingImprovementEffectivenessCriterion(
            maximum_iterations=maximum_iterations)
    cfg['stopping_criterion'] = criterion

    cfg['population_size'] = 100
    return cfg
# Beispiel #2
# 0
def get_random_config_slm_ols_edv(init_maximum_layers=5,
                                  maximum_iterations=200,
                                  maximum_neuron_connection_weight=0.5,
                                  maximum_bias_connection_weight=1.0,
                                  mutation_maximum_new_neurons_per_layer=3):
    """Draw a random SLM-OLS configuration using the EDV stopping criterion.

    Connection-weight bounds are sampled uniformly from ``[0.1, maximum]``;
    the learning step is fixed to the string ``'optimized'`` (OLS).
    """
    criterion = ErrorDeviationVariationCriterion(
        maximum_iterations=maximum_iterations)

    # Sampling order matters for RNG reproducibility: neuron bound first,
    # then bias bound — same order as the other config generators here.
    neuron_weight = random.uniform(0.1, maximum_neuron_connection_weight)
    bias_weight = random.uniform(0.1, maximum_bias_connection_weight)

    mutation = Mutation4(
        maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer,
        maximum_bias_connection_weight=bias_weight)

    return {
        'stopping_criterion': criterion,
        'population_size': 100,
        'layers': init_maximum_layers,
        'learning_step': 'optimized',
        'maximum_neuron_connection_weight': neuron_weight,
        'maximum_bias_connection_weight': bias_weight,
        'mutation_operator': mutation,
        'random_sampling_technique': False,
        'random_weighting_technique': False,
    }
def generate_random_slm_ols_edv_configuration(init_maximum_layers=5, maximum_iterations=250, maximum_neuron_connection_weight=0.5, maximum_bias_connection_weight=1.0, mutation_maximum_new_neurons_per_layer=3):
    """Random SLM-OLS configuration forced to use the EDV stopping criterion.

    Samples a base OLS configuration, then overrides the stopping criterion
    with ``ErrorDeviationVariationCriterion`` and pins the population size.
    """
    configuration = generate_random_slm_ols_configuration(
        init_maximum_layers=init_maximum_layers,
        maximum_iterations=maximum_iterations,
        maximum_neuron_connection_weight=maximum_neuron_connection_weight,
        maximum_bias_connection_weight=maximum_bias_connection_weight,
        mutation_maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer)

    configuration['stopping_criterion'] = ErrorDeviationVariationCriterion(
        maximum_iterations=maximum_iterations)
    configuration['population_size'] = 100
    return configuration
 def test_edv(self):
     """EDV smoke test: fit() with the EDV stopping criterion must yield a champion."""
     print('EDV tests of fit()...')
     algorithm = SemanticLearningMachine(
         100, ErrorDeviationVariationCriterion(0.25), 3, 0.01, 50,
         Mutation2())
     # Fix: DataFrame.as_matrix() was removed in pandas 1.0; use .values,
     # which is what the sibling tests in this file already use.
     X = get_input_variables(self.training).values
     y = get_target_variable(self.training).values
     algorithm.fit(X, y, RootMeanSquaredError, verbose=True)
     self.assertTrue(expr=algorithm.champion)
     print()
# Beispiel #5
# 0
 def test_edv(self):
     """EDV smoke test with timing: fit() must produce a champion."""
     print('EDV tests of fit()...')

     def time_seconds():
         # why: thin alias so the timing lines below read uniformly
         return default_timer()

     algorithm = SemanticLearningMachine(100, ErrorDeviationVariationCriterion(0.25), 3, 0.01, 50, Mutation2(), RootMeanSquaredError, True)
     X = get_input_variables(self.training).values
     y = get_target_variable(self.training).values
     # Fix: the original assigned start_time twice; the first assignment was
     # dead (overwritten before use). Time only the fit() call.
     start_time = time_seconds()
     algorithm.fit(X, y, RootMeanSquaredError, verbose=False)
     print("time to train algorithm: ", (time_seconds()-start_time))
     self.assertTrue(expr=algorithm.champion)
     print()
# Beispiel #6
# 0
 def test_predict(self):
     """predict() must return one prediction per validation target."""
     print("testing predict()...")

     def time_seconds():
         return default_timer()

     base_learner = SemanticLearningMachine(50, ErrorDeviationVariationCriterion(0.25), 2, 1, 10, Mutation2())
     ensemble_learner = EnsembleRandomIndependentWeighting(base_learner, 100, weight_range=2)
     X = get_input_variables(self.training).values
     y = get_target_variable(self.training).values

     fit_start = time_seconds()
     ensemble_learner.fit(X, y, RootMeanSquaredError, verbose=False)
     print("time to train algorithm: ", (time_seconds() - fit_start))

     predict_start = time_seconds()
     prediction = ensemble_learner.predict(get_input_variables(self.validation).values)
     print("time to predict algorithm: ", (time_seconds() - predict_start))

     # One prediction per validation row.
     self.assertTrue(expr=len(prediction) == len(get_target_variable(self.validation).values))
     print()
# Beispiel #7
# 0
def get_random_config_slm_fls_tie_edv(
        init_maximum_layers=5,
        maximum_iterations=200,
        maximum_learning_step=1,
        maximum_neuron_connection_weight=0.5,
        maximum_bias_connection_weight=1.0,
        mutation_maximum_new_neurons_per_layer=3):
    """Draw a random SLM-FLS configuration; the stopping criterion is
    chosen at random between EDV and TIE.

    The learning step is a fixed value sampled from
    ``[0.001, maximum_learning_step]`` (FLS variant).
    """
    # Coin flip between the two stopping criteria: 1 -> EDV, 2 -> TIE.
    if random.randint(1, 2) == 1:
        criterion = ErrorDeviationVariationCriterion(
            maximum_iterations=maximum_iterations)
    else:
        criterion = TrainingImprovementEffectivenessCriterion(
            maximum_iterations=maximum_iterations)

    # Sampling order preserved for RNG reproducibility:
    # learning step, then neuron bound, then bias bound.
    learning_step = random.uniform(0.001, maximum_learning_step)
    neuron_weight = random.uniform(0.1, maximum_neuron_connection_weight)
    bias_weight = random.uniform(0.1, maximum_bias_connection_weight)

    return {
        'stopping_criterion': criterion,
        'population_size': 100,
        'layers': init_maximum_layers,
        'learning_step': learning_step,
        'maximum_neuron_connection_weight': neuron_weight,
        'maximum_bias_connection_weight': bias_weight,
        'mutation_operator': Mutation4(
            maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer,
            maximum_bias_connection_weight=bias_weight),
        'random_sampling_technique': False,
        'random_weighting_technique': False,
    }
# Beispiel #8
# 0
_BASE_PARAMETERS = {'number_generations': 200, 'population_size': 100}

# Grid-search space for SLM with a fixed learning step (FLS variant).
_SLM_FLS_PARAMETERS = {
    'stopping_criterion': [
        MaxGenerationsCriterion(_BASE_PARAMETERS['number_generations']),
    ],
    'population_size': [_BASE_PARAMETERS['population_size']],
    'layers': [1, 2, 3],
    'learning_step': [0.01],
    'max_connections': [1, 10, 50],
    'mutation_operator': [Mutation2()],
}

# Grid-search space for SLM with an optimized learning step (OLS variant);
# stopping is governed by error-deviation variation instead of generations.
_SLM_OLS_PARAMETERS = {
    'stopping_criterion': [
        ErrorDeviationVariationCriterion(0.25),
        ErrorDeviationVariationCriterion(0.5),
    ],
    'population_size': [_BASE_PARAMETERS['population_size']],
    'layers': list(range(1, 11)),
    'learning_step': ['optimized'],
    'max_connections': [1, 10, 50, 100],
    'mutation_operator': [Mutation2()],
}

_NEAT_PARAMETERS = {
    'stopping_criterion':
    [MaxGenerationsCriterion(_BASE_PARAMETERS.get(('number_generations')))],
    'population_size': [_BASE_PARAMETERS.get('population_size')],
    'compatibility_threshold': [3, 4],
    'compatibility_disjoint_coefficient': [1],