# Example 1
def get_random_config_slm_ols_edv(init_maximum_layers=5,
                                  maximum_iterations=200,
                                  maximum_neuron_connection_weight=0.5,
                                  maximum_bias_connection_weight=1.0,
                                  mutation_maximum_new_neurons_per_layer=3):
    """Build a randomized SLM (OLS learning step) configuration dict.

    Uses the error-deviation-variation (EDV) stopping criterion and an
    'optimized' learning step. The neuron and bias connection-weight
    bounds are drawn uniformly from [0.1, given maximum]; RST/RWT are
    disabled.
    """
    # Sample the weight bounds first so the mutation operator can share
    # the same bias bound (sampling order matters for reproducibility).
    neuron_weight_bound = random.uniform(0.1, maximum_neuron_connection_weight)
    bias_weight_bound = random.uniform(0.1, maximum_bias_connection_weight)

    return {
        'stopping_criterion': ErrorDeviationVariationCriterion(
            maximum_iterations=maximum_iterations),
        'population_size': 100,
        'layers': init_maximum_layers,
        'learning_step': 'optimized',
        'maximum_neuron_connection_weight': neuron_weight_bound,
        'maximum_bias_connection_weight': bias_weight_bound,
        'mutation_operator': Mutation4(
            maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer,
            maximum_bias_connection_weight=bias_weight_bound),
        'random_sampling_technique': False,
        'random_weighting_technique': False,
    }
def generate_random_slm_bls_configuration(
        option=None,
        init_maximum_layers=5,
        maximum_iterations=100,
        maximum_learning_step=10,
        maximum_neuron_connection_weight=0.5,
        maximum_bias_connection_weight=1.0,
        mutation_maximum_new_neurons_per_layer=3):
    """Build a randomized SLM-BLS configuration dict.

    The number of generations, the fixed learning step and the connection
    weight bounds are all sampled uniformly at random. RST/RWT and the
    protected/bootstrap OLS variants are disabled; the learning-step
    history is recorded.

    Args:
        option: Unused; kept for backward compatibility with callers that
            previously selected RST/RWT variants (that selection logic was
            disabled before this revision).
        init_maximum_layers: Value stored under 'layers'.
        maximum_iterations: Upper bound for the sampled generation count.
        maximum_learning_step: Upper bound for the sampled learning step.
        maximum_neuron_connection_weight: Upper bound for the sampled
            neuron connection-weight limit.
        maximum_bias_connection_weight: Upper bound for the sampled bias
            connection-weight limit.
        mutation_maximum_new_neurons_per_layer: Passed to Mutation4.

    Returns:
        dict: The configuration, ready to be passed to the SLM algorithm.
    """
    configuration = {}

    configuration['stopping_criterion'] = MaxGenerationsCriterion(
        random.randint(1, maximum_iterations))

    configuration['population_size'] = DEFAULT_POPULATION_SIZE

    configuration['layers'] = init_maximum_layers

    configuration['learning_step'] = random.uniform(0.001,
                                                    maximum_learning_step)

    configuration['maximum_neuron_connection_weight'] = random.uniform(
        0.1, maximum_neuron_connection_weight)
    configuration['maximum_bias_connection_weight'] = random.uniform(
        0.1, maximum_bias_connection_weight)

    configuration['mutation_operator'] = Mutation4(
        maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer,
        maximum_bias_connection_weight=configuration[
            'maximum_bias_connection_weight'])

    configuration['random_sampling_technique'] = False
    configuration['random_weighting_technique'] = False

    configuration['protected_ols'] = False

    configuration['bootstrap_ols'] = False

    configuration['store_ls_history'] = True

    return configuration
# Example 3
 def __init__(self,
              population_size,
              stopping_criterion,
              layers,
              learning_step,
              max_connections=None,
              mutation_operator=None,
              init_minimum_layers=2,
              init_maximum_neurons_per_layer=5,
              maximum_neuron_connection_weight=0.5,
              maximum_bias_connection_weight=1.0,
              subset_ratio=1,
              weight_range=1.0,
              random_sampling_technique=False,
              random_weighting_technique=False,
              protected_ols=False,
              bootstrap_ols=False,
              bootstrap_ols_samples=10,
              bootstrap_ols_criterion='median',
              high_absolute_ls_difference=1,
              store_ls_history=False):
     """Initialize the SLM algorithm state.

     Args:
         population_size: Forwarded to the parent constructor.
         stopping_criterion: Forwarded to the parent constructor.
         layers: Initial maximum number of layers.
         learning_step: Learning-step strategy or fixed value.
         max_connections: Optional cap on connections.
         mutation_operator: Mutation operator; defaults to a fresh
             Mutation4() per instance. (The previous signature used
             `mutation_operator=Mutation4()`, which evaluated once at
             definition time and shared a single instance across all
             constructions — the classic mutable-default pitfall.)
         subset_ratio: Used with the random sampling technique (RST).
         weight_range: Used with the random weighting technique (RWT).
         bootstrap_ols*: Bootstrap OLS settings; the difference threshold
             and its history are only created when bootstrap_ols is True.
         high_absolute_ls_difference: Threshold stored only if
             bootstrap_ols is enabled.
         store_ls_history: When True, a learning-step history list is kept.
     """
     super().__init__(population_size, stopping_criterion)
     self.layers = layers
     self.learning_step = learning_step
     self.max_connections = max_connections
     # Create a per-instance default operator instead of sharing one.
     self.mutation_operator = (Mutation4() if mutation_operator is None
                               else mutation_operator)
     self.init_minimum_layers = init_minimum_layers
     self.init_maximum_neurons_per_layer = init_maximum_neurons_per_layer
     self.maximum_neuron_connection_weight = maximum_neuron_connection_weight
     self.maximum_bias_connection_weight = maximum_bias_connection_weight
     self.next_champion = None
     self.random_sampling_technique = random_sampling_technique
     self.random_weighting_technique = random_weighting_technique
     self.subset_ratio = subset_ratio
     self.weight_range = weight_range
     self.protected_ols = protected_ols
     self.bootstrap_ols = bootstrap_ols
     self.bootstrap_ols_samples = bootstrap_ols_samples
     self.bootstrap_ols_criterion = bootstrap_ols_criterion
     # NOTE: these two attributes exist only when bootstrap_ols is True,
     # matching the original behavior — downstream code presumably guards
     # on self.bootstrap_ols before reading them.
     if self.bootstrap_ols:
         self.high_absolute_ls_difference = high_absolute_ls_difference
         self.high_absolute_differences_history = []
     self.store_ls_history = store_ls_history
     if self.store_ls_history:
         self.ls_history = []
     self.zero_ls_by_activation_function = {}
     self.zero_ls_history = []
     self.lr_intercept = None
# Example 4
def get_random_config_slm_fls_grouped(
        option=None,
        init_maximum_layers=5,
        maximum_iterations=200,
        maximum_learning_step=1,
        maximum_neuron_connection_weight=0.5,
        maximum_bias_connection_weight=1.0,
        mutation_maximum_new_neurons_per_layer=3):
    """Build a randomized SLM (fixed learning step) configuration dict.

    Uses a max-generations stopping criterion with a randomly drawn
    generation count, a small population (10), and a learning step and
    connection-weight bounds sampled uniformly at random. RST/RWT are
    disabled.

    Args:
        option: Unused; kept for backward compatibility with callers that
            previously selected RST/RWT variants (that selection logic was
            disabled before this revision).
        init_maximum_layers: Value stored under 'layers'.
        maximum_iterations: Upper bound for the sampled generation count.
        maximum_learning_step: Upper bound for the sampled learning step.
        maximum_neuron_connection_weight: Upper bound for the sampled
            neuron connection-weight limit.
        maximum_bias_connection_weight: Upper bound for the sampled bias
            connection-weight limit.
        mutation_maximum_new_neurons_per_layer: Passed to Mutation4.

    Returns:
        dict: The configuration, ready to be passed to the SLM algorithm.
    """
    config = {}

    config['stopping_criterion'] = MaxGenerationsCriterion(
        random.randint(1, maximum_iterations))

    config['population_size'] = 10

    config['layers'] = init_maximum_layers

    config['learning_step'] = random.uniform(0.001, maximum_learning_step)

    config['maximum_neuron_connection_weight'] = random.uniform(
        0.1, maximum_neuron_connection_weight)
    config['maximum_bias_connection_weight'] = random.uniform(
        0.1, maximum_bias_connection_weight)

    config['mutation_operator'] = Mutation4(
        maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer,
        maximum_bias_connection_weight=config['maximum_bias_connection_weight']
    )

    config['random_sampling_technique'] = False
    config['random_weighting_technique'] = False

    return config
# Example 5
def get_random_config_slm_fls_tie_edv(
        init_maximum_layers=5,
        maximum_iterations=200,
        maximum_learning_step=1,
        maximum_neuron_connection_weight=0.5,
        maximum_bias_connection_weight=1.0,
        mutation_maximum_new_neurons_per_layer=3):
    """Build a randomized SLM (fixed learning step) configuration dict.

    The stopping criterion is chosen at random between EDV (error
    deviation variation) and TIE (training improvement effectiveness);
    the learning step and connection-weight bounds are sampled uniformly.
    RST/RWT are disabled.
    """
    # Coin flip between the two semantic stopping criteria: 1 -> EDV,
    # otherwise TIE.
    if random.randint(1, 2) == 1:
        criterion = ErrorDeviationVariationCriterion(
            maximum_iterations=maximum_iterations)
    else:
        criterion = TrainingImprovementEffectivenessCriterion(
            maximum_iterations=maximum_iterations)

    # Sampling order is preserved: learning step, then the two weight
    # bounds (matters for reproducibility with a seeded RNG).
    learning_step = random.uniform(0.001, maximum_learning_step)
    neuron_weight_bound = random.uniform(0.1, maximum_neuron_connection_weight)
    bias_weight_bound = random.uniform(0.1, maximum_bias_connection_weight)

    return {
        'stopping_criterion': criterion,
        'population_size': 100,
        'layers': init_maximum_layers,
        'learning_step': learning_step,
        'maximum_neuron_connection_weight': neuron_weight_bound,
        'maximum_bias_connection_weight': bias_weight_bound,
        'mutation_operator': Mutation4(
            maximum_new_neurons_per_layer=mutation_maximum_new_neurons_per_layer,
            maximum_bias_connection_weight=bias_weight_bound),
        'random_sampling_technique': False,
        'random_weighting_technique': False,
    }