def main():
    name = 'L2L-FUN-FACE'
    experiment = Experiment("../results/")
    trajectory_name = name
    traj, all_jube_params = experiment.prepare_experiment(name=name,
                                                          trajectory_name=trajectory_name,
                                                          log_stdout=True)

    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    parameters = FACEParameters(min_pop_size=20, max_pop_size=50, n_elite=10,
                                smoothing=0.2, temp_decay=0, n_iteration=1,
                                distribution=Gaussian(), n_expand=5,
                                stop_criterion=np.inf, seed=109)
    optimizer = FACEOptimizer(traj,
                              optimizee_create_individual=optimizee.create_individual,
                              optimizee_fitness_weights=(-0.1,),
                              parameters=parameters,
                              optimizee_bounding_func=optimizee.bounding_func)

    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters,
                              optimizee_parameters=None)
    experiment.end_experiment(optimizer)
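# Convention note (a sketch of the fitness-weighting convention, not the L2L
# source; the template below states that a negative weight implies
# minimization): the optimizer maximizes the weighted fitness, so the (-0.1,)
# weight above turns the benchmark's cost into a minimization problem, with
# the 0.1 factor merely rescaling the objective.
def weighted_fitness(fitness, weights=(-0.1,)):
    return sum(w * f for w, f in zip(weights, fitness))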
def main():
    # TODO: use the experiment module to prepare and run later the simulation
    # define a directory to store the results
    experiment = Experiment(root_dir_path='~/L2L/results')
    # TODO when using the template: use keywords to prepare the experiment and
    # create a dictionary for jube parameters
    # prepare_experiment returns the trajectory and all jube parameters
    jube_params = {"nodes": "2",
                   "walltime": "10:00:00",
                   "ppn": "1",
                   "cpu_pp": "1"}
    traj, all_jube_params = experiment.prepare_experiment(name='L2L',
                                                          log_stdout=True,
                                                          **jube_params)

    ## Innerloop simulator
    # TODO when using the template: Change the optimizee to the appropriate
    # Optimizee class
    optimizee = Optimizee(traj)
    # TODO Create optimizee parameters
    optimizee_parameters = OptimizeeParameters()

    ## Outerloop optimizer initialization
    # TODO when using the template: Change the optimizer to the appropriate
    # Optimizer class and use the right value for optimizee_fitness_weights.
    # Length is the number of dimensions of fitness, and a negative value
    # implies minimization and vice versa
    optimizer_parameters = OptimizerParameters()
    optimizer = Optimizer(traj, optimizee.create_individual, (1.0,),
                          optimizer_parameters)

    experiment.run_experiment(optimizee=optimizee,
                              optimizee_parameters=optimizee_parameters,
                              optimizer=optimizer,
                              optimizer_parameters=optimizer_parameters)
    experiment.end_experiment(optimizer)
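# A minimal entry point for the template above (an assumption, mirroring how
# the runnable examples in this collection are invoked):
if __name__ == '__main__':
    main()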
def main():
    fit, swc, ref = sys.argv[1:]
    name = 'ARBOR-FUN'
    results_folder = '../results'
    trajectory_name = 'ARBOR'

    experiment = Experiment(results_folder)
    traj, _ = experiment.prepare_experiment(trajectory_name=trajectory_name,
                                            name=name, jube_parameter={})

    # Innerloop simulator
    optimizee = ArbSCOptimizee(traj, fit, swc, ref)

    # Outerloop optimizer initialization
    parameters = GeneticAlgorithmParameters(seed=0, popsize=50, CXPB=0.5,
                                            MUTPB=0.3, NGEN=100, indpb=0.02,
                                            tournsize=15, matepar=0.5,
                                            mutpar=1)
    optimizer = GeneticAlgorithmOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-0.1,),
        parameters=parameters)

    experiment.run_experiment(optimizee=optimizee, optimizer=optimizer,
                              optimizer_parameters=parameters,
                              optimizee_parameters=None)
    experiment.end_experiment(optimizer)
def main():
    name = 'L2L-FUN-GS'
    experiment = Experiment(root_dir_path='../results')
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True)

    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1], n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(traj,
                                    optimizee_create_individual=optimizee.create_individual,
                                    optimizee_fitness_weights=(-0.1,),
                                    parameters=parameters)

    # Experiment run (parameters belong to the optimizer, not the optimizee)
    experiment.run_experiment(optimizee=optimizee, optimizer=optimizer,
                              optimizer_parameters=parameters)
    # End experiment
    experiment.end_experiment(optimizer)
def main():
    from l2l.utils.experiment import Experiment
    experiment = Experiment(root_dir_path='../../Data_Produced/L2L')
    name = 'L2L-TEST-WholeBrain'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True,
                                            multiprocessing=False)
    optimizee = WholeBrainOptimizee(traj, {'we': (0, 10)})
    traj.individual = sdict(optimizee.create_individual())
def setUp(self):
    self.experiment = Experiment(root_dir_path='../../results')
    jube_params = {}
    try:
        self.trajectory, _ = self.experiment.prepare_experiment(
            name='test_trajectory',
            log_stdout=True,
            add_time=True,
            automatic_storing=True,
            jube_parameter=jube_params)
    except FileNotFoundError as fe:
        self.fail(
            "{} \n L2L is not well configured. Missing path file.".format(
                fe))
    self.paths = self.experiment.paths
def setUp(self):
    # Test function
    function_id = 14
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    self.experiment = Experiment(root_dir_path='../../results')
    jube_params = {}
    self.trajectory, all_jube_params = self.experiment.prepare_experiment(
        name='L2L', log_stdout=True, jube_parameter=jube_params)
    self.optimizee_parameters = namedtuple('OptimizeeParameters', [])
    self.optimizee = FunctionGeneratorOptimizee(self.trajectory,
                                                benchmark_function, seed=1)
def run_experiment():
    experiment = Experiment("../results/")
    name = 'L2L-FUN-ES'
    trajectory_name = 'mirroring-and-fitness-shaping'
    traj, all_jube_params = experiment.prepare_experiment(
        name=name, trajectory_name=trajectory_name, log_stdout=True)

    ## Benchmark function
    function_id = 14
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 200

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    optimizer_seed = 1234
    parameters = EvolutionStrategiesParameters(
        learning_rate=0.1,
        noise_std=1.0,
        mirrored_sampling_enabled=True,
        fitness_shaping_enabled=True,
        pop_size=20,
        n_iteration=1000,
        stop_criterion=np.inf,
        seed=optimizer_seed)
    optimizer = EvolutionStrategiesOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-1.,),
        parameters=parameters,
        optimizee_bounding_func=optimizee.bounding_func)

    # Run experiment
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters)
    # End experiment
    experiment.end_experiment(optimizer)
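# A minimal standalone sketch (not the L2L implementation) of the two ES
# features enabled above: mirrored sampling evaluates each noise vector eps at
# both theta + sigma*eps and theta - sigma*eps, and fitness shaping replaces
# raw fitnesses with rank-based utilities before the gradient estimate.
import numpy as np

def es_step(theta, fitness_fn, pop_size=20, sigma=1.0, lr=0.1, rng=np.random):
    eps = rng.randn(pop_size // 2, theta.size)
    eps = np.concatenate([eps, -eps])              # mirrored sampling
    fitness = np.array([fitness_fn(theta + sigma * e) for e in eps])
    ranks = np.argsort(np.argsort(fitness))        # 0 = worst, N-1 = best
    utilities = ranks / (len(ranks) - 1) - 0.5     # rank-based fitness shaping
    grad = utilities @ eps / (len(eps) * sigma)    # search-gradient estimate
    return theta + lr * grad                       # ascend the shaped fitness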
def main():
    name = 'L2L-FunctionGenerator-SA'
    experiment = Experiment("../results/")
    traj, all_jube_params = experiment.prepare_experiment(name=name,
                                                          log_stdout=True)

    ## Benchmark function
    function_id = 14
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    parameters = SimulatedAnnealingParameters(
        n_parallel_runs=50, noisy_step=.03, temp_decay=.99, n_iteration=100,
        stop_criterion=np.inf, seed=np.random.randint(1e5),
        cooling_schedule=AvailableCoolingSchedules.QUADRATIC_ADDAPTIVE)  # enum name as spelled in L2L
    optimizer = SimulatedAnnealingOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-1,),
        parameters=parameters,
        optimizee_bounding_func=optimizee.bounding_func)

    # Run experiment
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters)
    # End experiment
    experiment.end_experiment(optimizer)
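# A standalone sketch (not the L2L code) of the simulated-annealing core used
# above: propose a noisy step, always accept improvements, accept worse
# solutions with probability exp(-delta / T), and decay the temperature T.
# Here a simple multiplicative schedule stands in for the quadratic one.
import numpy as np

def sa_minimize(cost_fn, x, n_iteration=100, noisy_step=0.03, temp_decay=0.99,
                rng=np.random):
    temp, cost = 1.0, cost_fn(x)
    for _ in range(n_iteration):
        candidate = x + noisy_step * rng.randn(*x.shape)
        delta = cost_fn(candidate) - cost
        if delta < 0 or rng.rand() < np.exp(-delta / temp):
            x, cost = candidate, cost + delta      # accept the move
        temp *= temp_decay                         # multiplicative cooling
    return x, cost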
def run_experiment():
    name = 'L2L-MNIST-CE'
    experiment = Experiment("../results/")
    traj, all_jube_params = experiment.prepare_experiment(name=name,
                                                          trajectory_name=name,
                                                          log_stdout=True)

    optimizee_seed = 200
    optimizee_parameters = MNISTOptimizeeParameters(n_hidden=10,
                                                    seed=optimizee_seed,
                                                    use_small_mnist=True)
    ## Innerloop simulator
    optimizee = MNISTOptimizee(traj, optimizee_parameters)

    ## Outerloop optimizer initialization
    optimizer_seed = 1234
    optimizer_parameters = CrossEntropyParameters(
        pop_size=40,
        rho=0.9,
        smoothing=0.0,
        temp_decay=0,
        n_iteration=5000,
        distribution=NoisyGaussian(noise_magnitude=1., noise_decay=0.99),
        stop_criterion=np.inf,
        seed=optimizer_seed)
    optimizer = CrossEntropyOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(1.,),
        parameters=optimizer_parameters,
        optimizee_bounding_func=optimizee.bounding_func)

    # Run experiment
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=optimizer_parameters,
                              optimizee_parameters=optimizee_parameters)
    # End experiment
    experiment.end_experiment(optimizer)
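# A standalone sketch (not the L2L optimizer) of the cross-entropy method
# configured above: sample a population from a Gaussian, keep an elite subset,
# refit the Gaussian to the elite, and decay the added noise. How rho maps to
# the elite size is an assumption here; L2L's exact convention may differ.
import numpy as np

def cem_step(mean, std, fitness_fn, pop_size=40, rho=0.9, noise=1.0,
             noise_decay=0.99, rng=np.random):
    pop = mean + std * rng.randn(pop_size, mean.size)
    fitness = np.array([fitness_fn(ind) for ind in pop])
    n_elite = max(1, int(round((1.0 - rho) * pop_size)))  # rho read as quantile
    elite = pop[np.argsort(fitness)[-n_elite:]]           # highest fitness
    new_mean = elite.mean(axis=0)
    new_std = elite.std(axis=0) + noise                   # NoisyGaussian-style jitter
    return new_mean, new_std, noise * noise_decay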
def main():
    name = 'L2L-FUN-GD'
    experiment = Experiment("../results")
    traj, all_jube_params = experiment.prepare_experiment(name=name,
                                                          trajectory_name=name)

    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    # parameters = ClassicGDParameters(learning_rate=0.01, exploration_step_size=0.01,
    #                                  n_random_steps=5, n_iteration=100,
    #                                  stop_criterion=np.inf)
    # parameters = AdamParameters(learning_rate=0.01, exploration_step_size=0.01,
    #                             n_random_steps=5, first_order_decay=0.8,
    #                             second_order_decay=0.8, n_iteration=100,
    #                             stop_criterion=np.inf)
    # parameters = StochasticGDParameters(learning_rate=0.01, stochastic_deviation=1,
    #                                     stochastic_decay=0.99, exploration_step_size=0.01,
    #                                     n_random_steps=5, n_iteration=100,
    #                                     stop_criterion=np.inf)
    parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01,
                                   n_random_steps=5, momentum_decay=0.5,
                                   n_iteration=100, stop_criterion=np.inf,
                                   seed=99)
    optimizer = GradientDescentOptimizer(traj,
                                         optimizee_create_individual=optimizee.create_individual,
                                         optimizee_fitness_weights=(0.1,),
                                         parameters=parameters,
                                         optimizee_bounding_func=optimizee.bounding_func)

    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters)
    experiment.end_experiment(optimizer)
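# A standalone sketch (not the L2L optimizer) of the RMSProp update selected
# above: keep a decaying average of squared gradients and scale each step by
# its inverse square root. L2L estimates the gradient from n_random_steps
# exploratory samples; here the gradient is taken as given.
import numpy as np

def rmsprop_step(x, grad, mean_sq, learning_rate=0.01, momentum_decay=0.5,
                 eps=1e-8):
    mean_sq = momentum_decay * mean_sq + (1 - momentum_decay) * grad ** 2
    x = x - learning_rate * grad / (np.sqrt(mean_sq) + eps)
    return x, mean_sq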
def main():
    experiment = Experiment(root_dir_path='../results')
    name = 'L2L-FUN-GA'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True)

    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    parameters = GeneticAlgorithmParameters(seed=0, popsize=50, CXPB=0.5,
                                            MUTPB=0.3, NGEN=100, indpb=0.02,
                                            tournsize=15, matepar=0.5,
                                            mutpar=1)
    optimizer = GeneticAlgorithmOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-0.1,),
        parameters=parameters)

    # parameters belong to the optimizer, not the optimizee
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters)
    experiment.end_experiment(optimizer)
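# A standalone sketch (not the L2L/DEAP implementation) of one generation of
# the genetic algorithm configured above: tournament selection of size
# tournsize, crossover with probability CXPB, mutation with probability MUTPB.
# The mate and mutate callables are placeholders for the problem-specific
# operators.
import random

def ga_generation(population, fitness_fn, CXPB=0.5, MUTPB=0.3, tournsize=15,
                  mate=None, mutate=None):
    scored = [(fitness_fn(ind), ind) for ind in population]
    offspring = []
    for _ in range(len(population)):
        contenders = random.sample(scored, tournsize)   # tournament selection
        offspring.append(max(contenders, key=lambda p: p[0])[1])
    for i in range(0, len(offspring) - 1, 2):           # pairwise crossover
        if mate and random.random() < CXPB:
            offspring[i], offspring[i + 1] = mate(offspring[i], offspring[i + 1])
    return [mutate(ind) if (mutate and random.random() < MUTPB) else ind
            for ind in offspring]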
def test_juberunner_setup(self):
    self.experiment = Experiment(root_dir_path='../../results')
    self.trajectory, _ = self.experiment.prepare_experiment(
        name='test_trajectory',
        trajectory='test_trajectory',
        filename=".",
        file_title='{} data'.format('test_trajectory'),
        comment='{} data'.format('test_trajectory'),
        add_time=True,
        automatic_storing=True,
        log_stdout=False,
        jube_parameter={})
    self.trajectory.f_add_parameter_group("JUBE_params",
                                          "Contains JUBE parameters")
    self.trajectory.f_add_parameter_to_group(
        "JUBE_params", "exec", "python " + os.path.join(
            self.paths.simulation_path, "run_files/run_optimizee.py"))
    self.trajectory.f_add_parameter_to_group("JUBE_params", "paths",
                                             self.paths)

    ## Benchmark function
    function_id = 14
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 1
    optimizee = FunctionGeneratorOptimizee(self.trajectory,
                                           benchmark_function,
                                           seed=optimizee_seed)
    jube.prepare_optimizee(optimizee, self.paths.root_dir_path)

    # prepare_optimizee should have serialized the optimizee to disk
    fname = os.path.join(self.paths.root_dir_path, "optimizee.bin")
    try:
        f = open(fname, "r")
        f.close()
    except Exception:
        self.fail()
def run_experiment():
    name = 'L2L-MNIST-ES'
    trajectory_name = 'mirroring-and-fitness-shaping'
    experiment = Experiment("../results/")
    traj, all_jube_params = experiment.prepare_experiment(
        name=name, trajectory_name=trajectory_name, log_stdout=True)

    optimizee_seed = 200
    optimizee_parameters = MNISTOptimizeeParameters(n_hidden=10,
                                                    seed=optimizee_seed,
                                                    use_small_mnist=True)
    ## Innerloop simulator
    optimizee = MNISTOptimizee(traj, optimizee_parameters)

    ## Outerloop optimizer initialization
    optimizer_seed = 1234
    optimizer_parameters = EvolutionStrategiesParameters(
        learning_rate=0.1,
        noise_std=0.1,
        mirrored_sampling_enabled=True,
        fitness_shaping_enabled=True,
        pop_size=20,
        n_iteration=2000,
        stop_criterion=np.inf,
        seed=optimizer_seed)
    optimizer = EvolutionStrategiesOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(1.,),
        parameters=optimizer_parameters,
        optimizee_bounding_func=optimizee.bounding_func)

    # Run experiment
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=optimizer_parameters,
                              optimizee_parameters=optimizee_parameters)
    # End experiment
    experiment.end_experiment(optimizer)
def Fitting():
    baseOutPath = 'Data_Produced/DecoEtAl2020'

    # %%%%%%%%%%%%%%% Set General Model Parameters
    we = 2.1  # Global Coupling parameter, found in the DecoEtAl2018_Prepro_* file...
    J_fileName = baseOutPath + "/J_Balance_we2.1.mat"  # "Data_Produced/SC90/J_test_we{}.mat"
    balancedG = BalanceFIC.Balance_J9(we, C, False, J_fileName)
    balancedG['J'] = balancedG['J'].flatten()
    balancedG['we'] = balancedG['we']
    neuronalModel.setParms(balancedG)

    # distanceSettings = {'FC': (FC, False), 'swFCD': (swFCD, True), 'GBC': (GBC, False)}  # 'phFCD': (phFCD, True)
    distanceSettings = {'swFCD': (swFCD, True)}
    swFCD.windowSize = 80
    swFCD.windowStep = 18

    # J_fileNames = baseOutPath+"/J_Balance_we{}.mat"

    # step = 0.05
    # Alphas = np.arange(-0.6, 0+step, step)  # Range used in the original code for B
    # Betas = np.arange(0, 2+step, step)      # Range used in the original code for Z
    Alphas = np.arange(-0.6, 0 + 0.1, 0.1)  # reduced range for DEBUG only!!!
    Betas = np.arange(0, 2 + 0.2, 0.2)      # reduced range for DEBUG only!!!

    # grid = np.meshgrid(Alphas, Betas)
    # grid = np.round(grid[0], 3), np.round(grid[1], 3)
    # gridParms = [{'alpha': a, 'beta': b} for a, b in np.nditer(grid)]

    # Model Simulations
    # ------------------------------------------
    # Now, optimize all alpha (B), beta (Z) values: determine optimal (B,Z) to work with
    print("\n\n###################################################################")
    print("# Fitting (B,Z)")
    print("###################################################################\n")
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    name = 'L2L-DecoEtAl2020-Prepro'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True,
                                            multiprocessing=False)

    # Setup the WholeBrain optimizee
    WBOptimizee.neuronalModel = neuronalModel
    WBOptimizee.integrator = integrator
    WBOptimizee.simulateBOLD = simulateBOLD
    distanceSettings = {'swFCD': (swFCD, True)}  # We need to overwrite this, as L2L only works with ONE observable at a time.
    WBOptimizee.measure = distanceSettings['swFCD'][0]  # Measure to use to compute the error
    WBOptimizee.applyFilters = distanceSettings['swFCD'][1]  # Whether to apply filters to the resulting signal or not
    outEmpFileName = baseOutPath + '/fNeuro_emp_L2L.mat'
    WBOptimizee.processedEmp = processEmpiricalSubjects(
        tc_transf, distanceSettings, outEmpFileName)['swFCD']  # reference values (e.g., empirical) to compare to.
    WBOptimizee.N = N  # Number of regions in the parcellation
    WBOptimizee.trials = NumTrials  # Number of trials to try
    optimizee_parameters = namedtuple('OptimizeeParameters', [])
    filePattern = baseOutPath + '/fitting_{}_L2L.mat'
    optimizee = WBOptimizee.WholeBrainOptimizee(traj,
                                                {'alpha': (-0.6, 0), 'beta': (0., 2.)},
                                                outFilenamePattern=filePattern)  # setupFunc=setupFunc,

    # =================== Test for debug only
    # traj.individual = sdict(optimizee.create_individual())
    # testing_error = optimizee.simulate(traj)
    # print("Testing error is %s", testing_error)
    # =================== end Test

    # Setup the GridSearchOptimizer
    optimizer_parameters = GridSearchParameters(param_grid={
        'alpha': (-0.6, 0., 6),
        'beta': (0., 2., 10)
    })
    optimizer = GridSearchOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-1.,),  # minimize!
        parameters=optimizer_parameters)

    experiment.run_experiment(optimizee=optimizee,
                              optimizee_parameters=optimizee_parameters,
                              optimizer=optimizer,
                              optimizer_parameters=optimizer_parameters)
    experiment.end_experiment(optimizer)
    print(f"best: alpha={experiment.optimizer.best_individual['alpha']} & "
          f"beta={experiment.optimizer.best_individual['beta']}")

    # fitting = optim1D.distanceForAll_Parms(tc_transf, grid, gridParms, NumSimSubjects=NumTrials,
    #                                        distanceSettings=distanceSettings,
    #                                        parmLabel='BZ',
    #                                        outFilePath=baseOutPath)
    #
    # optimal = {sd: distanceSettings[sd][0].findMinMax(fitting[sd]) for sd in distanceSettings}
    # ------------------------------------------
    # ------------------------------------------
    filePath = baseOutPath + '/DecoEtAl2020_fittingBZ.mat'
    # sio.savemat(filePath, #{'JI': JI})
    #             {'Alphas': Alphas,
    #              'Betas': Betas,
    #              'swFCDfitt': fitting['swFCD'],  # swFCDfitt,
    #              'FCfitt': fitting['FC'],  # FCfitt,
    #              'GBCfitt': fitting['GBC'],  # GBCfitt
    #             })
    print(f"DONE!!! (file: {filePath})")
import arbor as arb
import numpy as np
from random import randrange as rand
import sys
from os.path import abspath as expand

from l2l.utils.experiment import Experiment
from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters
from l2l.optimizees.arbor.SC import ArbSCOptimizee

fit, swc, ref = list(map(expand, sys.argv[1:]))

name = 'ARBOR-FUN'
results_folder = '../results'
trajectory_name = 'ARBOR'

experiment = Experiment(results_folder)
traj, _ = experiment.prepare_experiment(
    trajectory_name=trajectory_name,
    name=name,
    jube_parameter={"exec": "srun -n 1 -c 8 --exclusive python"})

# Innerloop simulator
optimizee = ArbSCOptimizee(traj, fit, swc, ref)

# Outerloop optimizer initialization
parameters = GeneticAlgorithmParameters(seed=0, popsize=100, CXPB=0.5,
                                        MUTPB=0.3, NGEN=10, indpb=0.02,
                                        tournsize=100, matepar=0.5,
                                        mutpar=1)
optimizer = GeneticAlgorithmOptimizer(
    traj,
    optimizee_create_individual=optimizee.create_individual,
    optimizee_fitness_weights=(-0.1,),
    parameters=parameters)

experiment.run_experiment(optimizee=optimizee, optimizer=optimizer,
                          optimizer_parameters=parameters,
                          optimizee_parameters=None)
experiment.end_experiment(optimizer)
def prepro():
    # Make the neuronal model work as the DMF model
    # neuronalModel.alpha = 0.
    # neuronalModel.beta = 0.

    # distanceSettings = {'FC': (FC, False), 'swFCD': (swFCD, True), 'GBC': (GBC, False)}  # 'phFCD': (phFCD, True)
    distanceSettings = {'swFCD': (swFCD, True)}
    swFCD.windowSize = 80
    swFCD.windowStep = 18

    # baseGOptimNames = baseOutPath+"/fitting_we{}.mat"

    # step = 0.001
    # WEs = np.arange(0, 3.+step, step)  # Range used in the original code
    # WEs = np.arange(0, 3.+step, 0.05)  # reduced range for DEBUG only!!!

    # Model Simulations
    # ------------------------------------------
    BalanceFIC.verbose = True
    # balancedParms = BalanceFIC.Balance_AllJ9(C, WEs, baseName=J_fileNames)
    # modelParms = [balancedParms[i] for i in balancedParms]

    # Now, optimize all we (G) values: determine optimal G to work with
    print("\n\n###################################################################")
    print("# Compute optimization with L2L")
    print("###################################################################\n")
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    name = 'L2L-DecoEtAl2020-Prepro'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True,
                                            multiprocessing=False)

    # Setup the WholeBrain optimizee
    WBOptimizee.neuronalModel = neuronalModel
    WBOptimizee.integrator = integrator
    WBOptimizee.simulateBOLD = simulateBOLD
    WBOptimizee.measure = distanceSettings['swFCD'][0]  # Measure to use to compute the error
    WBOptimizee.applyFilters = distanceSettings['swFCD'][1]  # Whether to apply filters to the resulting signal or not
    outEmpFileName = baseOutPath + '/fNeuro_emp_L2L.mat'
    WBOptimizee.processedEmp = processEmpiricalSubjects(
        tc_transf, distanceSettings, outEmpFileName)['swFCD']  # reference values (e.g., empirical) to compare to.
    WBOptimizee.N = N  # Number of regions in the parcellation
    WBOptimizee.trials = NumTrials  # Number of trials to try
    optimizee_parameters = namedtuple('OptimizeeParameters', [])
    filePattern = baseOutPath + '/fitting_{}_L2L.mat'
    optimizee = WBOptimizee.WholeBrainOptimizee(traj, {'we': (0., 3.)},
                                                setupFunc=setupFunc,
                                                outFilenamePattern=filePattern)

    # =================== Test for debug only
    # traj.individual = sdict(optimizee.create_individual())
    # testing_error = optimizee.simulate(traj)
    # print("Testing error is %s", testing_error)
    # =================== end Test

    # Setup the GridSearchOptimizer
    n_grid_divs_per_axis = 60  # step 0.05
    optimizer_parameters = GridSearchParameters(
        param_grid={'we': (0., 3., n_grid_divs_per_axis)})
    optimizer = GridSearchOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-1.,),  # minimize!
        parameters=optimizer_parameters)

    experiment.run_experiment(optimizee=optimizee,
                              optimizee_parameters=optimizee_parameters,
                              optimizer=optimizer,
                              optimizer_parameters=optimizer_parameters)
    experiment.end_experiment(optimizer)
    print(f"best: {experiment.optimizer.best_individual['we']}")

    # fitting = parmSweep.distanceForAll_Parms(tc_transf, WEs, modelParms, NumSimSubjects=NumTrials,
    #                                          distanceSettings=distanceSettings,
    #                                          parmLabel='we',
    #                                          outFilePath=baseOutPath)
    # optimal = {sd: distanceSettings[sd][0].findMinMax(fitting[sd]) for sd in distanceSettings}
    # ------------------------------------------
    # ------------------------------------------
    filePath = baseOutPath + '/DecoEtAl2020_fneuro-L2L.mat'
    # sio.savemat(filePath, #{'JI': JI})
    #             {'we': WEs,
    #              'swFCDfitt': fitting['swFCD'],  # swFCDfitt,
    #              'FCfitt': fitting['FC'],  # FCfitt,
    #              'GBCfitt': fitting['GBC'],  # GBCfitt
    #             })
    # print(f"DONE!!! (file: {filePath})")

    plotTrajectory1D(optimizer.param_list['we'],
                     [v for (i, v) in traj.current_results])
    print("DONE!!!")
def main():
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    # name = 'L2L-FUN-GA'
    name = 'L2L-FUN-GS'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True,
                                            multiprocessing=False)

    # ---------------------------------------------------------------------------------------------------------
    # Benchmark function
    """
    The Ackley function has a large hole at the centre surrounded by small,
    hill-like regions. Algorithms can get trapped in one of its many local
    minima.

    reference: https://www.sfu.ca/~ssurjano/ackley.html

    :param dims: dimensionality of the function

    Note: uses the recommended variable values, which are: a = 20, b = 0.2 and c = 2π.
    """
    function_id = 4  # Select Ackley2d
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)
    # ---------------------------------------------------------------------------------------------------------

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    # function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    # parameters = GeneticAlgorithmParameters(seed=0, pop_size=50, cx_prob=0.5,
    #                                         mut_prob=0.3, n_iteration=100,
    #                                         ind_prob=0.02,
    #                                         tourn_size=15, mate_par=0.5,
    #                                         mut_par=1)
    #
    # optimizer = GeneticAlgorithmOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
    #                                       optimizee_fitness_weights=(-0.1,),
    #                                       parameters=parameters)

    # Setup the GridSearchOptimizer
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1], n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-0.1,),  # minimize!
        parameters=parameters)

    ## Optimization!!! (parameters belong to the optimizer, not the optimizee)
    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters)
    experiment.end_experiment(optimizer)
    print(f"best: {experiment.optimizer.best_individual['coords']}")
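# Illustration of the param_grid convention (an assumption inferred from the
# 'n_grid_divs_per_axis = 60  # step 0.05' example above): a
# (lower, upper, n_divs) entry spans the axis with n_divs + 1 equally spaced
# points, e.g. 'we': (0., 3., 60) enumerates 0.00, 0.05, ..., 3.00.
import numpy as np

lower, upper, n_divs = 0., 3., 60
axis_points = np.linspace(lower, upper, n_divs + 1)  # 61 points, step 0.05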