Example #1
def main():
    name = 'L2L-FUN-GS'
    experiment = Experiment(root_dir_path='../results')
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True)

    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed)

    ## Outerloop optimizer initialization
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1], n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
                                    optimizee_fitness_weights=(-0.1,),
                                    parameters=parameters)
    # Experiment run
    experiment.run_experiment(optimizee=optimizee, optimizer=optimizer,
                              optimizer_parameters=parameters)
    # End experiment
    experiment.end_experiment(optimizer)
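These snippets omit their import lines. A minimal set that Example #1 appears to rely on is sketched below; the module paths are an assumption about the layout of the L2L package, not something shown in the examples.

import numpy as np

# Assumed L2L module layout; adjust to the installed package version.
from l2l.utils.experiment import Experiment
from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions
from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee
from l2l.optimizees.functions import tools as function_tools
from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters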
Example #2
    def test_gd(self):
        n_grid_divs_per_axis = 2
        optimizer_parameters = GridSearchParameters(
            param_grid={
                'coords': (self.optimizee.bound[0], self.optimizee.bound[1],
                           n_grid_divs_per_axis)
            })
        optimizer = GridSearchOptimizer(
            self.trajectory,
            optimizee_create_individual=self.optimizee.create_individual,
            optimizee_fitness_weights=(-0.1, ),
            parameters=optimizer_parameters)
        self.assertIsNotNone(optimizer.parameters)
        self.assertIsNotNone(self.experiment)

        try:

            self.experiment.run_experiment(
                optimizee=self.optimizee,
                optimizee_parameters=self.optimizee_parameters,
                optimizer=optimizer,
                optimizer_parameters=optimizer_parameters)
        except Exception as e:
            self.fail(type(e).__name__)
        print(self.experiment.optimizer)
        best = self.experiment.optimizer.best_individual['coords']
        self.assertEqual(best[0], 5)
        self.assertEqual(best[1], 5)
        self.experiment.end_experiment(optimizer)
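Each param_grid entry above has the form (lower_bound, upper_bound, n_grid_divs_per_axis). Below is a minimal sketch of the expansion this test seems to assume, with each axis split into n_grid_divs_per_axis equal intervals; the bounds of [-5, 5] are inferred from the asserted optimum (5, 5) and are an assumption.

import numpy as np

# Hypothetical expansion of one 'coords' entry: n_divs intervals -> n_divs + 1 points per axis.
lower, upper, n_divs = -5.0, 5.0, 2
axis = np.linspace(lower, upper, n_divs + 1)              # [-5., 0., 5.]
grid = np.array(np.meshgrid(axis, axis)).T.reshape(-1, 2)
print(len(grid))                                          # 9 candidate 'coords' individuals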
Example #3
def prepro():
    # Make the neuronal model work as the DMF model
    # neuronalModel.alpha = 0.
    # neuronalModel.beta = 0.

    # distanceSettings = {'FC': (FC, False), 'swFCD': (swFCD, True), 'GBC': (GBC, False)}  #   'phFCD': (phFCD, True)
    distanceSettings = {'swFCD': (swFCD, True)}
    swFCD.windowSize = 80
    swFCD.windowStep = 18

    # baseGOptimNames = baseOutPath+"/fitting_we{}.mat"

    # step = 0.001
    # WEs = np.arange(0, 3.+step, step)  # Range used in the original code
    # WEs = np.arange(0, 3.+step, 0.05)  # reduced range for DEBUG only!!!

    # Model Simulations
    # ------------------------------------------
    BalanceFIC.verbose = True
    # balancedParms = BalanceFIC.Balance_AllJ9(C, WEs, baseName=J_fileNames)
    # modelParms = [balancedParms[i] for i in balancedParms]

    # Now, optimize all we (G) values: determine optimal G to work with
    print("\n\n###################################################################")
    print("# Compute optimization with L2L")
    print("###################################################################\n")
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    name = 'L2L-DecoEtAl2020-Prepro'
    traj, _ = experiment.prepare_experiment(name=name,
                                            log_stdout=True,
                                            multiprocessing=False)

    # Set up the WholeBrain optimizee
    WBOptimizee.neuronalModel = neuronalModel
    WBOptimizee.integrator = integrator
    WBOptimizee.simulateBOLD = simulateBOLD
    WBOptimizee.measure = distanceSettings['swFCD'][0]  # Measure to use to compute the error
    WBOptimizee.applyFilters = distanceSettings['swFCD'][1]  # Whether to apply filters to the resulting signal or not
    outEmpFileName = baseOutPath + '/fNeuro_emp_L2L.mat'
    WBOptimizee.processedEmp = processEmpiricalSubjects(tc_transf, distanceSettings,
                                                        outEmpFileName)['swFCD']  # reference values (e.g., empirical) to compare to
    WBOptimizee.N = N  # Number of regions in the parcellation
    WBOptimizee.trials = NumTrials  # Number of trials to try
    optimizee_parameters = namedtuple('OptimizeeParameters', [])

    filePattern = baseOutPath + '/fitting_{}_L2L.mat'
    optimizee = WBOptimizee.WholeBrainOptimizee(traj, {'we': (0., 3.)},
                                                setupFunc=setupFunc,
                                                outFilenamePattern=filePattern)

    # =================== Test for debug only
    # traj.individual = sdict(optimizee.create_individual())
    # testing_error = optimizee.simulate(traj)
    # print("Testing error is %s", testing_error)
    # =================== end Test

    # Setup the GridSearchOptimizer
    n_grid_divs_per_axis = 60  # i.e., a step of 0.05 over the [0, 3] range
    optimizer_parameters = GridSearchParameters(
        param_grid={'we': (0., 3., n_grid_divs_per_axis)})
    optimizer = GridSearchOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-1., ),  # minimize!
        parameters=optimizer_parameters)

    experiment.run_experiment(optimizee=optimizee,
                              optimizee_parameters=optimizee_parameters,
                              optimizer=optimizer,
                              optimizer_parameters=optimizer_parameters)
    experiment.end_experiment(optimizer)
    print(f"best: {experiment.optimizer.best_individual['we']}")
    # fitting = parmSweep.distanceForAll_Parms(tc_transf, WEs, modelParms, NumSimSubjects=NumTrials,
    #                                          distanceSettings=distanceSettings,
    #                                          parmLabel='we',
    #                                          outFilePath=baseOutPath)

    # optimal = {sd: distanceSettings[sd][0].findMinMax(fitting[sd]) for sd in distanceSettings}
    # ------------------------------------------
    # ------------------------------------------

    filePath = baseOutPath + '/DecoEtAl2020_fneuro-L2L.mat'
    # sio.savemat(filePath, #{'JI': JI})
    #             {'we': WEs,
    #              'swFCDfitt': fitting['swFCD'],  # swFCDfitt,
    #              'FCfitt': fitting['FC'],  # FCfitt,
    #              'GBCfitt': fitting['GBC'],  # GBCfitt
    #             })
    # print(f"DONE!!! (file: {filePath})")
    plotTrajectory1D(optimizer.param_list['we'],
                     [v for (i, v) in traj.current_results])
    print("DONE!!!")
Example #4
def main():
    name = 'L2L-FUN-GS'
    try:
        with open('bin/path.conf') as f:
            root_dir_path = f.read().strip()
    except FileNotFoundError:
        raise FileNotFoundError(
            "You have not set the root path to store your results."
            " Write the path to a path.conf text file in the bin directory"
            " before running the simulation"
        )
    paths = Paths(name, dict(run_no='test'), root_dir_path=root_dir_path)

    print("All output logs can be found in directory ", paths.logs_path)

    traj_file = os.path.join(paths.output_dir_path, 'data.h5')

    # Create an environment that handles running our simulation
    # This initializes an environment
    env = Environment(trajectory=name, filename=traj_file, file_title='{} data'.format(name),
                      comment='{} data'.format(name),
                      add_time=True,
                      automatic_storing=True,
                      log_stdout=False,  # Sends stdout to logs
                      )
    create_shared_logger_data(logger_names=['bin', 'optimizers'],
                              log_levels=['INFO', 'INFO'],
                              log_to_consoles=[True, True],
                              sim_name=name,
                              log_directory=paths.logs_path)
    configure_loggers()

    # Get the trajectory from the environment
    traj = env.trajectory

    # Set JUBE params
    traj.f_add_parameter_group("JUBE_params", "Contains JUBE parameters")
    # Execution command
    traj.f_add_parameter_to_group("JUBE_params", "exec", "python " +
                                  os.path.join(paths.simulation_path, "run_files/run_optimizee.py"))
    # Paths
    traj.f_add_parameter_to_group("JUBE_params", "paths", paths)


    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed)

    # Prepare optimizee for jube runs
    jube.prepare_optimizee(optimizee, paths.simulation_path)

    ## Outerloop optimizer initialization
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1], n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
                                    optimizee_fitness_weights=(-0.1,),
                                    parameters=parameters)

    # Add post processing
    env.add_postprocessing(optimizer.post_process)

    # Run the simulation with all parameter combinations
    env.run(optimizee.simulate)

    ## Outerloop optimizer end
    optimizer.end(traj)

    # Finally disable logging and close all log-files
    env.disable_logging()
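This Environment-based variant has no experiment.optimizer handle. If the grid-search optimizer exposes best_individual as in Examples #2 and #6 (an assumption for this workflow), the result can be read back after optimizer.end(traj):

# Sketch only: 'best_individual' is borrowed from Examples #2 and #6.
best = optimizer.best_individual['coords']
print("{} optimum found on the grid: {}".format(benchmark_name, best))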
Example #5
def main():
    name = 'L2L-FUN-GS'
    try:
        with open('bin/path.conf') as f:
            root_dir_path = f.read().strip()
    except FileNotFoundError:
        raise FileNotFoundError(
            "You have not set the root path to store your results."
            " Write the path to a path.conf text file in the bin directory"
            " before running the simulation")
    paths = Paths(name, dict(run_no='test'), root_dir_path=root_dir_path)

    print("All output logs can be found in directory ", paths.logs_path)

    traj_file = os.path.join(paths.output_dir_path, 'data.h5')

    # Create an environment that handles running our simulation
    # This initializes an environment
    env = Environment(
        trajectory=name,
        filename=traj_file,
        file_title='{} data'.format(name),
        comment='{} data'.format(name),
        add_time=True,
        automatic_storing=True,
        log_stdout=False,  # Sends stdout to logs
    )
    create_shared_logger_data(logger_names=['bin', 'optimizers'],
                              log_levels=['INFO', 'INFO'],
                              log_to_consoles=[True, True],
                              sim_name=name,
                              log_directory=paths.logs_path)
    configure_loggers()

    # Get the trajectory from the environment
    traj = env.trajectory

    # Set JUBE params
    traj.f_add_parameter_group("JUBE_params", "Contains JUBE parameters")

    # Scheduler parameters
    # Name of the scheduler
    # traj.f_add_parameter_to_group("JUBE_params", "scheduler", "Slurm")
    # Command to submit jobs to the schedulers
    traj.f_add_parameter_to_group("JUBE_params", "submit_cmd", "sbatch")
    # Template file for the particular scheduler
    traj.f_add_parameter_to_group("JUBE_params", "job_file", "job.run")
    # Number of nodes to request for each run
    traj.f_add_parameter_to_group("JUBE_params", "nodes", "1")
    # Requested time for the compute resources
    traj.f_add_parameter_to_group("JUBE_params", "walltime", "00:01:00")
    # MPI Processes per node
    traj.f_add_parameter_to_group("JUBE_params", "ppn", "1")
    # CPU cores per MPI process
    traj.f_add_parameter_to_group("JUBE_params", "cpu_pp", "1")
    # Threads per process
    traj.f_add_parameter_to_group("JUBE_params", "threads_pp", "1")
    # Type of emails to be sent from the scheduler
    traj.f_add_parameter_to_group("JUBE_params", "mail_mode", "ALL")
    # Email to notify events from the scheduler
    traj.f_add_parameter_to_group("JUBE_params", "mail_address",
                                  "*****@*****.**")
    # Error file for the job
    traj.f_add_parameter_to_group("JUBE_params", "err_file", "stderr")
    # Output file for the job
    traj.f_add_parameter_to_group("JUBE_params", "out_file", "stdout")
    # JUBE parameters for multiprocessing. Relevant even without scheduler.
    # MPI Processes per job
    traj.f_add_parameter_to_group("JUBE_params", "tasks_per_job", "1")
    # The execution command
    traj.f_add_parameter_to_group(
        "JUBE_params", "exec",
        "mpirun python3 " + root_dir_path + "/run_files/run_optimizee.py")
    # Ready file for a generation
    traj.f_add_parameter_to_group("JUBE_params", "ready_file",
                                  root_dir_path + "/readyfiles/ready_w_")
    # Path where the job will be executed
    traj.f_add_parameter_to_group("JUBE_params", "work_path", root_dir_path)

    ## Benchmark function
    function_id = 4
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj,
                                           benchmark_function,
                                           seed=optimizee_seed)

    # Prepare optimizee for jube runs
    jube.prepare_optimizee(optimizee, root_dir_path)

    ## Outerloop optimizer initialization
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1],
                   n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-0.1, ),
        parameters=parameters)

    # Add post processing
    env.add_postprocessing(optimizer.post_process)

    # Run the simulation with all parameter combinations
    env.run(optimizee.simulate)

    ## Outerloop optimizer end
    optimizer.end(traj)

    # Finally disable logging and close all log-files
    env.disable_logging()
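Examples #4 and #5 expect a plain-text bin/path.conf holding the root directory for results; a one-time setup sketch follows (the concrete path is a placeholder):

# Run once before starting the simulation; replace the placeholder with a writable directory.
with open('bin/path.conf', 'w') as f:
    f.write('/absolute/path/to/results')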
Example #6
def main():
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    # name = 'L2L-FUN-GA'
    name = 'L2L-FUN-GS'
    traj, _ = experiment.prepare_experiment(name=name,
                                            log_stdout=True,
                                            multiprocessing=False)

    # ---------------------------------------------------------------------------------------------------------
    # Benchmark function
    """
    Ackley function has a large hole in at the centre surrounded by small hill like regions. Algorithms can get
    trapped in one of its many local minima.
    reference: https://www.sfu.ca/~ssurjano/ackley.html
    :param dims: dimensionality of the function
    Note: uses the recommended variable values, which are: a = 20, b = 0.2 and c = 2π.
    """
    function_id = 4  # Select Ackley2d
    bench_functs = BenchmarkedFunctions()
    (benchmark_name, benchmark_function), benchmark_parameters = \
        bench_functs.get_function_by_index(function_id, noise=True)
    # ---------------------------------------------------------------------------------------------------------

    optimizee_seed = 100
    random_state = np.random.RandomState(seed=optimizee_seed)
    # function_tools.plot(benchmark_function, random_state)

    ## Innerloop simulator
    optimizee = FunctionGeneratorOptimizee(traj,
                                           benchmark_function,
                                           seed=optimizee_seed)

    ## Outerloop optimizer initialization
    # parameters = GeneticAlgorithmParameters(seed=0, pop_size=50, cx_prob=0.5,
    #                                         mut_prob=0.3, n_iteration=100,
    #                                         ind_prob=0.02,
    #                                         tourn_size=15, mate_par=0.5,
    #                                         mut_par=1
    #                                         )
    #
    # optimizer = GeneticAlgorithmOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
    #                                       optimizee_fitness_weights=(-0.1,),
    #                                       parameters=parameters)

    # Setup the GridSearchOptimizer
    n_grid_divs_per_axis = 30
    parameters = GridSearchParameters(param_grid={
        'coords': (optimizee.bound[0], optimizee.bound[1],
                   n_grid_divs_per_axis)
    })
    optimizer = GridSearchOptimizer(
        traj,
        optimizee_create_individual=optimizee.create_individual,
        optimizee_fitness_weights=(-0.1, ),  # minimize!
        parameters=parameters)

    ## Optimization!!!
    experiment.run_experiment(optimizer=optimizer,
                              optimizee=optimizee,
                              optimizer_parameters=parameters)
    experiment.end_experiment(optimizer)
    print(f"best: {experiment.optimizer.best_individual['coords']}")
Example #7
def Fitting():
    baseOutPath = 'Data_Produced/DecoEtAl2020'

    # %%%%%%%%%%%%%%% Set General Model Parameters
    we = 2.1  # Global Coupling parameter, found in the DecoEtAl2018_Prepro_* file...
    J_fileName = baseOutPath+"/J_Balance_we2.1.mat"  # "Data_Produced/SC90/J_test_we{}.mat"
    balancedG = BalanceFIC.Balance_J9(we, C, False, J_fileName)
    balancedG['J'] = balancedG['J'].flatten()
    balancedG['we'] = balancedG['we']
    neuronalModel.setParms(balancedG)

    # distanceSettings = {'FC': (FC, False), 'swFCD': (swFCD, True), 'GBC': (GBC, False)}  #   'phFCD': (phFCD, True)
    distanceSettings = {'swFCD': (swFCD, True)}
    swFCD.windowSize = 80
    swFCD.windowStep = 18

    # J_fileNames = baseOutPath+"/J_Balance_we{}.mat"

    # step = 0.05
    # Alphas = np.arange(-0.6, 0+step, step)  # Range used in the original code for B
    # Betas = np.arange(0, 2+step, step)    # Range used in the original code for Z
    Alphas = np.arange(-0.6, 0+0.1, 0.1)  # reduced range for DEBUG only!!!
    Betas = np.arange(0, 2+0.2, 0.2)  # reduced range for DEBUG only!!!

    # grid = np.meshgrid(Alphas, Betas)
    # grid = np.round(grid[0],3), np.round(grid[1],3)
    # gridParms = [{'alpha': a, 'beta': b} for a,b in np.nditer(grid)]

    # Model Simulations
    # ------------------------------------------
    # Now, optimize all alpha (B), beta (Z) values: determine optimal (B,Z) to work with
    print("\n\n###################################################################")
    print("# Fitting (B,Z)")
    print("###################################################################\n")
    experiment = Experiment(root_dir_path='Data_Produced/L2L')
    name = 'L2L-DecoEtAl2020-Prepro'
    traj, _ = experiment.prepare_experiment(name=name, log_stdout=True, multiprocessing=False)

    # Set up the WholeBrain optimizee
    WBOptimizee.neuronalModel = neuronalModel
    WBOptimizee.integrator = integrator
    WBOptimizee.simulateBOLD = simulateBOLD
    distanceSettings = {'swFCD': (swFCD, True)}  # We need to overwrite this, as L2L only works with ONE observable at a time.
    WBOptimizee.measure = distanceSettings['swFCD'][0]  # Measure to use to compute the error
    WBOptimizee.applyFilters = distanceSettings['swFCD'][1]  # Whether to apply filters to the resulting signal or not
    outEmpFileName = baseOutPath + '/fNeuro_emp_L2L.mat'
    WBOptimizee.processedEmp = processEmpiricalSubjects(tc_transf,
                                                        distanceSettings,
                                                        outEmpFileName)['swFCD']  # reference values (e.g., empirical) to compare to.
    WBOptimizee.N = N  # Number of regions in the parcellation
    WBOptimizee.trials = NumTrials  # Number of trials to try
    optimizee_parameters = namedtuple('OptimizeeParameters', [])

    filePattern = baseOutPath + '/fitting_{}_L2L.mat'
    optimizee = WBOptimizee.WholeBrainOptimizee(traj, {'alpha': (-0.6, 0), 'beta': (0., 2.)}, outFilenamePattern=filePattern)  #setupFunc=setupFunc,

    # =================== Test for debug only
    # traj.individual = sdict(optimizee.create_individual())
    # testing_error = optimizee.simulate(traj)
    # print("Testing error is %s", testing_error)
    # =================== end Test

    # Setup the GridSearchOptimizer
    optimizer_parameters = GridSearchParameters(param_grid={
        'alpha': (-0.6, 0., 6),
        'beta': (0., 2., 10)
    })
    optimizer = GridSearchOptimizer(traj,
                                    optimizee_create_individual=optimizee.create_individual,
                                    optimizee_fitness_weights=(-1.,),  # minimize!
                                    parameters=optimizer_parameters)

    experiment.run_experiment(optimizee=optimizee,
                              optimizee_parameters=optimizee_parameters,
                              optimizer=optimizer,
                              optimizer_parameters=optimizer_parameters)
    experiment.end_experiment(optimizer)
    print(f"best: alpha={experiment.optimizer.best_individual['alpha']} & beta={experiment.optimizer.best_individual['beta']}")

    # fitting = optim1D.distanceForAll_Parms(tc_transf, grid, gridParms, NumSimSubjects=NumTrials,
    #                                        distanceSettings=distanceSettings,
    #                                        parmLabel='BZ',
    #                                        outFilePath=baseOutPath)
    #
    # optimal = {sd: distanceSettings[sd][0].findMinMax(fitting[sd]) for sd in distanceSettings}
    # ------------------------------------------
    # ------------------------------------------

    filePath = baseOutPath+'/DecoEtAl2020_fittingBZ.mat'
    # sio.savemat(filePath, #{'JI': JI})
    #             {'Alphas': Alphas,
    #              'Betas': Betas,
    #              'swFCDfitt': fitting['swFCD'],  # swFCDfitt,
    #              'FCfitt': fitting['FC'],  # FCfitt,
    #              'GBCfitt': fitting['GBC'],  # GBCfitt
    #             })
    print(f"DONE!!! (file: {filePath})")