        config['creating_baseyear_cache_configuration'].cache_from_database = False
        config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = options.existing_cache_to_copy
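        # restrict the base-year cache to the requested years; the option value is
        # parsed with eval, so it is expected to be a Python expression (e.g. a list or range)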
        if options.years_to_cache is not None:
            config['creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(options.years_to_cache)

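    # determine how many replications to run and how many of them may execute concurrently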
    number_of_runs = config.get("number_of_runs", 1)
    number_of_runs_in_parallel = min(config.get("parallel_runs", 1), number_of_runs)
    # generate seeds for multiple runs
    root_seed = config.get("seed", None)
    seed(root_seed)
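    # if no root seed is configured, seed(None) lets the random number generator
    # initialize itself from system entropy (assuming numpy.random's seed/randint here)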
    # generate different seed for each run (each seed contains 1 number)
    seed_array = randint(1, 2**30, number_of_runs)
    list_of_cache_directories = []
    for irun in range(number_of_runs):
        config['seed'] = (seed_array[irun],)
        this_config = config.copy()
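        # run every number_of_runs_in_parallel-th replication in the foreground; the
        # blocking call throttles the loop so at most that many runs execute at once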
        if ((irun + 1) % number_of_runs_in_parallel) == 0:
            run_in_background = False
        else:
            run_in_background = True
        run_manager.setup_new_run(cache_directory = this_config['cache_directory'],
                                  configuration = this_config)
        run_manager.run_run(this_config, run_as_multiprocess=False,
                            run_in_background=run_in_background)
        if irun == 0:
            # log file for the multiple runs will be located in the first cache
            first_cache_directory = this_config['cache_directory']
            log_file = os.path.join(first_cache_directory, 'multiple_runs.log')
            logger.enable_file_logging(log_file)
            logger.log_status("Multiple runs: %s replications" % number_of_runs)
            logger.log_status("root random seed = %s" % str(root_seed))