Example #1
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(
            options.xml_configuration).get_run_configuration(
                options.scenario_name)

    from_database_configuration = config.get(
        'scenario_database_configuration',
        ScenarioDatabaseConfiguration(
            database_name=options.scenario_database_name,
            database_configuration=options.scenario_database_configuration,
        ))
    to_database_name = options.flattened_database_name or (
        from_database_configuration.database_name + '_flattened')
    to_database_configuration = ScenarioDatabaseConfiguration(
        protocol=from_database_configuration.protocol,
        host_name=from_database_configuration.host_name,
        user_name=from_database_configuration.user_name,
        password=from_database_configuration.password,
        database_name=to_database_name)

    if config.get('creating_baseyear_cache_configuration', None):
        tables_to_copy = config[
            'creating_baseyear_cache_configuration'].tables_to_cache
    else:
        tables_to_copy = []  # copy all tables in the chain
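
Example #1 stops after assembling the source and target database configurations and the tables_to_copy list. The sketch below shows one way these pieces might be handed to the copier, reusing the names from the example above; the method name copy_scenario_database and its keyword arguments are assumptions, not something shown in these snippets.

# Sketch only: assumes a copy_scenario_database method that accepts the
# pieces assembled above; the call is not shown in the examples themselves.
copier = FlattenScenarioDatabaseChain()
copier.copy_scenario_database(
    from_database_configuration=from_database_configuration,
    to_database_configuration=to_database_configuration,
    tables_to_copy=tables_to_copy)
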
Example #2
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)

    if options.existing_cache_to_copy is not None:
        config[
            'creating_baseyear_cache_configuration'].cache_from_database = False
        config[
            'creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = options.existing_cache_to_copy
        if options.years_to_cache is not None:
            config[
                'creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(
                    options.years_to_cache)

    number_of_runs = config.get("number_of_runs", 1)
    number_of_runs_in_parallel = min(config.get("parallel_runs", 1),
                                     number_of_runs)
    # generate seeds for multiple runs
    root_seed = config.get("seed", None)
    seed(root_seed)
    # generate different seed for each run (each seed contains 1 number)
    seed_array = randint(1, 2**30, number_of_runs)
    list_of_cache_directories = []
    for irun in range(number_of_runs):
        config['seed'] = (seed_array[irun], )
        this_config = config.copy()
        if ((irun + 1) % number_of_runs_in_parallel) == 0:
            run_in_background = False
        else:
            run_in_background = True
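
The multi-run snippets draw one random seed per run from a root seed and use a modulo test to decide which runs block the launcher. The following self-contained sketch isolates that pattern; it assumes seed and randint come from numpy.random, since the snippets do not show their imports.

# Self-contained sketch of the seeding / throttling pattern (assumes numpy.random).
from numpy.random import randint, seed

number_of_runs = 4
number_of_runs_in_parallel = 2

seed(1234)                                      # root seed, e.g. config.get("seed")
seed_array = randint(1, 2**30, number_of_runs)  # one independent seed per run
for irun in range(number_of_runs):
    # Every number_of_runs_in_parallel-th run starts in the foreground, so at
    # most that many runs are launched before the launcher waits.
    run_in_background = ((irun + 1) % number_of_runs_in_parallel) != 0
    print(irun, int(seed_array[irun]), run_in_background)
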
Example #3
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)

    if options.existing_cache_to_copy is not None:
        config['creating_baseyear_cache_configuration'].cache_from_database = False
        config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = options.existing_cache_to_copy
        if options.years_to_cache is not None:
            config['creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(options.years_to_cache)

    number_of_runs = config.get("number_of_runs", 1)
    number_of_runs_in_parallel = min(config.get("parallel_runs", 1), number_of_runs)
    # generate seeds for multiple runs
    root_seed = config.get("seed", None)
    seed(root_seed)
    # generate different seed for each run (each seed contains 1 number)
    seed_array = randint(1,2**30, number_of_runs)
    list_of_cache_directories = []
    for irun in range(number_of_runs):
        config['seed']= (seed_array[irun],)
        this_config = config.copy()
        if ((irun + 1) % number_of_runs_in_parallel) == 0:
            run_in_background = False
        else:
            run_in_background = True
        run_manager.setup_new_run(cache_directory = this_config['cache_directory'],
Example #4
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
    
    from_database_configuration = config.get('scenario_database_configuration', ScenarioDatabaseConfiguration(database_name = options.scenario_database_name,
                                                                                                              database_configuration=options.scenario_database_configuration,
                                                                                                              ))
    to_database_name = options.flattened_database_name or (from_database_configuration.database_name + '_flattened')
    to_database_configuration = ScenarioDatabaseConfiguration(
                                    protocol = from_database_configuration.protocol,
                                    host_name = from_database_configuration.host_name,
                                    user_name = from_database_configuration.user_name,
                                    password = from_database_configuration.password,
                                    database_name = to_database_name)

    if config.get('creating_baseyear_cache_configuration', None):
        tables_to_copy = config['creating_baseyear_cache_configuration'].tables_to_cache
    else:
        tables_to_copy = []  # copy all tables in the chain
    
    copier = FlattenScenarioDatabaseChain()