Example #1
    def compare_travel_data_sets(self):

        # get copied travel data csv
        copied_travel_data_location = os.path.join(self.destination, 'opus_matsim', 'tmp')
        if not os.path.exists(copied_travel_data_location):
            raise StandardError('Travel data not found: %s' % copied_travel_data_location)
        logger.log_status('Get copied travel data: %s' % copied_travel_data_location)
        # convert travel data csv into travel data set matrix
        in_storage = csv_storage(storage_location=copied_travel_data_location)
        table_name = "travel_data"
        travel_data_attribute = 'single_vehicle_to_work_travel_cost'
        travel_data_set = TravelDataDataset(in_storage=in_storage, in_table_name=table_name)
        travel_data_attribute_mat = travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
        # get existing travel data set and convert it into a travel data set matrix as well
        year = self.run_config['base_year'] + 2
        attribute_cache = AttributeCache(cache_directory=self.run_config['cache_directory'])
        cache_storage = attribute_cache.get_flt_storage_for_year(year)
        existing_travel_data_set = TravelDataDataset(in_storage=cache_storage, in_table_name=table_name)
        existing_travel_data_attribute_mat = existing_travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)

        from numpy import savetxt  # for debugging
        savetxt(os.path.join(self.destination, 'origin_travel_data.txt'), travel_data_attribute_mat, fmt="%f")
        savetxt(os.path.join(self.destination, 'existing_travel_data'), existing_travel_data_attribute_mat, fmt="%f")

        # compare both data set matrices
        compare = travel_data_attribute_mat == existing_travel_data_attribute_mat
        # return result
        return compare.all()
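Example #2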
    def compare_travel_data_sets(self):

        # get copied travel data csv
        copied_travel_data_location = os.path.join(self.destination, "opus_matsim", "tmp")
        if not os.path.exists(copied_travel_data_location):
            raise StandardError("Travel data not found: %s" % copied_travel_data_location)
        logger.log_status("Get copied travel data: %s" % copied_travel_data_location)
        # convert travel data csv into travel data set matrix
        in_storage = csv_storage(storage_location=copied_travel_data_location)
        table_name = "travel_data"
        travel_data_attribute = "single_vehicle_to_work_travel_cost"
        travel_data_set = TravelDataDataset(in_storage=in_storage, in_table_name=table_name)
        travel_data_attribute_mat = travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
        # get existing travel data set and convert it into a travel data set matrix as well
        year = self.run_config["base_year"] + 2
        attribute_cache = AttributeCache(cache_directory=self.run_config["cache_directory"])
        cache_storage = attribute_cache.get_flt_storage_for_year(year)
        existing_travel_data_set = TravelDataDataset(in_storage=cache_storage, in_table_name=table_name)
        existing_travel_data_attribute_mat = existing_travel_data_set.get_attribute_as_matrix(
            travel_data_attribute, fill=999
        )

        from numpy import savetxt  # for debugging

        savetxt(os.path.join(self.destination, "origin_travel_data.txt"), travel_data_attribute_mat, fmt="%f")
        savetxt(os.path.join(self.destination, "existing_travel_data"), existing_travel_data_attribute_mat, fmt="%f")

        # compare both data set matrices
        compare = travel_data_attribute_mat == existing_travel_data_attribute_mat
        # return result
        return compare.all()
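
Both matrices come back from get_attribute_as_matrix as dense origin-destination arrays in which missing pairs are filled with 999, so a plain elementwise comparison is meaningful. A minimal numpy sketch of the comparison idea (the values are invented for illustration):

from numpy import array

origin_mat = array([[1.5, 999.0], [2.25, 3.0]])
existing_mat = array([[1.5, 999.0], [2.25, 3.0]])

# Elementwise equality yields a boolean matrix; .all() is True only if
# every cell matches, including the 999 fill values.
compare = origin_mat == existing_mat
print(compare.all())  # True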
Example #3
def opusRun(progressCB, logCB, config):

    tm_config = config["travel_model_configuration"]
    for key, val in tm_config.iteritems():
        tm_config[str(key)] = str(val)

    opus_data_directory = tm_config["travel_data_dir"]
    opus_data_year = tm_config["year_dir"]
    csv_data_path = tm_config["travel_data_path"]
    table_name = tm_config["travel_data_table_name"]

    input_storage = csv_storage(storage_location=csv_data_path)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache())

    if table_name == "ALL":
        logCB("caching all tables...\n")
        lst = input_storage.get_table_names()
    else:
        lst = [table_name]

    for i in lst:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" % (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=i, in_storage=input_storage, out_storage=output_storage)

    logCB("Successfully exported all tables.")
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    esri_data_path = params_dict["esri_data_path"]
    esri_table_name = params_dict["esri_table_name"]
    opus_data_directory = params_dict["opus_data_directory"]
    opus_data_year = params_dict["opus_data_year"]

    input_storage = esri_storage(storage_location=esri_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)

    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache())

    if esri_table_name == "ALL":
        logCB("Sending all tables to OPUS storage...\n")
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(dataset_name=i, in_storage=input_storage, out_storage=output_storage)

    else:
        logCB("Exporting table '%s' to OPUS storage located at %s...\n" % (esri_table_name, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=esri_table_name, in_storage=input_storage, out_storage=output_storage
        )
        logCB("Finished exporting table '%s'\n" % (esri_table_name))
Example #5
def opusRun(progressCB,logCB,params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)
        
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    csv_data_path = params_dict['csv_data_path']
    table_name = params_dict['csv_table_name']
    
    input_storage = csv_storage(storage_location = csv_data_path)
    
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
    else:
        lst = [table_name]
        
    for i in lst:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
                   (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name = i,
            in_storage = input_storage,
            out_storage = output_storage)

    logCB("Successfully exported all tables.")
Example #6
def opusRun(progressCB,logCB,params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']
    
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opusdb = server.get_database(database_name=database_name, create_if_doesnt_exist=False)
    
    input_storage = sql_storage(storage_location = opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        lst = re.split(' +', table_name.strip())
        
    tables = len(lst)
    lst_out = create_list_string(lst, ', ')

    logCB('caching tables:\n%s\n' % lst_out)
        
    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
                   (i, opus_data_year, opus_data_directory))
        try:
            ExportStorage().export_dataset(
                                           dataset_name = i,
                                           in_storage = input_storage,
                                           out_storage = output_storage,
                                           overwrite = overwrite,
                                           )
        except Exception as e:
            logCB('Error in exporting %s: %s\n' % (i, e))
        progressCB(100 * j / tables)

    logCB('successfully cached tables:\n%s\n' % lst_out)
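
Unlike the single-table variants, this one accepts several table names in one space-separated string and reports progress per table. A quick illustration of the splitting and the progress arithmetic (the table names are invented):

import re

table_name = 'households  jobs   buildings'
lst = re.split(' +', table_name.strip())
print(lst)  # ['households', 'jobs', 'buildings']

tables = len(lst)
for j, i in enumerate(lst, start=1):
    # mirrors progressCB(100 * j / tables): 33, 66, 100 under integer division
    print('%s -> %d%%' % (i, 100 * j // tables))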
Example #7
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(
        opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name,
                                 create_if_doesnt_exist=False)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        lst = re.split(' +', table_name.strip())

    tables = len(lst)
    lst_out = create_list_string(lst, ', ')

    logCB('caching tables:\n%s\n' % lst_out)

    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=i,
            in_storage=input_storage,
            out_storage=output_storage,
            overwrite=overwrite,
        )
        progressCB(100 * j / tables)

    logCB('successfully cached tables:\n%s\n' % lst_out)
Example #8
def opusRun(progressCB,logCB,params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    # Output csv data path
    csv_data_path = params_dict['csv_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    execute_after_export = params_dict['execute_after_export']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [int(year) for year in os.listdir(opus_data_directory) if year.isdigit() and len(year) == 4]

    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:

        input_storage = attribute_cache.get_flt_storage_for_year(year)
        
        output_storage = csv_storage(storage_location = csv_data_path)

        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        
        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i,year,opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name = i,
                in_storage = input_storage,
                out_storage = output_storage,
                )
            
        logCB("Successfully exported all datasets.")

    if execute_after_export:
        file_name_list = [output_storage._get_file_path_for_table(i)
                          for i in opus_table_name_list]
        subprocess.Popen([execute_after_export] + file_name_list)
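
When execute_after_export is set, the exported CSV paths are appended to that command's argument list, so a post-processing script receives them directly in argv. A sketch of the receiving side (the script itself is hypothetical):

# postprocess.py, invoked as: postprocess.py households.csv jobs.csv ...
import sys

for csv_file in sys.argv[1:]:
    print('exported file: %s' % csv_file)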
Example #9
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    database_name = params_dict['database_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    opus_table_name = params_dict['opus_table_name']

    database_server_connection = params_dict['database_server_connection']
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:
        #input_storage = sql_storage(storage_location = opusdb)
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        #output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
        if opus_data_year == 'ALL':
            opusdb = server.get_database(database_name=database_name + "_" +
                                         str(year))
        output_storage = sql_storage(storage_location=opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = re.split(' +', opus_table_name.strip())
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
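
Note that when opus_data_year is 'ALL', each cached year is written to its own database, with the year appended to the base name. For instance (the base name is illustrative):

database_name = 'baseline'
for year in (2000, 2001):
    print(database_name + "_" + str(year))  # baseline_2000, baseline_2001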
Example #10
def setup_environment(cache_directory, year, package_order, additional_datasets={}):
    gc.collect()
    ss = SimulationState(new_instance=True)
    ss.set_cache_directory(cache_directory)
    ss.set_current_time(year)
    ac = AttributeCache()
    storage = ac.get_flt_storage_for_year(year)
    sc = SessionConfiguration(new_instance=True,
                         package_order=package_order,
                         in_storage=ac)
    logger.log_status("Setup environment for year %s. Use cache directory %s." % (year, storage.get_storage_location()))
    dp = sc.get_dataset_pool()
    for name, ds in additional_datasets.iteritems():
        dp.replace_dataset(name, ds)
    return dp
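
setup_environment returns the session's dataset pool, so callers typically fetch datasets from it right away. A hypothetical call (the cache path, year, and package order are placeholders):

dp = setup_environment('/tmp/urbansim_cache', 2005, ['urbansim', 'opus_core'])
households = dp.get_dataset('household')  # loaded from the year-2005 flt storage
# extra in-memory datasets can be injected via additional_datasets={'zone': my_zone}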
Example #11
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    # Output esri data path
    esri_data_path = params_dict['esri_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]

    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:

        input_storage = attribute_cache.get_flt_storage_for_year(year)

        if esri_is_avail:
            output_storage = esri_storage(storage_location=esri_data_path)
        else:
            output_storage = None
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
Example #12
def opusRun(progressCB,logCB,params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    # Output esri data path
    esri_data_path = params_dict['esri_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [int(year) for year in os.listdir(opus_data_directory) if year.isdigit() and len(year) == 4]

    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:

        input_storage = attribute_cache.get_flt_storage_for_year(year)
        
        if esri_is_avail:
            output_storage = esri_storage(storage_location = esri_data_path)
        else:
            output_storage = None
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        
        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i,year,opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name = i,
                in_storage = input_storage,
                out_storage = output_storage,
                )
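Example #13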
def opusRun(progressCB,logCB,params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    database_name = params_dict['database_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    opus_table_name = params_dict['opus_table_name']
    
    database_server_connection = params_dict['database_server_connection']
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [int(year) for year in os.listdir(opus_data_directory) if year.isdigit() and len(year) == 4]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:
        #input_storage = sql_storage(storage_location = opusdb)
        input_storage = attribute_cache.get_flt_storage_for_year(year)
                
        #output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
        output_storage = sql_storage(storage_location = opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        
        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i,year,opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name = i,
                in_storage = input_storage,
                out_storage = output_storage,
                )
Example #14
def setup_environment(cache_directory,
                      year,
                      package_order,
                      additional_datasets={}):
    gc.collect()
    ss = SimulationState(new_instance=True)
    ss.set_cache_directory(cache_directory)
    ss.set_current_time(year)
    ac = AttributeCache()
    storage = ac.get_flt_storage_for_year(year)
    sc = SessionConfiguration(new_instance=True,
                              package_order=package_order,
                              in_storage=ac)
    logger.log_status(
        "Setup environment for year %s. Use cache directory %s." %
        (year, storage.get_storage_location()))
    dp = sc.get_dataset_pool()
    for name, ds in additional_datasets.iteritems():
        dp.replace_dataset(name, ds)
    return dp
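Example #15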
def opusRun(progressCB,logCB,params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    
    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration = dbs_config)
    opusdb = server.get_database(database_name=database_name)
    
    input_storage = sql_storage(storage_location = opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name = i,
                in_storage = input_storage,
                out_storage = output_storage,
            )
    else:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
                   (table_name, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name = table_name,
            in_storage = input_storage,
            out_storage = output_storage)
Example #16
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
    else:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (table_name, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=table_name,
                                       in_storage=input_storage,
                                       out_storage=output_storage)
Example #17
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    esri_data_path = params_dict['esri_data_path']
    esri_table_name = params_dict['esri_table_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']

    input_storage = esri_storage(storage_location=esri_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)

    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if esri_table_name == 'ALL':
        logCB("Sending all tables to OPUS storage...\n")
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )

    else:
        logCB("Exporting table '%s' to OPUS storage located at %s...\n" %
              (esri_table_name, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=esri_table_name,
            in_storage=input_storage,
            out_storage=output_storage,
        )
        logCB("Finished exporting table '%s'\n" % (esri_table_name))
    def create_attribute_cache_with_data(self, cache_dir, data):
        """Populate the cache_dir with the given datasets for the given years.

        data is a dictionary with year as key.
        The value of each year is a dictionary with dataset name as key.
        The value for each dataset name is a dictionary with attribute name as key.
        The value for each attribute name is a numpy array of values.

        cache_dir must exist.
        """

        SimulationState().set_cache_directory(cache_dir)
        attr_cache = AttributeCache()

        for year, datasets in data.iteritems():
            year_dir = os.path.join(cache_dir, str(year))
            if not os.path.exists(year_dir):
                os.makedirs(year_dir)
            SimulationState().set_current_time(year)
            flt_storage = attr_cache.get_flt_storage_for_year(year)
            for dataset_name, attributes in datasets.iteritems():
                flt_storage.write_table(table_name=dataset_name, table_data=attributes)
Example #19
    def create_attribute_cache_with_data(self, cache_dir, data):
        """Populate the cache_dir with the given datasets for the given years.
        
        data is a dictionary with year as key.
        The value of each year is a dictionary with dataset name as key.
        The value for each dataset name is a dictionary with attribute name as key.
        The value for each attribute name is a numpy array of values.
        
        cache_dir must exist.
        """

        SimulationState().set_cache_directory(cache_dir)
        attr_cache = AttributeCache()

        for year, datasets in data.iteritems():
            year_dir = os.path.join(cache_dir, str(year))
            if not os.path.exists(year_dir):
                os.makedirs(year_dir)
            SimulationState().set_current_time(year)
            flt_storage = attr_cache.get_flt_storage_for_year(year)
            for dataset_name, attributes in datasets.iteritems():
                flt_storage.write_table(table_name=dataset_name,
                                        table_data=attributes)
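
The docstring pins down the shape of data precisely: year, then dataset name, then attribute name, then a numpy array. A minimal literal that satisfies it (the dataset and attribute names are invented):

from numpy import array

data = {
    2000: {
        'households': {
            'household_id': array([1, 2, 3]),
            'income': array([40000, 55000, 72000]),
        },
    },
    2001: {
        'households': {
            'household_id': array([1, 2, 3, 4]),
            'income': array([41000, 56000, 73000, 38000]),
        },
    },
}
# self.create_attribute_cache_with_data('/tmp/cache', data)  # cache dir must already exist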
Example #20
    def run(self, table_names, out_storage=None, table_name_pattern=None, cache_directory=None, year=None, **kwargs):
        """
        export specified tables to database

        table_name_pattern: For example '{table_name}_{scenario_name}_{year}'
        """
        if not hasattr(self, "out_storage"):
            if out_storage is None:
                raise ValueError("Either the out_storage argument needs to be specified or "
                                 "prepare_for_run called before the run method to create a valid out_storage.")
            else:
                self.out_storage = out_storage
        sim_state = SimulationState()
        if sim_state.get_current_time() == 0:
            sim_state.set_current_time(9999)
        if cache_directory is None:
            cache_directory = sim_state.get_cache_directory()

        attr_cache = AttributeCache(cache_directory=cache_directory)
        if year is None:
            years = attr_cache._get_sorted_list_of_years()
        else:
            assert isinstance(year, int)
            years = [year]

        for table_name in table_names:
            kwargs["table_name"] = table_name
            for year in years:
                kwargs["year"] = year
                out_table_name = table_name_pattern.format(**kwargs)
                in_storage = attr_cache.get_flt_storage_for_year(year)
                # cache_path = os.path.join(cache_directory, str(year))
                # in_storage = flt_storage(storage_location=cache_path)
                # TODO drop_table(table_name) if table_name exists
                ExportStorage().export_dataset(
                    table_name, in_storage=in_storage, out_storage=self.out_storage, out_dataset_name=out_table_name
                )
        self.post_run(kwargs["scenario_name"], years)
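
table_name_pattern is an ordinary str.format template filled from the kwargs that run accumulates, so the docstring's example expands as follows (the values are illustrative). Note that run also reads kwargs['scenario_name'] for post_run, so the caller must supply it:

kwargs = {'table_name': 'households', 'scenario_name': 'baseline', 'year': 2010}
out_table_name = '{table_name}_{scenario_name}_{year}'.format(**kwargs)
print(out_table_name)  # households_baseline_2010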
Example #21
    def __init__(self, output_dir=None, year=None):
        ''' Constructor
        '''
        # get working path as an anchor e.g. to determine the config file location.
        self.working_path = test_path.__path__[0]
        print "Working path: %s" % self.working_path
        # get config file location
        self.config_file = os.path.join(self.working_path, 'configs', 'seattle_parcel_travel_cost_test.xml')

        # get seattle_parcel configuration
        config = XMLConfiguration(self.config_file).get_run_configuration("Seattle_baseline")

        self.input_storage = None

        # get first simulation year
        self.year = year
        if self.year is None:
            self.year = config['base_year']
            base_year_data_path = os.path.join(os.environ['OPUS_DATA_PATH'], 'seattle_parcel', 'base_year_data')
            attribute_cache = AttributeCache(cache_directory=base_year_data_path)
            self.input_storage = attribute_cache.get_flt_storage_for_year(self.year)
        else:
            self.input_storage = AttributeCache().get_flt_storage_for_year(self.year)

        # get output dir path
        output_directory = output_dir
        if output_directory is None:
            # set default
            output_directory = os.path.join(os.environ['OPUS_HOME'], 'opus_matsim', 'tmp')
        if not os.path.exists(output_directory):
            try:
                os.mkdir(output_directory)
            except OSError:
                pass

        # init
        self.csv_data_path = output_directory  # os.path.join(output_directory, 'travel_data_dir')
Example #22
    def __init__(self, output_dir=None, year=None):
        ''' Constructor
        '''
        # get working path as an anchor e.g. to determine the config file location.
        self.working_path = test_path.__path__[0]
        print "Working path: %s" % self.working_path
        # get config file location
        self.config_file = os.path.join(self.working_path, 'configs', 'seattle_parcel_travel_cost_test.xml')

        # get seattle_parcel configuration
        config = XMLConfiguration(self.config_file).get_run_configuration("Seattle_baseline")

        self.input_storage = None

        # get first simulation year
        self.year = year
        if self.year is None:
            self.year = config['base_year']
            base_year_data_path = paths.get_opus_data_path_path('seattle_parcel', 'base_year_data')
            attribute_cache = AttributeCache(cache_directory=base_year_data_path)
            self.input_storage = attribute_cache.get_flt_storage_for_year(self.year)
        else:
            self.input_storage = AttributeCache().get_flt_storage_for_year(self.year)

        # get output dir path
        output_directory = output_dir
        if output_directory is None:
            # set default
            output_directory = paths.get_opus_home_path('opus_matsim', 'tmp')
        if not os.path.exists(output_directory):
            try:
                os.mkdir(output_directory)
            except OSError:
                pass

        # init
        self.csv_data_path = output_directory  # os.path.join(output_directory, 'travel_data_dir')
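Example #23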
        parser.print_help()
        sys.exit(1)

    if table_name is None:
        table_name = 'ALL'
    
    dbserverconfig = EstimationDatabaseConfiguration(
        database_configuration = options.database_configuration 
    )
    opusdb = OpusDatabase(dbserverconfig, db_name)

    input_storage = sql_storage(storage_location = opusdb)

    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logger.log_status('Caching all tables in database...')
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name = i,
                in_storage = input_storage,
                out_storage = output_storage,
            )
        sys.exit(1)
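Example #24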
    tab_directory = options.tab_directory
    attribute_cache_directory = options.attribute_cache_directory
    table_name = options.table_name
    cache_year = options.cache_year

    if (tab_directory is None or attribute_cache_directory is None
            or table_name is None or cache_year is None):

        parser.print_help()
        sys.exit(1)

    input_storage = tab_storage(storage_location=tab_directory)

    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    logger.start_block(
        "Exporting table '%s' to year %s of cache located at %s..." %
        (table_name, cache_year, attribute_cache_directory))
    try:
        ExportStorage().export_dataset(
            dataset_name=table_name,
            in_storage=input_storage,
            out_storage=output_storage,
        )
    finally:
        logger.end_block()