Example #1
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp(prefix="opus_tmp_test_indicators")
        storage = StorageFactory().get_storage("dict_storage")

        tables_data = {
            "large_areas": {"large_area_id": array([1, 2, 3])},
            "fazes": {"faz_id": array([1, 2, 3, 4]), "large_area_id": array([1, 2, 3, 3])},
            "zones": {"zone_id": array([1, 2, 3, 4, 5]), "faz_id": array([1, 2, 2, 3, 4])},
            "gridcells": {
                "grid_id": array([1, 2, 3, 4, 5, 6, 7, 8, 9]),
                "zone_id": array([1, 1, 1, 2, 2, 3, 3, 4, 5]),
                "total_land_value": array([10, 11, 12, 13, 14, 15, 16, 17, 18]),
                "is_in_plan_type_group_residential": array([1, 1, 1, 1, 0, 0, 1, 0, 1]),
            },
            "urbansim_constants": {"acres": array([1.5])},
        }

        self.year = 1000
        SimulationState().set_current_time(self.year)
        exporter = ExportStorage()
        for table_name, table_data in tables_data.iteritems():
            storage.write_table(table_name=table_name, table_data=table_data)
            exporter.export_dataset(
                dataset_name=table_name, in_storage=storage, out_storage=AttributeCache(cache_directory=self.temp_dir)
            )
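
A matching tearDown is the natural companion to this setUp; a minimal sketch (assuming shutil is imported alongside tempfile):

    def tearDown(self):
        # Clean up the temporary attribute cache created in setUp.
        shutil.rmtree(self.temp_dir, ignore_errors=True)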
Example #3
def opusRun(progressCB,logCB,params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)
        
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    csv_data_path = params_dict['csv_data_path']
    table_name = params_dict['csv_table_name']
    
    input_storage = csv_storage(storage_location = csv_data_path)
    
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
    else:
        lst = [table_name]
        
    for i in lst:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
                   (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name = i,
            in_storage = input_storage,
            out_storage = output_storage)

    logCB("Successfully exported all tables.")
    def run(self):
        time = -1
        latest = ""
        directoryname = 'data/vibe_gridcell/runs/'
        if self.isParcel is True:
            directoryname = 'data/vibe_parcel/'
        for filename in os.listdir(os.path.join(os.environ['OPUS_HOME'], directoryname)):
            print filename
            if time == -1:
                time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
                latest = filename
            if os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename)) > time:
                time = os.path.getmtime(os.path.join(os.environ['OPUS_HOME'], directoryname, filename))
                latest = filename

        config = DatabaseServerConfiguration(host_name='localhost',
                                             user_name='urbansim',
                                             password='******',
                                             protocol='mysql')
        db_server = DatabaseServer(config)
        for i in range(1981, 1980 + int(self.YearsToRun)):
            newdir = latest + '/' + str(i)
            flt_directory_in = os.path.join(os.environ['OPUS_HOME'], directoryname, newdir)
            input_storage = flt_storage(storage_location=flt_directory_in)
            db = db_server.get_database('ress_' + str(i))
            output_storage = StorageFactory().get_storage('sql_storage', storage_location=db)
            ExportStorage().export(in_storage=input_storage, out_storage=output_storage)
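
The mtime scan at the top of run() selects the most recently modified run directory; an equivalent, more compact sketch (assuming the directory is non-empty):

    # Pick the most recently modified entry in the runs directory.
    base = os.path.join(os.environ['OPUS_HOME'], directoryname)
    latest = max(os.listdir(base),
                 key=lambda f: os.path.getmtime(os.path.join(base, f)))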
Example #5
    def export(self):
        """Run the export process."""
        output_storage = csv_storage(storage_location=self.csv_data_path)

        ExportStorage().export_dataset(
            dataset_name='travel_data',
            in_storage=self.input_storage,
            out_storage=output_storage,
        )
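
This method assumes its enclosing object provides input_storage and csv_data_path; a minimal, hypothetical harness showing those two attributes (the class name is an assumption, not from the original):

    class TravelDataCsvExporter(object):
        """Hypothetical holder for the two attributes export() relies on."""
        def __init__(self, input_storage, csv_data_path):
            self.input_storage = input_storage    # source Opus storage
            self.csv_data_path = csv_data_path    # destination directory for the CSV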
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
    else:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (table_name, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=table_name,
                                       in_storage=input_storage,
                                       out_storage=output_storage)
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(
        opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']

    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name,
                                 create_if_doesnt_exist=False)

    input_storage = sql_storage(storage_location=opusdb)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        lst = re.split(' +', table_name.strip())

    tables = len(lst)
    lst_out = create_list_string(lst, ', ')

    logCB('caching tables:\n%s\n' % lst_out)

    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=i,
            in_storage=input_storage,
            out_storage=output_storage,
            overwrite=overwrite,
        )
        progressCB(100 * j / tables)
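        # enumerate(start=1) means the final table reports exactly 100%;
        # with 4 tables the values passed are 25, 50, 75, 100
        # (whole percents under Python 2 integer division).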

    logCB('successfully cached tables:\n%s\n' % lst_out)
def opusRun(progressCB,logCB,params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    # Output csv data path
    csv_data_path = params_dict['csv_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    execute_after_export = params_dict['execute_after_export']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [int(year) for year in os.listdir(opus_data_directory) if year.isdigit() and len(year) == 4]

    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:

        input_storage = attribute_cache.get_flt_storage_for_year(year)
        
        output_storage = csv_storage(storage_location = csv_data_path)

        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        
        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i,year,opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name = i,
                in_storage = input_storage,
                out_storage = output_storage,
                )
            
        logCB("Successfully exported all datasets.")

    if execute_after_export:
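        # Launch the configured post-export command with every exported
        # CSV file path as an argument; Popen returns without waiting.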
        file_name_list = [output_storage._get_file_path_for_table(i)
                          for i in opus_table_name_list]
        subprocess.Popen([execute_after_export] + file_name_list)
Example #9
def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    esri_data_path = params_dict['esri_data_path']
    esri_table_name = params_dict['esri_table_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']

    input_storage = esri_storage(storage_location=esri_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)

    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if esri_table_name == 'ALL':
        logCB("Sending all tables to OPUS storage...\n")
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )

    else:
        logCB("Exporting table '%s' to OPUS storage located at %s...\n" %
              (esri_table_name, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=esri_table_name,
            in_storage=input_storage,
            out_storage=output_storage,
        )
        logCB("Finished exporting table '%s'\n" % (esri_table_name))
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    database_name = params_dict['database_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    opus_table_name = params_dict['opus_table_name']

    database_server_connection = params_dict['database_server_connection']
    dbs_config = DatabaseServerConfiguration(
        database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:
        #input_storage = sql_storage(storage_location = opusdb)
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        #output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
        if opus_data_year == 'ALL':
            opusdb = server.get_database(database_name=database_name + "_" +
                                         str(year))
        output_storage = sql_storage(storage_location=opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = re.split(' +', opus_table_name.strip())
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    # Output esri data path
    esri_data_path = params_dict['esri_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]

    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:

        input_storage = attribute_cache.get_flt_storage_for_year(year)

        if esri_is_avail:
            output_storage = esri_storage(storage_location=esri_data_path)
        else:
            output_storage = None
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
    attribute_cache_directory = options.attribute_cache_directory
    table_name = options.table_name
    cache_year = options.cache_year

    if (tab_directory is None or attribute_cache_directory is None
            or table_name is None or cache_year is None):

        parser.print_help()
        sys.exit(1)

    input_storage = tab_storage(storage_location=tab_directory)

    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    logger.start_block(
        "Exporting table '%s' to year %s of cache located at %s..." %
        (table_name, cache_year, attribute_cache_directory))
    try:
        ExportStorage().export_dataset(
            dataset_name=table_name,
            in_storage=input_storage,
            out_storage=output_storage,
        )
    finally:
        logger.end_block()
    def _get_exporter(self):
        """Hook for unit tests."""
        if self._exporter is None:
            # Create the default exporter object.
            self._exporter = ExportStorage()
        return self._exporter
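
The docstring marks _get_exporter as a test seam: a test can preassign self._exporter with a stub before exercising the code under test. A minimal sketch (MockExporter is a hypothetical name, not from the original):

    class MockExporter(object):
        """Records export_dataset calls instead of writing any data."""
        def __init__(self):
            self.exported = []
        def export_dataset(self, dataset_name, in_storage, out_storage, **kwargs):
            self.exported.append(dataset_name)

    # In a test:
    #   obj._exporter = MockExporter()
    #   ... run the code under test ...
    #   assert 'zones' in obj._exporter.exported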
    parser = OptionParser()
    
    parser.add_option('-c', '--cache_path', dest='cache_path', type='string', 
        help='The filesystem path to the cache to export (required)')
    parser.add_option('-o', '--output_directory', dest='output_directory', 
        type='string', help='The filesystem path of the database to which '
            'output will be written (required)')
    parser.add_option('-t', '--table_name', dest='table_name', 
        type='string', help='Name of table to be exported (optional). Used if only one table should be exported.')

    (options, args) = parser.parse_args()
    
    cache_path = options.cache_path
    output_directory = options.output_directory
    table_name = options.table_name
    
    if None in (cache_path, output_directory):
        parser.print_help()
        sys.exit(1)

    in_storage = flt_storage(storage_location=cache_path)

    out_storage = tab_storage(storage_location=output_directory)
    
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    if table_name is not None:
        ExportStorage().export_dataset(table_name, in_storage=in_storage, out_storage=out_storage)
    else:
        ExportStorage().export(in_storage=in_storage, out_storage=out_storage)
Example #15
    )
    parser.add_option(
        '--compression',
        dest='compression',
        type='string',
        default=None,
        help='Compression type. Available: gzip, lzf. Default is no compression.')
    (options, args) = parser.parse_args()

    cache_path = options.cache_path
    output_file = options.output_file
    table_name = options.table_name

    if None in (cache_path, output_file):
        parser.print_help()
        sys.exit(1)

    in_storage = flt_storage(storage_location=cache_path)

    out_storage = hdf5g_storage(storage_location=output_file)

    if table_name is not None:
        ExportStorage().export_dataset(table_name,
                                       in_storage=in_storage,
                                       out_storage=out_storage,
                                       compression=options.compression)
    else:
        ExportStorage().export(in_storage=in_storage,
                               out_storage=out_storage,
                               compression=options.compression)