# Module-level imports needed by this test's setUp (the Opus/UrbanSim module
# paths are assumptions based on the standard Opus source layout):
import os
from tempfile import mkdtemp
from numpy import array, ones
from opus_core.storage_factory import StorageFactory
from opus_core.store.hdf5g_storage import hdf5g_storage
from urbansim.datasets.zone_dataset import ZoneDataset
from urbansim.datasets.travel_data_dataset import TravelDataDataset

def setUp(self):
    # Scratch directory holding a fake Emme skims bank in HDF5 format.
    self.temp_dir = mkdtemp(prefix='psrc_parcel_test_emme_skims')
    self.bank_file = os.path.join(self.temp_dir, '2015-travelmodel.h5')
    self.bank_storage = hdf5g_storage(storage_location=self.bank_file)
    # Two 3x3 skim matrices (auto time, bike time) written into one 'Bank1' group.
    data = {
        'au1tim': array([[2, 5, 7], [1.3, 7.9, 3], [6, 10, 0]]),
        'biketm': array([[10, 50, 65], [13, 10.9, 40], [56, 100, 21]]),
        }
    self.bank_storage.write_table(table_name='Bank1', table_data=data)

    # Zone dataset with three zones and two dummy travel-time attributes.
    zone_storage = StorageFactory().get_storage('dict_storage')
    zone_table_name = 'zone'
    zone_storage.write_table(
        table_name=zone_table_name,
        table_data={
            'zone_id': array([1, 2, 3]),
            'travel_time_to_airport': ones((3,)),
            'travel_time_to_cbd': ones((3,)),
            },
        )
    self.zone_set = ZoneDataset(in_storage=zone_storage,
                                in_table_name=zone_table_name)

    # Travel-data dataset covering every origin-destination pair of the 3 zones.
    travel_data_storage = StorageFactory().get_storage('dict_storage')
    travel_data_table_name = 'travel_data'
    travel_data_storage.write_table(
        table_name=travel_data_table_name,
        table_data={
            'from_zone_id': array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
            'to_zone_id':   array([1, 2, 3, 3, 2, 1, 1, 2, 3]),
            },
        )
    self.travel_data_set = TravelDataDataset(in_storage=travel_data_storage,
                                             in_table_name=travel_data_table_name)
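# A minimal companion sketch (not in the source): a tearDown that removes the
# scratch directory created by setUp above, so each test run starts clean.
def tearDown(self):
    import shutil
    # Remove the temp directory holding the fake Emme skims bank file.
    shutil.rmtree(self.temp_dir, ignore_errors=True)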
# Imports for this command-line script (the Opus module paths are assumptions
# based on the standard Opus source layout):
import sys
from optparse import OptionParser
from opus_core.store.flt_storage import flt_storage
from opus_core.store.hdf5g_storage import hdf5g_storage
from opus_core.export_storage import ExportStorage

if __name__ == '__main__':
    parser = OptionParser(description='Converts tables in an Opus cache '
        'directory into one HDF5 file. Each table is stored as a separate '
        'group in the file.')
    parser.add_option('-c', '--cache_path', dest='cache_path', type='string',
                      help='The filesystem path to the cache to export (required)')
    parser.add_option('-o', '--output_file', dest='output_file', type='string',
                      help='The full file name to which output will be written (required)')
    parser.add_option('-t', '--table_name', dest='table_name', type='string',
                      help='Name of table to be exported (optional). '
                           'If not used, all tables are exported.')
    parser.add_option('--compression', dest='compression', type='string', default=None,
                      help='Compression type. Available: gzip, lzf. '
                           'Default is no compression.')
    (options, args) = parser.parse_args()

    cache_path = options.cache_path
    output_file = options.output_file
    table_name = options.table_name
    if None in (cache_path, output_file):
        parser.print_help()
        sys.exit(1)

    in_storage = flt_storage(storage_location=cache_path)
    out_storage = hdf5g_storage(storage_location=output_file)
    if table_name is not None:
        # Export a single named table.
        ExportStorage().export_dataset(table_name,
                                       in_storage=in_storage,
                                       out_storage=out_storage,
                                       compression=options.compression)
    else:
        # No table given: export everything in the cache.
        ExportStorage().export(in_storage=in_storage,
                               out_storage=out_storage,
                               compression=options.compression)
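# A hedged usage sketch (not in the source): inspecting the exported file with
# plain h5py. Per the script's description, each exported table becomes a
# separate HDF5 group whose datasets are the table's columns. The file name
# 'cache_export.h5' is a placeholder.
import h5py

with h5py.File('cache_export.h5', 'r') as f:
    for table in f.keys():
        # One group per table; each member dataset is one column.
        print('%s: %s' % (table, list(f[table].keys())))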
help="The attribute cache year into which to write the output (required).") (options, args) = parser.parse_args() file_name = options.hdf5_file attribute_cache_directory = options.attribute_cache_directory cache_year = options.cache_year if (file_name is None or attribute_cache_directory is None or cache_year is None): parser.print_help() sys.exit(1) input_storage = hdf5g_storage(storage_location = file_name) attribute_cache = AttributeCache(cache_directory=attribute_cache_directory) output_storage = attribute_cache.get_flt_storage_for_year(cache_year) SimulationState().set_current_time(cache_year) SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache()) for table_name in input_storage.get_table_names(): logger.start_block("Exporting table '%s' to year %s of cache located at %s..." % (table_name, cache_year, attribute_cache_directory)) try: ExportStorage().export_dataset( dataset_name = table_name, in_storage = input_storage,
help= "The attribute cache year into which to write the output (required).") (options, args) = parser.parse_args() file_name = options.hdf5_file attribute_cache_directory = options.attribute_cache_directory cache_year = options.cache_year if (file_name is None or attribute_cache_directory is None or cache_year is None): parser.print_help() sys.exit(1) input_storage = hdf5g_storage(storage_location=file_name) attribute_cache = AttributeCache(cache_directory=attribute_cache_directory) output_storage = attribute_cache.get_flt_storage_for_year(cache_year) SimulationState().set_current_time(cache_year) SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache()) for table_name in input_storage.get_table_names(): logger.start_block( "Exporting table '%s' to year %s of cache located at %s..." % (table_name, cache_year, attribute_cache_directory)) try: ExportStorage().export_dataset( dataset_name=table_name,