def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Merge the MATSim-written travel_data csv into the cached travel data.

    The csv is read from $OPUS_HOME/opus_matsim/tmp; its non-id primary
    attributes are joined onto the cached travel_data table for `year`
    (mismatched from_zone_id/to_zone_id row order solved 3dec08 by hana).
    Returns the merged TravelDataDataset.
    """
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    matsim_output_dir = os.path.join(os.environ['OPUS_HOME'], "opus_matsim", "tmp")
    logger.log_status("input_directory: " + matsim_output_dir)
    table_name = "travel_data"
    # freshly computed MATSim travel data
    matsim_travel_data = TravelDataDataset(
        in_storage=csv_storage(storage_location=matsim_output_dir),
        in_table_name=table_name)
    # travel data currently sitting in the urbansim cache for this year
    cached_travel_data = TravelDataDataset(
        in_storage=AttributeCache().get_flt_storage_for_year(year),
        in_table_name=table_name)
    # NOTE(review): join may not work / be wrong after travel_data id_names
    # changed from ['from_zone_id', 'to_zone_id'] to _hidden_id (lmwang)
    cached_travel_data.join(matsim_travel_data,
                            matsim_travel_data.get_non_id_primary_attribute_names(),
                            metadata=AttributeType.PRIMARY)
    return cached_travel_data
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Bring MATSim travel data from <root>/opus_matsim/tmp into the year cache.

    Joins the csv-based travel_data onto the cached travel_data for `year`
    and returns the merged TravelDataDataset.
    """
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    csv_dir = os.path.join(config['root'], 'opus_matsim', 'tmp')
    logger.log_status("input_directory: " + csv_dir)
    name = "travel_data"
    storage_in = csv_storage(storage_location=csv_dir)
    fresh_set = TravelDataDataset(in_storage=storage_in, in_table_name=name)
    year_storage = AttributeCache().get_flt_storage_for_year(year)
    merged_set = TravelDataDataset(in_storage=year_storage, in_table_name=name)
    ##TODO: this may not work or may be wrong after the id_names of travel_data
    ##changed from ['from_zone_id', 'to_zone_id'] to _hidden_id (lmwang)
    merged_set.join(fresh_set,
                    fresh_set.get_non_id_primary_attribute_names(),
                    metadata=AttributeType.PRIMARY)
    return merged_set
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Read the travel-model output and fold the freshly computed travel
    data attributes into the urbansim cache for `year`.

    Relies on self.init() to prepare self.in_storage, self.cache_storage
    and self.travel_data_table_name. Returns the merged TravelDataDataset.
    """
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    self.init(year, config)
    # MATSim results on one side, current cache contents on the other
    imported = TravelDataDataset(in_storage=self.in_storage,
                                 in_table_name=self.travel_data_table_name)
    cached = TravelDataDataset(in_storage=self.cache_storage,
                               in_table_name=self.travel_data_table_name)
    # overlay the imported non-id attributes onto the cached set
    cached.join(imported,
                imported.get_non_id_primary_attribute_names(),
                metadata=AttributeType.PRIMARY)
    return cached
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Integrate modified travel times and pre-computed travel costs into
    the UrbanSim cache.

    Loads the cached travel_data for `year`, then joins in the modified
    travel times written by MATSim to $OPUS_HOME/opus_matsim/tmp.
    Returns the merged TravelDataDataset.
    """
    logger.log_status('Starting GetTestTravelDataIntoCache.get_travel_data...')
    # get sensitivity test path as an anchor to determine the location of the
    # MATSim travel_data file (only used by the disabled travel-cost block below)
    test_dir_path = test_dir.__path__[0]
    # optional remote-debugging hook (tnicolai); a missing pydevd module or an
    # unreachable debug server must not abort the run, so swallow the failure
    # (narrowed from the original bare `except:`)
    try:
        import pydevd
        pydevd.settrace()
    except Exception:
        pass
    # get the existing travel data from the current year
    logger.log_status('Loading travel data from UrbanSim cache (year:%i)' % year)
    table_name = "travel_data"
    cache_storage = AttributeCache().get_flt_storage_for_year(year)
    existing_travel_data_set = TravelDataDataset(in_storage=cache_storage,
                                                 in_table_name=table_name)
    ###### modified travel time travel data
    # BUG FIX: the original logged the raw '%i' format string without supplying
    # `year`, so the year was never interpolated into the message
    logger.log_status('Integrating modifyed travel times in year %i for next simulation year.' % year)
    input_directory = os.path.join(os.environ['OPUS_HOME'], "opus_matsim", "tmp")
    logger.log_status("input_directory: " + input_directory)
    # location of the modified travel time travel_data
    in_storage = csv_storage(storage_location=input_directory)
    # create travel data set (travel times)
    travel_data_set = TravelDataDataset(in_storage=in_storage,
                                        in_table_name=table_name)
    # join the modified travel times with the travel data set of the current year
    existing_travel_data_set.join(travel_data_set,
                                  travel_data_set.get_non_id_primary_attribute_names(),
                                  metadata=AttributeType.PRIMARY)
    ##### pre-calculated MATSim travel data (travel costs) -- currently disabled
    # logger.log_status('Integrating pre-calculated travel costs (MATSim) in year %i for next simulation year.' % year)
    # input_directory = os.path.join(test_dir_path, 'data', 'travel_cost')
    # logger.log_status("input_directory: " + input_directory)
    # if not os.path.exists(input_directory):
    #     logger.log_error('File not found! %s' % input_directory)
    #     sys.exit()
    # in_storage = csv_storage(storage_location=input_directory)
    # travel_data_set = TravelDataDataset(in_storage=in_storage, in_table_name=table_name)
    # existing_travel_data_set.join(travel_data_set, travel_data_set.get_non_id_primary_attribute_names(), metadata=AttributeType.PRIMARY)
    return existing_travel_data_set
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Pull the MATSim outputs selected in the controler configuration into
    the urbansim cache and return the (possibly updated) travel data set.
    """
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    self.init(year, config)
    # optional imports, each switched by a flag in the MATSim controler section
    if self.__get_value_as_boolean('cell_based_accessibility', self.matsim_controler):
        # parcel-based accessibilities from MATSim -> parcel table
        self.get_parcel_based_accessibility_into_cache(year)
    if self.__get_value_as_boolean('zone_based_accessibility', self.matsim_controler):
        # zone-based accessibilities from MATSim -> zones table
        self.get_zone_based_accessibility_into_cache(year)
    if self.__get_value_as_boolean('agent_performance', self.matsim_controler):
        # agent performances from MATSim -> persons table
        self.get_agent_performance_into_cache(year)
    # NOTE: clearing cached travel-data attributes beforehand
    # (self.clear_cache_travel_data(year)) is disabled -- may cause errors
    # in some models
    # current travel data set from cache
    result_set = TravelDataDataset(in_storage=self.cache_storage,
                                   in_table_name=self.travel_data_table_name)
    if self.__get_value_as_boolean('zone2zone_impedance', self.matsim_controler):
        logger.log_status('Importing zone to zone impedances from MATSim ...')
        matsim_set = TravelDataDataset(in_storage=self.in_storage,
                                       in_table_name=self.travel_data_table_name)
        ##TODO: this may not work or may be wrong after the id_names of
        ##travel_data changed from ['from_zone_id', 'to_zone_id'] to
        ##_hidden_id (lmwang)
        result_set.join(matsim_set,
                        matsim_set.get_non_id_primary_attribute_names(),
                        metadata=AttributeType.PRIMARY)
        logger.log_status('Finished join operation for zone to zone impedances (travel_data_set).')
    return result_set
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Overlay the MATSim-generated travel_data onto this year's cached copy
    and return the result."""
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    input_directory = os.path.join(config['root'], 'opus_matsim', 'tmp')
    logger.log_status("input_directory: " + input_directory)

    def load(storage):
        # both sides use the same table name
        return TravelDataDataset(in_storage=storage, in_table_name="travel_data")

    incoming = load(csv_storage(storage_location=input_directory))
    target = load(AttributeCache().get_flt_storage_for_year(year))
    ##TODO: this may not work or may be wrong after the id_names of travel_data
    ##changed from ['from_zone_id', 'to_zone_id'] to _hidden_id (lmwang)
    target.join(incoming,
                incoming.get_non_id_primary_attribute_names(),
                metadata=AttributeType.PRIMARY)
    return target
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Replace the cached travel_data columns for `year` with MATSim's
    freshly computed ones (from_zone_id/to_zone_id row-order mismatch
    solved 3dec08 by hana)."""
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    source_dir = os.path.join(os.environ['OPUS_HOME'], "opus_matsim", "tmp")
    logger.log_status("input_directory: " + source_dir)
    new_data = TravelDataDataset(
        in_storage=csv_storage(storage_location=source_dir),
        in_table_name="travel_data")
    old_data = TravelDataDataset(
        in_storage=AttributeCache().get_flt_storage_for_year(year),
        in_table_name="travel_data")
    attributes = new_data.get_non_id_primary_attribute_names()
    ##TODO: this may not work or may be wrong after the id_names of travel_data
    ##changed from ['from_zone_id', 'to_zone_id'] to _hidden_id (lmwang)
    old_data.join(new_data, attributes, metadata=AttributeType.PRIMARY)
    return old_data
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Import MATSim results (workplace accessibility plus travel data)
    into the urbansim cache and return the merged travel data set."""
    logger.log_status('Starting GetMatsimDataIntoCache.get_travel_data...')
    self.init(year, config)
    # workplace accessibility from matsim -> zones table
    self.get_zone_based_accessibility_into_cache(year)
    tbl = self.travel_data_table_name
    from_matsim = TravelDataDataset(in_storage=self.in_storage, in_table_name=tbl)
    # NOTE: deleting cached travel-data attributes beforehand
    # (self.clear_cache_travel_data(year)) stays disabled -- tnicolai: may
    # cause errors in some models
    from_cache = TravelDataDataset(in_storage=self.cache_storage, in_table_name=tbl)
    ##TODO: this may not work or may be wrong after the id_names of travel_data
    ##changed from ['from_zone_id', 'to_zone_id'] to _hidden_id (lmwang)
    from_cache.join(from_matsim,
                    from_matsim.get_non_id_primary_attribute_names(),
                    metadata=AttributeType.PRIMARY)
    return from_cache
def get_travel_data_from_travel_model(self, config, year, zone_set):
    """Integrate modified travel times and pre-computed travel costs into
    the UrbanSim cache.

    Loads the cached travel_data for `year` and joins in the modified
    travel times written by MATSim to $OPUS_HOME/opus_matsim/tmp; the
    travel-cost integration below remains disabled. Returns the merged
    TravelDataDataset.
    """
    logger.log_status('Starting GetTestTravelDataIntoCache.get_travel_data...')
    # sensitivity test path, an anchor to determine the location of the MATSim
    # travel_data file (only referenced by the disabled travel-cost block)
    test_dir_path = test_dir.__path__[0]
    # optional remote-debugging hook (tnicolai); failure to import pydevd or to
    # reach a debug server must not stop the run (narrowed from bare `except:`)
    try:
        import pydevd
        pydevd.settrace()
    except Exception:
        pass
    # existing travel data from the current year
    logger.log_status('Loading travel data from UrbanSim cache (year:%i)' % year)
    table_name = "travel_data"
    cache_storage = AttributeCache().get_flt_storage_for_year(year)
    existing_travel_data_set = TravelDataDataset(in_storage=cache_storage,
                                                 in_table_name=table_name)
    ###### modified travel time travel data
    # BUG FIX: the original passed the '%i' format string without arguments,
    # so the year never appeared in the log output
    logger.log_status('Integrating modifyed travel times in year %i for next simulation year.' % year)
    input_directory = os.path.join(os.environ['OPUS_HOME'], "opus_matsim", "tmp")
    logger.log_status("input_directory: " + input_directory)
    # location of the modified travel time travel_data
    in_storage = csv_storage(storage_location=input_directory)
    # create travel data set (travel times)
    travel_data_set = TravelDataDataset(in_storage=in_storage,
                                        in_table_name=table_name)
    # join the modified travel times with the travel data set of the current year
    existing_travel_data_set.join(travel_data_set,
                                  travel_data_set.get_non_id_primary_attribute_names(),
                                  metadata=AttributeType.PRIMARY)
    ##### pre-calculated MATSim travel data (travel costs) -- currently disabled
    # logger.log_status('Integrating pre-calculated travel costs (MATSim) in year %i for next simulation year.' % year)
    # input_directory = os.path.join(test_dir_path, 'data', 'travel_cost')
    # logger.log_status("input_directory: " + input_directory)
    # if not os.path.exists(input_directory):
    #     logger.log_error('File not found! %s' % input_directory)
    #     sys.exit()
    # in_storage = csv_storage(storage_location=input_directory)
    # travel_data_set = TravelDataDataset(in_storage=in_storage, in_table_name=table_name)
    # existing_travel_data_set.join(travel_data_set, travel_data_set.get_non_id_primary_attribute_names(), metadata=AttributeType.PRIMARY)
    return existing_travel_data_set