Example #1
 def run(self, config, year):
     """
     """
     logger.start_block("Starting RunDummyTravelModel.run(...)")
     
     print >> sys.stderr, "\nThis should also check if get_cache_data_into_matsim did something reasonable"
     
     #try: # tnicolai :for debugging
     #    import pydevd
     #    pydevd.settrace()
     #except: pass
     
     self.setUp( config )
     
     # config_obj = MATSimConfigObject(config, year) #self.matsim_config_full
     # config_obj.marschall()        
     
     cmd = """cd %(opus_home)s/opus_matsim ; java %(vmargs)s -cp %(classpath)s %(javaclass)s %(matsim_config_file)s %(test_parameter)s""" % {
             'opus_home': paths.get_opus_home_path(),
             'vmargs': "-Xmx2000m", # set to 8GB on math cluster and 2GB on Notebook
             'classpath': "jar/matsim4urbansim.jar",
             'javaclass': "playground.run.Matsim4Urbansim", # "playground.tnicolai.urbansim.cupum.MATSim4UrbansimCUPUM",
             'matsim_config_file': self.matsim_config_full,
             'test_parameter': self.test_parameter } 
     
     logger.log_status('would normally run command %s' % cmd )
     
     travel_data_location = os.path.join( opus_matsim.__path__[0], 'tests', 'testdata', 'travel_data.csv' )
     print 'MATSim travel_data.csv stored at %s' %travel_data_location
Example #2
    def run(self, config, year):
        """ This class simulates a MATSim run. Therefore it copies 
            real travel data into the OPUS_HOME and modifies the 
            entries in the following runs.
        """        
        logger.start_block("Starting RunDummyTravelTimeTravelModel.run(...)")

        self.config = config
        # get travel model parameter from the opus dictionary
        self.travel_model_configuration = config['travel_model_configuration']
        
        self.first_year = 2001 # TODO make configurable (base_year + 1)
        
        # set output directory for travel data
        self.travel_data_dir = paths.get_opus_home_path( "opus_matsim", "tmp" )

        # for debugging
        #try: #tnicolai
        #    import pydevd
        #    pydevd.settrace()
        #except: pass

        # set travel data for test simulation
        if year == self.first_year:
            logger.log_status('Exporting travel_data from base_year_cache to %s' % self.travel_data_dir)
            self.export_travel_data(None)
            logger.log_status("Modifying travel data.")
            self.modify_travel_data()   
            logger.log_status("Finished modifying...")  
        else:
            logger.log_status("Travel data was modified before. Nothing to do...")

        logger.end_block()
Example #3
 def init(self, year, config):
     self.input_directory =  paths.get_opus_home_path( "opus_matsim", "tmp" )
     logger.log_status("input_directory: " + self.input_directory )
     self.in_storage = csv_storage(storage_location = self.input_directory)
     self.cache_storage = AttributeCache().get_flt_storage_for_year(year)
     self.cache_directory = config['cache_directory']
     
     self.delete_travel_data_columns = ['am_bike_to_work_travel_time', 
                                   'am_biking_person_trips',
                                   #'am_pk_period_drive_alone_vehicle_trips',
                                   'am_total_transit_time_walk',
                                   'am_transit_person_trip_table',
                                   #'am_walk_time_in_minutes',
                                   'am_walking_person_trips',
                                   'am_double_vehicle_to_work_travel_time',
                                   'am_threeplus_vehicle_to_work_travel_time',
                                   'logsum_hbw_am_income_1',
                                   'logsum_hbw_am_income_2',
                                   'logsum_hbw_am_income_3',
                                   'logsum_hbw_am_income_4',
                                   'md_vehicle_miles_traveled',
                                   'nweubk',
                                   'nweuda',
                                   'nweus2',
                                   'nweus3',
                                   'nweutw',
                                   'nweuwk',
                                   'pm_ev_ni_vehicle_miles_traveled',
                                   'single_vehicle_to_work_travel_distance']
     
     self.travel_data_table_name = "travel_data"
     self.zone_table_name = "zones"
Example #4
 def copy_pre_calculated_MATSim_travel_costs(self):
     ''' Copies pre-calculated MATSim travel costs into the 
         OPUS HOME tmp directory.
     '''
     # get sensitivity test path as an anchor to determine the location of the MATSim travel_data file
     test_dir_path = test_path.__path__[0]
     
     # set source location
     travel_data_source = os.path.join( test_dir_path, 'data', 'travel_cost', "travel_data.csv" )
     if not self.travel_data_exsists( travel_data_source ):
          print 'Pre-computed MATSim travel data not found! %s' % travel_data_source
         sys.exit()
         
     # set destination location
     destination_dir = paths.get_opus_home_path("opus_matsim", "tmp")
     if not os.path.exists(destination_dir):
         try: os.mkdir(destination_dir)
         except: pass
     travel_data_destination = os.path.join( destination_dir, "travel_data.csv" )
     
     logger.log_status("Copying pre-calculated MATSim travel data:")
     logger.log_status("Source: %s" % travel_data_source)
     logger.log_status("Destination %s:" % travel_data_destination)
     
     # copy travel data
     shutil.copy (travel_data_source, travel_data_destination)
     if os.path.isfile (travel_data_destination): 
         logger.log_status("Copying successful ...")
     else: 
         raise StandardError("Test travel data not copied!")
         sys.exit()
Example #5
    def __init__(self):
        '''
        Constructor
        '''
        logger.log_status('Start init ...')

        if paths.get_opus_home_path() == None or paths.get_opus_home_path() == "":
            logger.log_error('OPUS_HOME variable not found. Please define OPUS_HOME in your environment variables.')
            logger.log_error('Aborting MATSim4UrbanSim installation!')
            exit()
        
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
        self.target_path = paths.get_opus_home_path( matsim4opus, 'jar')
        self.source_url = 'http://matsim.org/files/builds/'
        self.html_finder = FindLinks()
        
        logger.log_status('... init done!')
Example #6
    def on_pbn_set_run_directory_released(self):
        start_dir = paths.get_opus_home_path('runs', os.environ['OPUSPROJECTNAME'])

        fd = QFileDialog.getExistingDirectory(self,
                    QString("Please select a run directory..."), #, *.sde, *.mdb)..."),
                    QString(start_dir), QFileDialog.ShowDirsOnly)
        if len(fd) != 0:
            fileName = QString(fd)
            self.lePath.setText(fileName)
Example #7
    def get_travel_data_from_travel_model(self, config, year, zone_set):
        """ Integrates modified travel times and pre-computed travel costs
            into the UrbanSim cache.
        """
        
        logger.log_status('Starting GetTestTravelDataIntoCache.get_travel_data...')
        
        # get sensitivity test path as an anchor to determine the location of the MATSim travel_data file (see below).
        test_dir_path = test_dir.__path__[0]
        
        # for debugging
        #try: #tnicolai
        #    import pydevd
        #    pydevd.settrace()
        #except: pass

        # get the existing travel data from the current year
        logger.log_status('Loading travel data from UrbanSim cache (year:%i)' % year)
        table_name = "travel_data"
        cache_storage = AttributeCache().get_flt_storage_for_year(year)
        existing_travel_data_set = TravelDataDataset( in_storage=cache_storage, in_table_name=table_name )


        ###### modified travel time travel data
        logger.log_status('Integrating modified travel times of year %i for the next simulation year.' % year)
        input_directory = paths.get_opus_home_path('opus_matsim', 'tmp')
        logger.log_status("input_directory: " + input_directory )
        # location of the modified travel time travel_data
        in_storage = csv_storage(storage_location = input_directory)
        # create travel data set (travel times)
        travel_data_set = TravelDataDataset( in_storage=in_storage, in_table_name=table_name )

        # join the modified travel times with the travel data set of the current year
        existing_travel_data_set.join(travel_data_set, travel_data_set.get_non_id_primary_attribute_names(),metadata=AttributeType.PRIMARY)


        ##### pre-calculated MATSim travel data (travel costs)
#        logger.log_status('Integrating pre-calculated travel costs (MATSim) in year %i for next simulation year.')
#        input_directory = os.path.join( test_dir_path, 'data', 'travel_cost')
#        logger.log_status("input_directory: " + input_directory )
#        # check source file
#        if not os.path.exists( input_directory ):
#            print 'File not found! %s' % input_directory
#            sys.exit()
        # location of pre-calculated MATSim travel costs
#        in_storage = csv_storage(storage_location = input_directory)
        # create travel data set (travel costs)
#        travel_data_set = TravelDataDataset( in_storage=in_storage, in_table_name=table_name )

        # join travel data set from pre-calculated MATSim results
#        existing_travel_data_set.join(travel_data_set, travel_data_set.get_non_id_primary_attribute_names(),metadata=AttributeType.PRIMARY)

        
        return existing_travel_data_set
Example #8
 def __get_plans_file(self, common_matsim_part, entry):
     try:    # checks if sub config for matsim input plans file exists
         self.sub_config_exists = ( common_matsim_part[entry] != None)
     except: return ""
     if self.sub_config_exists:
         self.check_abolute_path( common_matsim_part[entry]  )    
         logger.log_note('Input plans file found (MATSim warm start enabled).') 
         return paths.get_opus_home_path( common_matsim_part[entry]  )
     else: 
         logger.log_note('No input plans file set in the "travel_model_configuration" of your current configuration file (MATSim warm start disabled).')
         return ""
Example #9
 def __get_plans_file(self, common_matsim_part, entry):
     try:
         self.sub_config_exists = ( common_matsim_part[entry] != None)
     except:
         logger.log_note('No input plans file in "travel_model_configuration" section found (i.e. MATSim warm/hot start is not active).') 
         return ""
     if self.sub_config_exists:
         self.check_abolute_path( common_matsim_part[entry]  )    
         logger.log_note('Input plans file found (MATSim warm start enabled).') 
         return paths.get_opus_home_path( common_matsim_part[entry]  )
     else:
         return ""
Example #10
 def __init__(self):
     Baseline.__init__(self)
     self['config_changes_for_estimation'] = ConfigChangesForEstimation()
     self['cache_directory'] = paths.get_opus_home_path('data/eugene_zone/base_year_data')
     self['scenario_database_configuration'] = ScenarioDatabaseConfiguration(database_name = 'eugene_1980_baseyear_zone')
     self['estimation_database_configuration'] = EstimationDatabaseConfiguration(database_name = 'eugene_1980_baseyear_zone')
 
     self['datasets_to_cache_after_each_model' ] = []
     self['low_memory_mode'] = False
     self['base_year'] = 1980
     self['years'] = (1980,1980)
     self['seed'] = 1
Example #11
 def dump_travel_list(self, travel_list):
     ''' Dumps travel_list for debugging purposes...
     '''
     
     dest = paths.get_opus_home_path( 'opus_matsim', 'tmp')
     if not os.path.exists(dest):
         os.makedirs(dest)
             
     travel = os.path.join(dest, 'travelFile.txt')
     f = open(travel, "w")
     f.write( str(travel_list) )
     f.close()
Example #12
    def on_pbn_set_run_directory_released(self):
        start_dir = paths.get_opus_home_path('runs',
                                             os.environ['OPUSPROJECTNAME'])

        fd = QFileDialog.getExistingDirectory(
            self,
            QString(
                "Please select a run directory..."),  #, *.sde, *.mdb)..."),
            QString(start_dir),
            QFileDialog.ShowDirsOnly)
        if len(fd) != 0:
            fileName = QString(fd)
            self.lePath.setText(fileName)
Example #13
    def run(self, config, year):
        """Running MATSim.  A lot of paths are relative; the base path is ${OPUS_HOME}/opus_matsim.  As long as ${OPUS_HOME}
        is correctly set and the matsim tar-file was unpacked in OPUS_HOME, this should work out of the box.  There may eventually
        be problems with the java version.
        """
        
        logger.start_block("Starting RunTravelModel.run(...)")

        config_obj = MATSimConfigObject(config, year)
        self.matsim_config_full = config_obj.marschall()
        
        # check for test parameter
        tmc = config['travel_model_configuration']
        if tmc['matsim4urbansim'].get('test_parameter') != None:
            self.test_parameter = tmc['matsim4urbansim'].get('test_parameter')
        # change to directory opus_matsim
        os.chdir( paths.get_opus_home_path(matsim4opus) )
        
        # init cmd
        cmd = ""
        # calling travel model with cmd command
        if sys.platform.lower() == 'win32': 
            # reserve memory for java
            xmx = '-Xmx1500m' # Windows can't reserve more than 1500m
            cmd = """java %(vmargs)s -cp %(classpath)s %(javaclass)s %(matsim_config_file)s %(test_parameter)s""" % {
                'vmargs': xmx, 
                'classpath': "jar/matsim.jar;jar/contrib/matsim4urbansim.jar",
                'javaclass': "org.matsim.contrib.matsim4opus.matsim4urbansim.MATSim4UrbanSim",
                'matsim_config_file': self.matsim_config_full,
                'test_parameter': self.test_parameter } 
        else:
            # reserve memory for java
            xmx = '-Xmx2000m'
            cmd = """java %(vmargs)s -cp %(classpath)s %(javaclass)s %(matsim_config_file)s %(test_parameter)s""" % {
                'vmargs': xmx, 
                'classpath': "jar/matsim.jar:jar/contrib/matsim4urbansim.jar",
                'javaclass': "org.matsim.contrib.matsim4opus.matsim4urbansim.MATSim4UrbanSim",
                'matsim_config_file': self.matsim_config_full,
                'test_parameter': self.test_parameter } 
        
        logger.log_status('Running command %s' % cmd ) 
        
        cmd_result = os.system(cmd)
        if cmd_result != 0:
            error_msg = "MATSim Run failed. Code returned by cmd was %d" % (cmd_result)
            logger.log_error(error_msg)
            logger.log_error("Note that currently (dec/08), paths in the matsim config files are relative to the matsim4opus root,")
            logger.log_error("which is one level 'down' from OPUS_HOME.")
            raise StandardError(error_msg)        
        
        logger.end_block()
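
A hedged aside on the example above: the two branches differ only in the Java heap size and the classpath separator, which `os.pathsep` already abstracts. A minimal standalone sketch under that assumption (the helper name `build_matsim_cmd` is hypothetical, not part of the project):

import os, sys

def build_matsim_cmd(matsim_config_file, test_parameter=''):
    # Hypothetical helper: assembles the same java command as the two branches
    # above; os.pathsep supplies ';' on Windows and ':' elsewhere.
    xmx = '-Xmx1500m' if sys.platform.lower() == 'win32' else '-Xmx2000m'
    classpath = os.pathsep.join(['jar/matsim.jar', 'jar/contrib/matsim4urbansim.jar'])
    return 'java %s -cp %s %s %s %s' % (
        xmx, classpath,
        'org.matsim.contrib.matsim4opus.matsim4urbansim.MATSim4UrbanSim',
        matsim_config_file, test_parameter)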
Example #14
    def test_run(self):
        print "Entering test run"
        
        path = paths.get_opus_home_path('opus_matsim', 'tmp')
        # check if travel data exists
        travel_data = os.path.join( path, "travel_data.csv" )
        if not os.path.exists(travel_data):
            print "Travel Data not found!!!"
            sys.exit()
        
        in_storage = csv_storage(storage_location = path)
        table_name = "travel_data"
        travel_data_set = TravelDataDataset( in_storage=in_storage, in_table_name=table_name )
        
        origin_zones = travel_data_set.get_attribute_as_column(self.origin_zone_id)
        l = numpy.atleast_1d(origin_zones).tolist()
        origin_list = set(l) # removes duplicates (note: a set itself is unordered)
        # destination_list = len(origin_list) * self.cbd # creates a list that contains the zone id of the cbd an has the same length as "origin_list"

        # set high travel costs for all origin to cbd pairs
        for id in origin_list:
            travel_data_set.set_values_of_one_attribute_with_od_pairs(self.travel_data_attribute, self.high_travel_cost, id, self.cbd)
        # adjust cbd to cbd
        travel_data_set.set_values_of_one_attribute_with_od_pairs(self.travel_data_attribute, 0.0, self.cbd, self.cbd)
        # adjust preferred zone to cbd
        travel_data_set.set_values_of_one_attribute_with_od_pairs(self.travel_data_attribute, self.low_travel_cost, self.preferential_zone, self.cbd)
        
        w = travel_data_set.get_index_by_origin_and_destination_ids(110, 129)
        x = travel_data_set.get_index_by_origin_and_destination_ids(129, 129)
        y = travel_data_set.get_index_by_origin_and_destination_ids(20, 129)
        z = travel_data_set.get_index_by_origin_and_destination_ids(20, 20)
        
        print w
        print x
        print y
        print z 
        
        origin_zones = travel_data_set.get_attribute_as_column(self.origin_zone_id)
        destination_zones = travel_data_set.get_attribute_as_column(self.destination_zone_id)
        
        my_travel_data_attr_mat = travel_data_set.get_attribute_as_matrix('travel_data.single_vehicle_to_work_travel_cost', 
                                                                   fill=999)
        my_travel_data_attr_mat[origin_zones, destination_zones] = 1.03
        
        
        
        cbd_ids = where(origin_zones == 129)
        

        print "Leaving test run"
Example #15
    def __init__(self):
        Baseline.__init__(self)
        self["config_changes_for_estimation"] = ConfigChangesForEstimation()
        self["cache_directory"] = paths.get_opus_home_path("data/eugene_gridcell/base_year_data")
        self["scenario_database_configuration"] = ScenarioDatabaseConfiguration(database_name="eugene_1980_baseyear")
        self["estimation_database_configuration"] = EstimationDatabaseConfiguration(
            database_name="eugene_1980_baseyear_estimation_xxx"
        )

        self["datasets_to_cache_after_each_model"] = []
        self["low_memory_mode"] = False
        self["base_year"] = 1980
        self["years"] = (1980, 1980)
        self["seed"] = 10
Example #16
 def __init__(self, server_config):
     AbstractDatabaseEngineManager.__init__(self, server_config)
     sqlite_db_path = server_config.sqlite_db_path
     if sqlite_db_path is None:
         self.server_path = paths.get_opus_home_path('local_databases')
     else:
         self.server_path = sqlite_db_path
         
     self.schema_path = os.path.join(self.server_path, self._get_default_database())
     self.os = None
     try:
          self.os = os.uname()[0]
     except:
         self.os = 'Windows'
Example #17
    def __setUp(self, config):
        """ create MATSim config data
        """
        self.matsim_config_destination = paths.get_opus_home_path("opus_matsim", "matsim_config")
        if not os.path.exists(self.matsim_config_destination):
            try:
                os.mkdir(self.matsim_config_destination)
            except:
                pass
        self.matsim_config_name = config["project_name"] + "_matsim_config.xml"
        self.matsim_config_full = os.path.join(self.matsim_config_destination, self.matsim_config_name)

        tmc = config["travel_model_configuration"]
        if tmc["matsim4urbansim"].get("test_parameter") != None:
            self.test_parameter = tmc["matsim4urbansim"].get("test_parameter")
Example #18
    def __get_file_location(self, file_path, required=False ):
        ''' checks if a given sub path exists
        '''
        try:
            self.sub_config_exists = (file_path != None)
        except:
            self.sub_config_exists = False
            if required:
                raise StandardError('File not found: %s' % file_path)
        if self.sub_config_exists:
            self.check_abolute_path( file_path )
            path = paths.get_opus_home_path( file_path )
            if os.path.exists( path ):
                return path

        return ""
Example #19
    def copy_dummy_travel_data(self):
        ''' Copies pre-calculated MATSim travel costs, travel times and workplace accessibility into the 
            OPUS HOME tmp directory.
        '''
        # get sensitivity test path as an anchor to determine the location of the MATSim travel_data file
        #test_dir_path = test_path.__path__[0]

        # set source location
        travel_data_source = paths.get_opus_data_path_path('psrc_parcel_cupum_preliminary', 'MATSimTravelData', 'travel_data.csv' )
        if not self.travel_data_exsists( travel_data_source ):
            raise StandardError( 'Dummy MATSim travel data not found! %s' % travel_data_source )
        workplace_accessibility_source = paths.get_opus_data_path_path('psrc_parcel_cupum_preliminary', 'MATSimTravelData', 'zones.csv' )
        if not self.travel_data_exsists( workplace_accessibility_source ):
            raise StandardError( 'Dummy MATSim travel data not found! %s' % workplace_accessibility_source )
            
        # set destination location
        destination_dir = paths.get_opus_home_path( "opus_matsim", "tmp" )
        if not os.path.exists(destination_dir):
            try: os.mkdir(destination_dir)
            except: pass
        self.travel_data_destination = os.path.join( destination_dir, "travel_data.csv" )
        self.workplace_accessibility_destination = os.path.join( destination_dir, "zones.csv" )
        
        logger.log_status("Copying dummy travel data:")
        logger.log_status("Source: %s" % travel_data_source)
        logger.log_status("Destination %s:" % self.travel_data_destination)
        
        # copy travel data
        shutil.copy (travel_data_source, self.travel_data_destination)
        if os.path.isfile (self.travel_data_destination): 
            logger.log_status("Copying successful ...")
        else: 
            raise StandardError("Test travel data travel_data_destination not copied!")
        
        logger.log_status("Copying dummy workplace accessibility indicators:")
        logger.log_status("Source: %s" % workplace_accessibility_source)
        logger.log_status("Destination %s:" % self.workplace_accessibility_destination)
        
        # copy workplace accessibility indicators
        shutil.copy (workplace_accessibility_source, self.workplace_accessibility_destination)
        if os.path.isfile (self.workplace_accessibility_destination): 
            logger.log_status("Copying successful ...")
        else: 
            raise StandardError("Test travel data workplace_accessibility_destination not copied!")  
Example #20
    def run(self, config, year):
        """ This class simulates a MATSim run. Therefore it copies 
            real travel data into the OPUS_HOME and modifies the 
            entries in the following runs.
        """        
        logger.start_block("Starting RunDummyTravelTimeTravelModel.run(...)")

        self.config = config
        # get travel model parameter from the opus dictionary
        self.travel_model_configuration = config['travel_model_configuration']
        self.base_year = self.travel_model_configuration['base_year']
        
        # set output directory for travel data
        self.travel_data_dir = paths.get_opus_home_path('opus_matsim', 'tmp')

        # for debugging
        #try: #tnicolai
        #    import pydevd
        #    pydevd.settrace()
        #except: pass

        # set travel data for test simulation
        if year == self.base_year+1:
            logger.log_status('Exporting travel_data from base_year_cache to %s' % self.travel_data_dir)
            self.export_travel_data(None)
            logger.log_status("Modifying travel times.") # comment out for base scenario
            self.modify_travel_time()                    # comment out for base scenario
            logger.log_status("Finished modifying...")   # comment out for base scenario
            #logger.log_status('Integrating test travel data into UrbanSim cache for next simulation year.')
            #self.integrate_test_travel_data(year)
            #logger.log_status("Finished integrating...")
            #logger.log_status("Copying updated (integrated) travel data from cache to OPUS_HOME tmp directory.")
            #self.export_travel_data(year+1)
            #logger.log_status("Finished copying...") 
        # use modified travel data for all following runs
        else:
            logger.log_status("Travel data was modified before. So there is nothing to do...")

        logger.end_block()
Example #21
 def __init__(self, output_dir=None,  year=None):
     ''' Constructor
     '''
     # get working path as an anchor e.g. to determine the config file location.
     self.working_path = test_path.__path__[0]
     print "Working path: %s" % self.working_path
     # get config file location
     self.config_file = os.path.join( self.working_path, 'configs', 'seattle_parcel_travel_cost_test.xml')
     
     # get seattle_parcel configuration
     config = XMLConfiguration( self.config_file ).get_run_configuration( "Seattle_baseline" )
     
     self.input_storage = None
     
     # get first simulation year
     self.year = year
     if self.year == None:
         self.year = config['base_year']
         base_year_data_path = paths.get_opus_data_path_path('seattle_parcel', 'base_year_data')
         attribute_cache = AttributeCache(cache_directory=base_year_data_path)
         self.input_storage = attribute_cache.get_flt_storage_for_year(self.year)
     else:
         attribute_cache = AttributeCache().get_flt_storage_for_year(self.year)
         self.input_storage = attribute_cache
     
     # get output dir path
     output_directory = output_dir
     if output_directory == None:
          # set default
         output_directory = paths.get_opus_home_path('opus_matsim', 'tmp')
     if not os.path.exists( output_directory ):
         try: os.mkdir( output_directory )
         except: pass
     
     # init 
     self.csv_data_path = output_directory # os.path.join(output_directory, 'travel_data_dir')
Example #22
    def modify_travel_data(self):
        """ Modifies the travel times and costs between cbd and study zone 909
            
            @old version
            Modifies the travel times from zone to zone.
            For zone 20 the travel times to all other zones are set to min_travel_time.
            For all other zones the travel time is set to 31 min if the origin travel time
            is less than 30 min; otherwise it is not modified.
        """
        
        # using default cbd 
        cbd = 129
        # set the preferred zone
        study_zone = 908
        # set travel times for the preferred zone and other zones
        min_travel_time = '0.40'    # time in minutes
        min_travel_cost = '3.47'    # travel cost in ???
        
        logger.log_status("Set the following travel time and cost between cbd and study zone:")
        logger.log_status("Zone ID cbd = %s" %cbd)
        logger.log_status("Zone ID study zone = %s" %study_zone)
        logger.log_status("Travel time = %s" %min_travel_time)
        logger.log_status("Travel cost = %s" %min_travel_cost)
        
        travel_data = paths.get_opus_home_path( "opus_matsim", "tmp", "travel_data.csv" )
        if not self.travel_data_exsists(travel_data):
            raise StandardError('Travel data not found! %s' % travel_data)
            
        in_file = open(travel_data, 'r')
        str_list = []
        # read header of travel data to get the indices of the columns (from_zone, to_zone, single_vehicle_travel_time)
        line = in_file.readline()
        # init indices
        get_indices = GetIndices(line)
        index_from_zone = get_indices.get_from_zone_index()
        index_to_zone   = get_indices.get_to_zone_index()
        index_travel_times = get_indices.get_am_single_vehicle_to_work_travel_time_index()
        index_travel_costs = get_indices.get_single_vehicle_to_work_travel_cost_index()
        number_of_colums = get_indices.get_number_of_colums()
        
        # prepare header line for the output file
        row = line.split(',')
        str_list.append( row[index_from_zone].strip('\r\n') +','+ row[index_to_zone].strip('\r\n') +','+ row[index_travel_times].strip('\r\n') + ',' + row[index_travel_costs].strip('\r\n') +'\r\n')
        
        # get first line of the table content
        line = in_file.readline()
        
        # replaces the travel times as described above...
        while line:
            row = line.split(',')
            # consistency check
            if len(row) != number_of_colums:
                raise StandardError('Error in number of columns: %s' % row)
                
            from_zone_id = int(row[index_from_zone].strip('\r\n'))
            to_zone_id = int(row[index_to_zone].strip('\r\n'))
            
            
            # just sets the travel time and cost from cbd2studyzone and 
            # from studyzone2cbd to the defined values above
            if (from_zone_id == cbd and to_zone_id == study_zone):
                row[index_travel_times] = min_travel_time
                row[index_travel_costs] = min_travel_cost
            
            elif (from_zone_id == study_zone and to_zone_id == cbd):
                row[index_travel_times] = min_travel_time
                row[index_travel_costs] = min_travel_cost
        
            # append modified row to the new travel data content
            str_list.append( row[index_from_zone].strip('\r\n') +','+ row[index_to_zone].strip('\r\n') +','+ row[index_travel_times].strip('\r\n') + ',' + row[index_travel_costs].strip('\r\n') +'\r\n')

            line = in_file.readline()
        
        # finished modifying travel data
        in_file.close()
        # now write new travel data onto disc
        out_file = open(travel_data, 'w')
        logger.log_status("Copying modified travel data onto disc.")
        for row in str_list:
            out_file.write(row)
        out_file.close();
        logger.log_status("Finished copy process.")
Example #23
try: import MySQLdb
except: pass
from sqlalchemy import *
import numpy
import math, time, copy, random, os, sys, subprocess
from data_mining.PrintOutput import PrintOutput
from opus_core import paths

#loads system variables                                                                                  
path = paths.get_opus_home_path("src", "data_mining", "SYSTEM_VARIABLES.py")
execfile(path) 

class Query_manager :
    def __init__(self, io_info_element, logCB = None, progressCB = None) :
        
        #For reporting results
        self.printOut = PrintOutput(logCB, progressCB, PROFILING)        
        
        #Storing all the information passed as parameters to the query manager
        self.db_url = io_info_element.attributes["input_db_url"].value
        self.table_name = io_info_element.attributes["input_table_name"].value
        self.x_attribute = io_info_element.attributes["x_column"].value
        self.y_attribute = io_info_element.attributes["y_column"].value
        self.id_attribute = io_info_element.attributes["id_column"].value

        #Forcing certain attributes to be categorical
        self.fclass_atts = []
        if io_info_element.hasAttribute('force_to_class') :
            self.fclass_atts = util_get_attribute_list(io_info_element.attributes["force_to_class"].value)

        #Forcing certain attributes to be numerical
Example #24
    def __init__(self, 
                 protocol = None, 
                 host_name = None, 
                 user_name = None, 
                 password = None,
                 database_configuration = None,
                 test = False,
                 database_server_configuration_file_path = None,
                 sqlite_db_path = None,
                 blob_compression = False):
        
        if database_server_configuration_file_path is None:
            database_server_configuration_file_path = paths.get_opus_home_path('settings', 'database_server_configurations.xml')

        if (protocol is None or test) and host_name is None and user_name is None and password is None:
            if not os.path.exists(database_server_configuration_file_path):
                raise Exception('You do not have a file %s storing information about your database server configurations. Cannot load database.'%database_server_configuration_file_path)
            if database_configuration is None:
                db_node = self._database_configuration_node()
            else:
                db_node = database_configuration
            database_configuration = ElementTree(file = database_server_configuration_file_path).getroot().find(db_node)
            if database_configuration is None:
                raise Exception('Could not find an entry in %s for %s. Cannot load database.'%(database_server_configuration_file_path, db_node))
            self.protocol = database_configuration.find(self.PROTOCOL_TAG).text
            self.host_name = database_configuration.find(self.HOST_NAME_TAG).text
            self.user_name = database_configuration.find(self.USER_NAME_TAG).text
            self.password = database_configuration.find(self.PASSWORD_TAG).text
            blob_compression = database_configuration.find(self.BLOB_COMPRESSION_TAG)
            if blob_compression != None and blob_compression.text == "True":
                blob_compression = True
            else:
                blob_compression = False

        else:
            if protocol is None:
                self.protocol = get_default_database_engine()
            else:
                self.protocol = protocol.lower()
                 
            if host_name is None:
                self.host_name = 'localhost'
            else:
                self.host_name = host_name
    
            if user_name is None:
                self.user_name = ''
            else:
                self.user_name = user_name
    
            if password is None:
                self.password = ''
            else:
                self.password = password
                
        # If the password is the empty string or None, check if it is defined in the environment variable
        # SQLPASSWORD - if so, use that.
        if (self.password is None or self.password=='') and 'SQLPASSWORD' in os.environ:
            self.password = os.environ['SQLPASSWORD']

        self.sqlite_db_path = sqlite_db_path
        self.blob_compression = blob_compression
Example #25
#FOR PLATFORM                                                                                                                          
#If Windows, set the folder separator to backslash
WIN = True
try :
    sys.getwindowsversion() #@UndefinedVariable
except AttributeError :
    WIN = False

FOLDER_TYPE = "\\"
if not WIN :
    FOLDER_TYPE = "/"

    
#loads system variables
path = paths.get_opus_home_path("src", "data_mining")

#System variables

#for outlier detection
BINARY = ""
if WIN :
    BINARY = os.path.join(path, 'models', 'WinLOF.exe')
else :
    BINARY = os.path.join(path, 'models', 'knn_binary_mac')

#FOR OUTPUT
#Prints what is happening with the test
PROFILING = True

#Draws a picture of the training and test blocks (if pygame is installed)
Example #26
    def __init__(self,
                 protocol=None,
                 host_name=None,
                 user_name=None,
                 password=None,
                 database_configuration=None,
                 test=False,
                 database_server_configuration_file_path=None,
                 sqlite_db_path=None,
                 blob_compression=False):

        if database_server_configuration_file_path is None:
            database_server_configuration_file_path = paths.get_opus_home_path(
                'settings', 'database_server_configurations.xml')

        if (protocol is None or test
            ) and host_name is None and user_name is None and password is None:
            if not os.path.exists(database_server_configuration_file_path):
                raise Exception(
                    'You do not have a file %s storing information about your database server configurations. Cannot load database.'
                    % database_server_configuration_file_path)
            if database_configuration is None:
                db_node = self._database_configuration_node()
            else:
                db_node = database_configuration
            database_configuration = ElementTree(
                file=database_server_configuration_file_path).getroot().find(
                    db_node)
            if database_configuration is None:
                raise Exception(
                    'Could not find an entry in %s for %s. Cannot load database.'
                    % (database_server_configuration_file_path, db_node))
            self.protocol = database_configuration.find(self.PROTOCOL_TAG).text
            self.host_name = database_configuration.find(
                self.HOST_NAME_TAG).text
            self.user_name = database_configuration.find(
                self.USER_NAME_TAG).text
            self.password = database_configuration.find(self.PASSWORD_TAG).text
            blob_compression = database_configuration.find(
                self.BLOB_COMPRESSION_TAG)
            if blob_compression != None and blob_compression.text == "True":
                blob_compression = True
            else:
                blob_compression = False

        else:
            if protocol is None:
                self.protocol = get_default_database_engine()
            else:
                self.protocol = protocol.lower()

            if host_name is None:
                self.host_name = 'localhost'
            else:
                self.host_name = host_name

            if user_name is None:
                self.user_name = ''
            else:
                self.user_name = user_name

            if password is None:
                self.password = ''
            else:
                self.password = password

        # If the password is the empty string or None, check if it is defined in the environment variable
        # SQLPASSWORD - if so, use that.
        if (self.password is None
                or self.password == '') and 'SQLPASSWORD' in os.environ:
            self.password = os.environ['SQLPASSWORD']

        self.sqlite_db_path = sqlite_db_path
        self.blob_compression = blob_compression
Example #27
    def run(self):
        """
        """
        logger.start_block("Starting RunDummyTravelModel.run(...)")
        
        print >> sys.stderr, "\nThis should also check if get_cache_data_into_matsim did something reasonable"
        
        logger.log_status('would normally run MATSim')
        
#        if not (sys.path == None) and len(sys.path) > 0:
#            module_path = sys.path[0]
#            logger.log_note("project path: %s" % module_path)
#        
#        in_file_name = os.path.join( module_path, "data", "travel_data_manipulated.csv" )
#        logger.log_note("open file : %s" % in_file_name)
#        file_in = open(in_file_name, 'r')
        out_file_name = paths.get_opus_home_path( "opus_matsim", "tmp", "travel_data.csv" )
        logger.log_note("open file : %s" % out_file_name)
        file_out = open(out_file_name, 'w')
        
        # cbd_zone = "129"
        
        file_out.write("from_zone_id:i4,to_zone_id:i4,single_vehicle_to_work_travel_cost:f4\n")
        file_out.write("1,1,0.0\n")
        file_out.write("1,102,999.9999999999999\n")
        file_out.write("1,109,999.9999999999999\n")
        file_out.write("1,126,999.9999999999999\n")
        file_out.write("1,128,999.9999999999999\n")
        file_out.write("1,134,999.9999999999999\n")
        file_out.write("1,139,999.9999999999999\n")
        file_out.write("1,140,999.9999999999999\n")
        file_out.write("1,2,999.9999999999999\n")
        file_out.write("102,1,999.9999999999999\n")
        file_out.write("102,102,0.0\n")
        file_out.write("102,109,999.9999999999999\n")
        file_out.write("102,126,999.9999999999999\n")
        file_out.write("102,128,999.9999999999999\n")
        file_out.write("102,134,999.9999999999999\n")
        file_out.write("102,139,999.9999999999999\n")
        file_out.write("102,140,999.9999999999999\n")
        file_out.write("102,2,999.9999999999999\n")
        file_out.write("109,1,999.9999999999999\n")
        file_out.write("109,102,999.9999999999999\n")
        file_out.write("109,109,0.0\n")
        file_out.write("109,126,999.9999999999999\n")
        file_out.write("109,128,999.9999999999999\n")
        file_out.write("109,134,999.9999999999999\n")
        file_out.write("109,139,999.9999999999999\n")
        file_out.write("109,140,999.9999999999999\n")
        file_out.write("109,2,999.9999999999999\n")
        file_out.write("126,1,999.9999999999999\n")
        file_out.write("126,102,999.9999999999999\n")
        file_out.write("126,109,999.9999999999999\n")
        file_out.write("126,126,0.0\n")
        file_out.write("126,128,999.9999999999999\n")
        file_out.write("126,134,999.9999999999999\n")
        file_out.write("126,139,999.9999999999999\n")
        file_out.write("126,140,999.9999999999999\n")
        file_out.write("126,2,999.9999999999999\n")
        file_out.write("128,1,999.9999999999999\n")
        file_out.write("128,102,999.9999999999999\n")
        file_out.write("128,109,999.9999999999999\n")
        file_out.write("128,126,999.9999999999999\n")
        file_out.write("128,128,0.0\n")
        file_out.write("128,134,999.9999999999999\n")
        file_out.write("128,139,999.9999999999999\n")
        file_out.write("128,140,999.9999999999999\n")
        file_out.write("128,2,999.9999999999999\n")
        file_out.write("134,1,999.9999999999999\n")
        file_out.write("134,102,999.9999999999999\n")
        file_out.write("134,109,999.9999999999999\n")
        file_out.write("134,126,999.9999999999999\n")
        file_out.write("134,128,999.9999999999999\n")
        file_out.write("134,134,0.0\n")
        file_out.write("134,139,999.9999999999999\n")
        file_out.write("134,140,999.9999999999999\n")
        file_out.write("134,2,999.9999999999999\n")
        file_out.write("139,1,999.9999999999999\n")
        file_out.write("139,102,999.9999999999999\n")
        file_out.write("139,109,999.9999999999999\n")
        file_out.write("139,126,999.9999999999999\n")
        file_out.write("139,128,999.9999999999999\n")
        file_out.write("139,134,999.9999999999999\n")
        file_out.write("139,139,0.0\n")
        file_out.write("139,140,999.9999999999999\n")
        file_out.write("139,2,999.9999999999999\n")
        file_out.write("140,1,999.9999999999999\n")
        file_out.write("140,102,999.9999999999999\n")
        file_out.write("140,109,999.9999999999999\n")
        file_out.write("140,126,999.9999999999999\n")
        file_out.write("140,128,999.9999999999999\n")
        file_out.write("140,134,999.9999999999999\n")
        file_out.write("140,139,999.9999999999999\n")
        file_out.write("140,140,0.0\n")
        file_out.write("140,2,999.9999999999999\n")
        file_out.write("2,1,999.9999999999999\n")
        file_out.write("2,102,999.9999999999999\n")
        file_out.write("2,109,999.9999999999999\n")
        file_out.write("2,126,999.9999999999999\n")
        file_out.write("2,128,999.9999999999999\n")
        file_out.write("2,134,999.9999999999999\n")
        file_out.write("2,139,999.9999999999999\n")
        file_out.write("2,140,999.9999999999999\n")
        file_out.write("2,2,0.0\n")
       
        try:
            #file_in.close()
            file_out.close()
        except: logger.log_warning("file not closed")
        
        logger.end_block()
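
For readability, a hedged sketch equivalent to the hard-coded writes above, generated with two loops (zone order, header, and values copied from the rows written; the function name is hypothetical):

def write_dummy_travel_data(out_file_name):
    # Hypothetical sketch: reproduces the table above -- 0.0 on the diagonal,
    # the dummy high cost everywhere else, zones in the original row order.
    zones = [1, 102, 109, 126, 128, 134, 139, 140, 2]
    with open(out_file_name, 'w') as out:
        out.write("from_zone_id:i4,to_zone_id:i4,single_vehicle_to_work_travel_cost:f4\n")
        for origin in zones:
            for destination in zones:
                cost = "0.0" if origin == destination else "999.9999999999999"
                out.write("%s,%s,%s\n" % (origin, destination, cost))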
Example #28
 def get_default_configuration_file_path(cls):
     return paths.get_opus_home_path('settings', cls.DEFAULT_FILE_NAME)
Example #29
 def get_default_configuration_file_path(cls):
     return paths.get_opus_home_path('settings', cls.DEFAULT_FILE_NAME)
Example #30
    def __init__(self, config, year):
        """ Constructor
        """
        #try: # tnicolai :for debugging
        #    import pydevd
        #    pydevd.settrace()
        #except: pass

        self.config_dictionary = config
        self.sub_config_exists = False
        self.config_destination_location = None
        
        # get directories
        self.opus_home = paths.get_opus_home_path()
        self.opus_data_path = paths.get_opus_data_path_path()
        self.matsim4opus_path = paths.get_opus_home_path( 'matsim4opus' )
        self.checkAndCreateFolder(self.matsim4opus_path)
        cache_directory = config['cache_directory']
        matsim4opus_target_path = os.path.join(cache_directory, 'matsim4opus') 
        self.matsim_config_path = os.path.join( matsim4opus_target_path, 'matsim_config' )
        
        
        # get sub dictionaries from travel model configuration

        # get travel model parameter from the opus dictionary
        travel_model_configuration = self.config_dictionary['travel_model_configuration']   # contains matsim4urbansim and matsim_config parameter
        
        # matsim4urbansim
        self.matsim4urbansim_dict = travel_model_configuration['matsim4urbansim']                     # contains parameter for matsim/urbansim integration
        
        # matsim_config
        self.matsim_config_dict = travel_model_configuration['matsim_config']                         # contains various matsim_config parameter 
        self.accessibility_dict = self.matsim_config_dict['accessibility']
        self.urbansim_zone_random_location_distribution_dict = self.matsim_config_dict['urbansim_zone_random_location_distribution']
        self.common_dict = self.matsim_config_dict['common']
        self.plan_calc_score_dict = self.matsim_config_dict['plan_calc_score']
        
        ###########################
        # matsim4urbansim parameter
        ###########################
        self.matsim4urbansim_population_sampling_rate =   self.matsim4urbansim_dict['population_sampling_rate']
        self.matsim4urbansim_custom_parameter =   self.matsim4urbansim_dict['custom_parameter']
        self.matsim4urbansim_backup = self.__get_value_as_boolean('backup_run_data',   self.matsim4urbansim_dict['backup'])
        self.matsim4urbansim_matsim_data_to_compute_zone2zone_impedance = self.__get_value_as_boolean('zone2zone_impedance',   self.matsim4urbansim_dict['matsim_data_to_compute'])
        self.matsim4urbansim_matsim_data_to_compute_agent_performance = self.__get_value_as_boolean('agent_performance',  self.matsim4urbansim_dict['matsim_data_to_compute'])
        self.matsim4urbansim_matsim_data_to_compute_zone_based_accessibility = self.__get_value_as_boolean('zone_based_accessibility',   self.matsim4urbansim_dict['matsim_data_to_compute'])
        self.matsim4urbansim_matsim_data_to_compute_parcel_based_accessibility = self.__get_value_as_boolean('parcel_based_accessibility',   self.matsim4urbansim_dict['matsim_data_to_compute'])
        self.matsim4urbansim_year = year
        self.matsim4urbansim_matsim_config_path = os.path.join( matsim4opus_target_path, 'matsim_config' )
        self.checkAndCreateFolder(self.matsim4urbansim_matsim_config_path)
        self.matsim4urbansim_matsim_output_path = os.path.join( matsim4opus_target_path, 'output' )
        self.checkAndCreateFolder(self.matsim4urbansim_matsim_output_path)
        self.matsim4urbansim_matsim_temp_path = os.path.join( matsim4opus_target_path, 'tmp' )
        self.checkAndCreateFolder(self.matsim4urbansim_matsim_temp_path)
        
        
        ###########################
        # matsim_config parameter
        ###########################
        self.matsim_config_urbansim_zone_random_location_distribution_by_radius = self.urbansim_zone_random_location_distribution_dict['by_radius']
        self.matsim_config_urbansim_zone_random_location_distribution_by_shape_file = self.__get_string_value(self.urbansim_zone_random_location_distribution_dict['by_zone_shape_file'])
        
        # matsim_config/accessibility parameter
        self.matsim_config_accessibility_cell_size = self.accessibility_dict['cell_size']
        self.matsim_config_accessibility_study_area_boundary_shape_file = self.__get_string_value(self.accessibility_dict['study_area_boundary_shape_file'])
        self.matsim_config_accessibility_bounding_box_left = self.accessibility_dict['bounding_box_left']
        self.matsim_config_accessibility_bounding_box_bottom = self.accessibility_dict['bounding_box_bottom']
        self.matsim_config_accessibility_bounding_box_top = self.accessibility_dict['bounding_box_top']
        self.matsim_config_accessibility_bounding_box_right = self.accessibility_dict['bounding_box_right']
        accessibility_computation_area = self.accessibility_dict['accessibility_computation_area'] # loading sub dictionary ...
        if len(accessibility_computation_area) != 1:
            logger.log_error("Please select ONE item in 'travel_model_configuration/matsim_config/accessibility/accessibility_computation_area' to determine the study area for the accessibility computation!")
            exit()
        self.matsim_config_accessibility_accessibility_computation_area_from_shapefile = self.__get_value_as_boolean( 'from_shapefile', accessibility_computation_area )
        self.matsim_config_accessibility_accessibility_computation_area_from_bounding_box = self.__get_value_as_boolean( 'from_bounding_box', accessibility_computation_area )
        self.matsim_config_accessibility_accessibility_computation_area_from_network = self.__get_value_as_boolean( 'from_network', accessibility_computation_area )
        
        # matsim_config/common parameter
        self.matsim_config_common_network_file = self.__get_file_location( self.common_dict['network'], required=True)
        self.matsim_config_common_first_iteration = 0
        self.matsim_config_common_last_iteration = self.common_dict['last_iteration']
        self.matsim_config_common_external_matsim_configuration = self.__get_external_matsim_config_for_current_year(self.common_dict['external_matsim_config'], year)
        self.matsim_config_common_warm_start_plans_file = self.__get_plans_file(self.common_dict, 'warm_start_plans_file')
        self.matsim_config_common_use_hot_start = self.__get_value_as_boolean( 'use_hot_start', self.common_dict['hot_start'] )
        self.matsim_config_common_hot_start_plans_file = ''
        if self.matsim_config_common_use_hot_start:
            self.matsim_config_common_hot_start_plans_file = os.path.join(matsim4opus_target_path, 'hot_start_plans_file.xml.gz')
        
        # matsim_config/plan_calc_score parameter
        self.matsim_config_plan_calc_score_work_activity_opening_time = self.plan_calc_score_dict['work_activity_opening_time']
        self.matsim_config_plan_calc_score_home_activity_typical_duration = self.plan_calc_score_dict['home_activity_typical_duration']
        self.matsim_config_plan_calc_score_work_activity_typical_duration = self.plan_calc_score_dict['work_activity_typical_duration']
        self.matsim_config_plan_calc_score_work_activity_latest_start_time = self.plan_calc_score_dict['work_activity_latest_start_time']
        self.matsim_config_plan_calc_score_activityType_0 = 'home'
        self.matsim_config_plan_calc_score_activityType_1 = 'work'

        # setting destination location for generated matsim config       
        self.config_destination_location = os.path.join( self.matsim_config_path, config['project_name'] + "_matsim_config.xml"  )
        logger.log_status('MATSim4UrbanSim config file will be written to %s' %self.config_destination_location)
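For orientation, a hedged sketch of the nested 'travel_model_configuration' dictionary this constructor expects, reconstructed only from the lookups above; every value is an illustrative placeholder, not a project default, and the sub-structures passed to __get_value_as_boolean are assumed to be dictionaries.

# Assumed shape only -- keys mirror the dictionary accesses in the constructor
# above; values are made-up placeholders.
travel_model_configuration = {
    'matsim4urbansim': {
        'population_sampling_rate': 0.01,
        'custom_parameter': '',
        'backup': {'backup_run_data': True},
        'matsim_data_to_compute': {'zone2zone_impedance': True,
                                   'agent_performance': False,
                                   'zone_based_accessibility': True,
                                   'parcel_based_accessibility': False},
    },
    'matsim_config': {
        'urbansim_zone_random_location_distribution': {'by_radius': 0,
                                                       'by_zone_shape_file': ''},
        'accessibility': {'cell_size': 100,
                          'study_area_boundary_shape_file': '',
                          'bounding_box_left': 0, 'bounding_box_bottom': 0,
                          'bounding_box_top': 0, 'bounding_box_right': 0,
                          'accessibility_computation_area': {'from_network': True}},
        'common': {'network': 'path/to/network.xml',
                   'last_iteration': 100,
                   'external_matsim_config': '',
                   'warm_start_plans_file': '',
                   'hot_start': {'use_hot_start': False}},
        'plan_calc_score': {'work_activity_opening_time': 7 * 3600,
                            'home_activity_typical_duration': 12 * 3600,
                            'work_activity_typical_duration': 8 * 3600,
                            'work_activity_latest_start_time': 9 * 3600},
    },
}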
Example #31
    def create_travel_model_input_file(self, config, year, *args, **kwargs):
        """Constructs and writes a persons and jobs table. 
        Both are associated with a parcels table (also constructed here) storing locations (x and y coordinates) of each person and job .
        """

        logger.start_block('Starting GetCacheDataIntoMatsim.run(...)')
        
        #try: # tnicolai :for debugging
        #    import pydevd
        #    pydevd.settrace()
        #except: pass
        
        # I guess this is access to the full UrbanSim cache data.
        source_data = SourceData(
            cache_directory = config['cache_directory'],
            years = [year],
            dataset_pool_configuration = DatasetPoolConfiguration(
                package_order=['psrc_parcel','urbansim_parcel','psrc', 'urbansim','opus_core'],
                ),
        )            
        
        output_root = paths.get_opus_home_path( matsim4opus ) 
        if not os.path.exists( output_root ):
            try: os.mkdir( output_root )
            except: pass
        
        self.output_directory = paths.get_opus_home_path( matsim4opus, matsim_temp )
        if not os.path.exists( self.output_directory ):
            try: os.mkdir(self.output_directory)
            except: pass

        
        ### PERSONS HOME LOCATION ###############################
        
        self.dataset_table_persons = DatasetTable(
                attributes = [ # TODO: ADD HOME XY-COORDINATES AND WORK XY_COORDINATES
                    'parcel_id_home = person.disaggregate(parcel.parcel_id, intermediates=[building,household])',
                    'x_coord = person.disaggregate(parcel.x_coord_sp, intermediates=[building,household])',
                    'y_coord = person.disaggregate(parcel.y_coord_sp, intermediates=[building,household])',
                    ],
                dataset_name = 'person',
                # exclude_condition = 'person.matsim_flag==0',
                storage_location = self.output_directory,
                source_data = source_data,
                output_type = 'csv',
                name = 'home_location',
                )
        
        export_indicators_persons = [ self.dataset_table_persons ]
        
        # executing the export persons
        IndicatorFactory().create_indicators(
             indicators = export_indicators_persons,
             display_error_box = False, 
             show_results = False)
        
        ### JOB LOCATION ###############################
        
        self.dataset_table_jobs = DatasetTable(
                attributes = [ # TODO: ADD HOME XY-COORDINATES AND WORK XY_COORDINATES
                    'parcel_id_job = job.disaggregate(parcel.parcel_id, intermediates=[building])',
                    'x_coord = job.disaggregate(parcel.x_coord_sp, intermediates=[building])',
                    'y_coord = job.disaggregate(parcel.y_coord_sp, intermediates=[building])',
                    ],
                dataset_name = 'job',
                # exclude_condition = 'person.matsim_flag==0',
                storage_location = self.output_directory,
                source_data = source_data,
                output_type = 'csv',
                name = 'job_location',
                )
        
        export_indicators_jobs = [ self.dataset_table_jobs ]
        
        # executing the export jobs
        IndicatorFactory().create_indicators(
             indicators = export_indicators_jobs,
             display_error_box = False, 
             show_results = False)
                
        logger.end_block()        
Example #32
    def __init__(self, config, year):
        """ Constructor
        """

        self.config_dictionary = config
        self.sub_config_exists = False
        self.config_destination_location = None
        
        # get sub dictionaries from travel model configuration
        travel_model_configuration, matsim4urbansim_part, common_matsim_part = self.__get_travel_model_sub_dictionaries()
        
        # network parameter
        try:    # checks if sub config for matsim network exists
            self.sub_config_exists = (common_matsim_part['matsim_network_file'] != None)
        except: pass
        if self.sub_config_exists:
            self.check_abolute_path( common_matsim_part['matsim_network_file'] )
            self.network_file = paths.get_opus_home_path( common_matsim_part['matsim_network_file'] )
        else:
            raise StandardError('No network given in the  "travel_model_configuration" of your current configuration file. A network is required in order to run MATSim. ')
        self.sub_config_exists = False
        
        # input plans file parameter
        self.input_plans_file = self.__get_plans_file(common_matsim_part, 'input_plans_file')
        self.hotstart_plans_file = self.__get_plans_file(common_matsim_part, 'hotstart_plans_file')
        
        # controler parameter
        self.first_iteration = first_iteration
        self.last_iteration = common_matsim_part['last_iteration']
        
        # planCalcScoreType
        self.activityType_0 = activity_type_0
        self.activityType_1 = activity_type_1
        
        # urbansim parameter
        self.year = year
        self.population_sampling_rate = matsim4urbansim_part['sampling_rate']

        self.opus_home = paths.get_opus_home_path()
        self.opus_data_path = paths.get_opus_data_path_path()
        
        self.matsim4opus_path = paths.get_opus_home_path( matsim4opus )
        self.checkAndCreateFolder(self.matsim4opus_path)
        self.matsim_config_path = os.path.join( self.matsim4opus_path, matsim_config )
        self.checkAndCreateFolder(self.matsim_config_path)
        self.matsim_output_path = os.path.join( self.matsim4opus_path, matsim_output )
        self.checkAndCreateFolder(self.matsim_output_path)
        self.matsim_temp_path = os.path.join( self.matsim4opus_path, matsim_temp )
        self.checkAndCreateFolder(self.matsim_temp_path)
        
        self.isTestRun = False
        self.test_parameter = ""
        try:
            self.test_parameter = common_matsim_part[ test_parameter ]
        except: pass
        self.backup_run_data = False
        try:
            self.backup_run_data = common_matsim_part[ backup_run_data ]
        except: pass
        
        self.firstRun = "FALSE"
        try: # determine for MATSim if this is the first run
            if travel_model_configuration['start_year'] == year:
                self.firstRun = "TRUE"
        except: pass
        
        self.config_destination_location = self.__set_config_destination( self.config_dictionary )
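
The constructor above reads its parameters from nested sub-dictionaries of the travel model configuration (network file, input and hotstart plans files, iteration bounds, sampling rate) and creates the matsim4opus folder hierarchy under OPUS_HOME. The exact nesting is produced by __get_travel_model_sub_dictionaries(); the fragment below only illustrates the kind of leaf keys the constructor looks up, and the sub-dictionary names are assumptions.

# Illustrative configuration fragment; only the leaf keys are taken from the
# constructor above, the sub-dictionary names are assumed.
config = {
    'travel_model_configuration': {
        'start_year': 2001,                 # compared against the simulated year to set firstRun
        'matsim4urbansim': {                # assumed name of the matsim4urbansim sub-dictionary
            'sampling_rate': 0.01,          # becomes self.population_sampling_rate
        },
        'common_matsim': {                  # assumed name of the common MATSim sub-dictionary
            'matsim_network_file': 'opus_matsim/data/network.xml',  # path relative to OPUS_HOME
            'last_iteration': 100,          # becomes self.last_iteration
        },
    },
}
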
Example #33
    def create_travel_model_input_file(self, config, year, *args, **kwargs):
        """Constructs and writes a persons and jobs table. 
        Both are associated with a parcels table (also constructed here) storing locations (x and y coordinates) of each person and job .
        """

        logger.start_block('Starting GetCacheDataIntoMatsim.run(...)')
        
        #try: # tnicolai :for debugging
        #    import pydevd
        #    pydevd.settrace()
        #except: pass
        
        # I guess this is access to the full UrbanSim cache data.
        source_data = SourceData(
            cache_directory = config['cache_directory'],
            years = [year],
            dataset_pool_configuration = DatasetPoolConfiguration(
                package_order=['psrc_parcel','urbansim_parcel','psrc', 'urbansim','opus_core'],
                ),
        )            
        
        output_root = paths.get_opus_home_path( matsim4opus ) 
        if not os.path.exists( output_root ):
            try: os.mkdir( output_root )
            except: pass
        
        self.output_directory = paths.get_opus_home_path( matsim4opus, matsim_temp )
        if not os.path.exists( self.output_directory ):
            try: os.mkdir(self.output_directory)
            except: pass
                
        ### Jobs ###############################
        
        self.dataset_table_jobs = DatasetTable(
                attributes = [
                    'parcel_id_work = job.disaggregate(parcel.parcel_id, intermediates=[building])',
                    'zone_id_work = job.disaggregate(zone.zone_id, intermediates=[parcel,building])'
                    ],
                dataset_name = 'job',
                # exclude_condition = 'person.matsim_flag==0',
                storage_location = self.output_directory,
                source_data = source_data,
                output_type = 'tab',
                name = 'exported_indicators',
                )
        
        export_indicators_jobs = [ self.dataset_table_jobs ]
        
        # executing the export jobs
        IndicatorFactory().create_indicators(
             indicators = export_indicators_jobs,
             display_error_box = False, 
             show_results = False)
        
        ### PERSONS ###############################
        
        self.dataset_table_persons = DatasetTable(
                attributes = [ # TODO: ADD HOME XY-COORDINATES AND WORK XY_COORDINATES
                    'parcel_id_home = person.disaggregate(parcel.parcel_id, intermediates=[building,household])',
                    'parcel_id_work = person.disaggregate(parcel.parcel_id, intermediates=[building,job])',
                    ],
                dataset_name = 'person',
                # exclude_condition = 'person.matsim_flag==0',
                storage_location = self.output_directory,
                source_data = source_data,
                output_type = 'tab',
                name = 'exported_indicators',
                )
        
        export_indicators_persons = [ self.dataset_table_persons ]
        
        # executing the export persons
        IndicatorFactory().create_indicators(
             indicators = export_indicators_persons,
             display_error_box = False, 
             show_results = False)
        
        ### FACILITIES ###############################
        
        self.dataset_table_parcels = DatasetTable(
                attributes = [
                    'parcel.x_coord_sp',
                    'parcel.y_coord_sp',
                    'parcel.zone_id',
                    # tnicolai: weight for parcels, to determine the zone centroid in MATSim more precisely
                    # a zone with 1 forest parcel and 10 residential parcels -> centroid near the residences
                    # a zone with 1 apartment-block parcel and 20 single-family-home parcels -> centroid near the apartment block
                    ],
                dataset_name = 'parcel',
                storage_location = self.output_directory,
                source_data = source_data,
                output_type = 'tab',
                name = 'exported_indicators',
                )
        
        export_indicators_parcels = [ self.dataset_table_parcels ]
        
        # executing the export parcels
        IndicatorFactory().create_indicators(
             indicators = export_indicators_parcels,
             display_error_box = False, 
             show_results = False)
                
        logger.end_block()        
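
The method above writes three tab-separated indicator tables into the matsim4opus temp directory, all under the indicator name 'exported_indicators': jobs with their work parcel and zone, persons with their home and work parcel, and parcels with their coordinates and zone. The sketch below joins the parcel table back to the person table to recover home coordinates. The output directory and file names are assumptions about how the indicator framework composes its output, and the parcel export is assumed to carry the parcel_id identifier column.

import csv
import os

# Assumed output location and file names -- adjust to what the indicator
# framework actually writes on your system.
output_directory = os.path.join(os.environ['OPUS_HOME'], 'matsim4opus', 'tmp')
parcel_tab = os.path.join(output_directory, 'parcel__dataset_table__exported_indicators__2001.tab')
person_tab = os.path.join(output_directory, 'person__dataset_table__exported_indicators__2001.tab')

# Build a lookup from parcel_id to its coordinates ...
coords = {}
with open(parcel_tab, 'rb') as parcel_file:
    for row in csv.DictReader(parcel_file, delimiter='\t'):
        coords[row['parcel_id']] = (row['x_coord_sp'], row['y_coord_sp'])

# ... and attach home coordinates to every exported person.
with open(person_tab, 'rb') as person_file:
    for row in csv.DictReader(person_file, delimiter='\t'):
        x, y = coords.get(row['parcel_id_home'], (None, None))
        print row['parcel_id_home'], x, y
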
Example #34
 def __init__(self):
     UrbansimZoneConfiguration.__init__(self)
     
     config_changes = {
         'project_name':'eugene_zone',
         'description':'Eugene zone baseline',
         'base_year':1980,
         'years':(1981, 1985),
         'debuglevel': 4,
         'models': [
             'real_estate_price_model',
             'development_project_transition_model',
             'commercial_development_project_location_choice_model',
             'industrial_development_project_location_choice_model',
             'residential_development_project_location_choice_model',
             'add_projects_to_buildings',
             'household_transition_model',
             'employment_transition_model',
             'household_relocation_model',
             'household_location_choice_model',
             'employment_relocation_model',
             {   'employment_location_choice_model': {   'group_members': '_all_'}},
             'distribute_unplaced_jobs_model',
             ],
         'scenario_database_configuration': ScenarioDatabaseConfiguration(database_name = 'eugene_1980_baseyear_zone'),
         'cache_directory': paths.get_opus_home_path('data/eugene_zone/base_year_data'),
         'creating_baseyear_cache_configuration':CreatingBaseyearCacheConfiguration(
             cache_directory_root = paths.get_opus_home_path('data/eugene_zone/runs'),
             cache_from_database = False,
             baseyear_cache = BaseyearCacheConfiguration(
                 existing_cache_to_copy = paths.get_opus_home_path('data/eugene_zone/base_year_data')
                 ),
             cache_scenario_database = 'urbansim.model_coordinators.cache_scenario_database',
             tables_to_cache = [
                 'annual_employment_control_totals',
                 'annual_household_control_totals',
                 'households',
                 'job_building_types',
                 'building_types',
                 'jobs',
                 'travel_data',
                 'zones',
                 'pseudo_buildings',
                 'counties',
                 'commercial_development_location_choice_model_coefficients',
                 'commercial_development_location_choice_model_specification',
                 'commercial_employment_location_choice_model_coefficients',
                 'commercial_employment_location_choice_model_specification',
                 'home_based_employment_location_choice_model_specification',
                 'home_based_employment_location_choice_model_coefficients',
                 'industrial_employment_location_choice_model_coefficients',
                 'industrial_employment_location_choice_model_specification',
                 'industrial_development_location_choice_model_coefficients',
                 'industrial_development_location_choice_model_specification',
                 'residential_development_location_choice_model_coefficients',
                 'residential_development_location_choice_model_specification',
                 #'fazes',
                 'urbansim_constants',
                 'household_location_choice_model_coefficients',
                 'household_location_choice_model_specification',
                 'household_characteristics_for_ht',
                 'annual_relocation_rates_for_households',
                 'annual_relocation_rates_for_jobs',
                 'base_year',
                 'cities',
                 'development_event_history',
                 'employment_adhoc_sector_group_definitions',
                 'employment_adhoc_sector_groups',
                 'employment_sectors',
                 'race_names',
                 'target_vacancies',
                 'jobs_for_estimation',
                 'households_for_estimation',
                 ],
             unroll_gridcells= False
             ),
         'dataset_pool_configuration': DatasetPoolConfiguration(
             package_order=['eugene_zone', 'eugene', 'urbansim_zone', 'urbansim', 'opus_core'],
             ),
         }
     self.merge(config_changes)
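
The configuration class above follows the usual OPUS pattern: derive from a base configuration, assemble a dictionary of scenario-specific changes, and merge it into the base. A minimal, self-contained sketch of the same pattern (class and key names here are illustrative, not taken from the OPUS sources):

# Minimal sketch of the derive-and-merge pattern used above; the class and
# key names are illustrative only.
class BaseConfiguration(dict):
    """A base configuration with defaults, behaving like a dictionary."""
    def __init__(self):
        dict.__init__(self, {'debuglevel': 0, 'models': []})

    def merge(self, changes):
        # Overwrite defaults with scenario-specific values.
        self.update(changes)

class MyScenarioConfiguration(BaseConfiguration):
    def __init__(self):
        BaseConfiguration.__init__(self)
        config_changes = {
            'project_name': 'my_scenario',
            'base_year': 2000,
            'years': (2001, 2005),
        }
        self.merge(config_changes)

config = MyScenarioConfiguration()
print config['project_name'], config['base_year'], config['years']
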