Beispiel #1
0
    def _read_xml_from_config(self, xml_config):
        ''' Pull the DOM roots and shadowing information out of an
        XMLConfiguration object and store them on this project.
        '''
        self.xml_config = xml_config
        self._root_node = xml_config.full_tree.getroot()

        # Fall back to the root of an empty default configuration when the
        # loaded configuration does not inherit from anything.
        if xml_config.inherited_tree is None:
            self._inherited_root = XMLConfiguration().full_tree.getroot()
        else:
            self._inherited_root = xml_config.inherited_tree

        # Index nodes by identity string: once for the inherited tree, once
        # for the local (non-inherited) nodes of this project's tree.
        inherited_by_id = {}
        for inherited_node in self._inherited_root.getiterator():
            inherited_by_id[node_identity_string(inherited_node)] = inherited_node
        local_by_id = {}
        for local_node in self._root_node.getiterator():
            if not local_node.get('inherited'):
                local_by_id[node_identity_string(local_node)] = local_node

        # A local node whose identity also occurs in the inherited tree is
        # shadowing that inherited node.
        self._shadowing_nodes = {}
        for identity, local_node in local_by_id.items():
            inherited_match = inherited_by_id.get(identity)
            if inherited_match is not None:
                self._shadowing_nodes[local_node] = inherited_match

        # Project name comes from the XML when present, else a default.
        name_node = self.find('./general/project_name')
        self.name = name_node.text if name_node is not None else 'unnamed_project'
        os.environ['OPUSPROJECTNAME'] = self.name
        self.dirty = False
Beispiel #2
0
    def setUp(self):
        ''' Prepare a MATSim export/import test run.

        Resolves the test-data root path, creates a temporary destination
        directory, and loads the "Test" scenario from the UrbanSim XML
        configuration.  Raises StandardError when the test-data root does
        not exist.
        '''
        print "entering setUp"
        
        logger.log_status('Testing UrbanSim export and import functionallity for MATSim...')

        # get root path to test cases
        self.path = test_path.__path__[0]
        logger.log_status('Set root path for MATSim config file to: %s' % self.path)
        if not os.path.exists(self.path):
            raise StandardError("Root path doesn't exist: %s" % self.path)
        
        # get path to MATSim source files and base_year_cache
        self.matsim_source, self.base_year_data_source = self.get_source_files() 
        # temp dir is where the generated MATSim config file will be written
        self.destination = tempfile.mkdtemp(prefix='opus_tmp')
        #self.destination = '/Users/thomas/Desktop/x'    # for debugging
        #if not os.path.exists(self.destination):        # for debugging
        #    os.mkdir(self.destination)                  # for debugging
                    
        # load UrbanSim config to run MATSim test
        urbansim_config_location = os.path.join( self.path, 'configs', 'urbansim_config') #'/Users/thomas/Development/workspace/urbansim_trunk/opus_matsim/tests/test_config.xml'
        logger.log_status('Loading UrbanSim config: %s' % urbansim_config_location)
        #self.run_config = XMLConfiguration( urbansim_config_location ).get_run_configuration("Test")
        urbansim_config_name = "urbansim_config_for_matsim_run_test.xml"
        self.run_config = XMLConfiguration( os.path.join(urbansim_config_location, urbansim_config_name)).get_run_configuration("Test")
        
        # set destination for MATSim config file
        self.matsim_config_full = os.path.join( self.destination, "test_matsim_config.xml" )
        
        print "leaving setUp"
Beispiel #3
0
def make_zone_dbfs(cache_directory):
    xmlconfig = XMLConfiguration(filename="sanfrancisco.xml", 
                                 default_directory=r'C:\opus\project_configs',
                                 is_parent=False)
    runconfig = xmlconfig.get_run_configuration("sanfrancisco_baseline2009", merge_controllers=True)
    tm_config = runconfig['travel_model_configuration']
    print tm_config['urbansim_to_tm_variable_mapping']

    travel_model_years = []
    for key in tm_config.iterkeys():
        if isinstance(key,int) and tm_config[key].has_key('year_dir'):
            travel_model_years.append(key)
    travel_model_years.sort()
    
    zonedbfs_source_data = SourceData(
        cache_directory = cache_directory,
        run_description = "Run description is used for what?",
        years = travel_model_years,
        dataset_pool_configuration = DatasetPoolConfiguration(
            package_order=['sanfrancisco','urbansim','opus_core'],
        ),
    )

    attrs = []
    for key,val in tm_config['urbansim_to_tm_variable_mapping'].iteritems():
        key = key.replace(".", "_")
        attrs.append("%s=%s" % (key,val))
        
    attrs.extend([\
      "pwac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_by_population",
      "pwac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_by_population",
      "pwac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_by_population",
      "pwac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_by_population",
      "pwac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_by_population",
      "ewac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_to_employment",
      "ewac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_to_employment",
      "ewac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_to_employment",
      "ewac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_to_employment",
      "ewac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_to_employment",
      "ttpw_bus=sanfrancisco.zone.bus_travel_time_to_751",
      "ttpw_exp=sanfrancisco.zone.exp_travel_time_to_751",
      "ttpw_lrt=sanfrancisco.zone.lrt_travel_time_to_751",
      "ttpw_bart=sanfrancisco.zone.bart_travel_time_to_751",
      "ttpw_hwy=sanfrancisco.zone.hwy_travel_time_to_751",      
      "d2powell=sanfrancisco.zone.dist_travel_time_to_751"
    ])

    zonedbf_indicators = [ DatasetTable(
        source_data = zonedbfs_source_data,
        dataset_name = 'zone',
        name = 'zone Indicators',
        output_type='dbf',
        attributes = attrs
        ) ]
                       
    IndicatorFactory().create_indicators(indicators = zonedbf_indicators,
                                         display_error_box = False,
                                         show_results = False)
def make_zone_dbfs(cache_directory):
    """Create zone-level DBF indicator tables from a simulation cache.

    Loads the sanfrancisco XML project configuration, determines the
    travel-model years, builds an attribute list from the
    urbansim-to-travel-model variable mapping plus a fixed set of
    accessibility indicators, and runs the IndicatorFactory to write the
    DBF output.
    NOTE(review): a definition with the same name appears earlier in this
    file; if both live in one module, this later one wins at import time.
    """
    xmlconfig = XMLConfiguration(filename="sanfrancisco.xml",
                                 default_directory=r'C:\opus\project_configs',
                                 is_parent=False)
    runconfig = xmlconfig.get_run_configuration("sanfrancisco_baseline2009",
                                                merge_controllers=True)
    tm_config = runconfig['travel_model_configuration']
    print tm_config['urbansim_to_tm_variable_mapping']

    # travel-model years are the integer keys that carry a 'year_dir' entry
    travel_model_years = []
    for key in tm_config.iterkeys():
        if isinstance(key, int) and tm_config[key].has_key('year_dir'):
            travel_model_years.append(key)
    travel_model_years.sort()

    zonedbfs_source_data = SourceData(
        cache_directory=cache_directory,
        run_description="Run description is used for what?",
        years=travel_model_years,
        dataset_pool_configuration=DatasetPoolConfiguration(
            package_order=['sanfrancisco', 'urbansim', 'opus_core'], ),
    )

    # 'alias=expression' attribute strings; dots are not legal in aliases,
    # so they are replaced with underscores
    attrs = []
    for key, val in tm_config['urbansim_to_tm_variable_mapping'].iteritems():
        key = key.replace(".", "_")
        attrs.append("%s=%s" % (key, val))

    # fixed set of accessibility / travel-time indicators
    attrs.extend([\
      "pwac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_by_population",
      "pwac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_by_population",
      "pwac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_by_population",
      "pwac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_by_population",
      "pwac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_by_population",
      "ewac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_to_employment",
      "ewac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_to_employment",
      "ewac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_to_employment",
      "ewac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_to_employment",
      "ewac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_to_employment",
      "ttpw_bus=sanfrancisco.zone.bus_travel_time_to_751",
      "ttpw_exp=sanfrancisco.zone.exp_travel_time_to_751",
      "ttpw_lrt=sanfrancisco.zone.lrt_travel_time_to_751",
      "ttpw_bart=sanfrancisco.zone.bart_travel_time_to_751",
      "ttpw_hwy=sanfrancisco.zone.hwy_travel_time_to_751",
      "d2powell=sanfrancisco.zone.dist_travel_time_to_751"
    ])

    zonedbf_indicators = [
        DatasetTable(source_data=zonedbfs_source_data,
                     dataset_name='zone',
                     name='zone Indicators',
                     output_type='dbf',
                     attributes=attrs)
    ]

    IndicatorFactory().create_indicators(indicators=zonedbf_indicators,
                                         display_error_box=False,
                                         show_results=False)
Beispiel #5
0
    def _read_xml_from_config(self, xml_config):
        ''' Extract XML information from the XML Configuration object. '''
        self.xml_config = xml_config
        self._root_node = xml_config.full_tree.getroot()

        # Use the inherited tree when one exists; otherwise fall back to the
        # root of an empty default configuration.
        inherited = xml_config.inherited_tree
        if inherited is None:
            inherited = XMLConfiguration().full_tree.getroot()
        self._inherited_root = inherited

        self._init_shadowing_nodes()
        self._set_project_name()
Beispiel #6
0
    def delete_or_update_node(self, node, new_node):
        '''
        Delete or update a node from the XML DOM. If the node was shadowing an inherited node, the inherited
        node is (re-)inserted into the DOM (after merging with new_node in the case of an update) and returned.
        Calling delete_or_update_node on an inherited node has no effect.
        @node (Element) node to remove
        @new_node (Element) node to insert instead of the removed element; None to remove only 
        @return the (re-inserted) node (Element) or None
        '''

        # The three cases of deleting/updating a node:
        # The node is local (simplest case -- just remove it)
        #   - if updating, simply add the new node
        # The node is inherited (no, wait, this is the simplest case -- do nothing)
        # The node is shadowing an inherited node (remove the node)
        #   - if removing, reinsert the inherited node
        #   - if updating, merge the new node with the inherited node and insert
        #
        # NOTE(review): statement order matters here -- shadowing bookkeeping
        # must be cleaned before the node is detached from its parent.

        # helper function to clean out all child nodes from shadowing_nodes
        def clean_shadownodes(node):
            # depth-first: drop the whole subtree from the shadowing map
            for child_node in node:
                clean_shadownodes(child_node)
            if node in self._shadowing_nodes:
                del self._shadowing_nodes[node]
            assert node not in self._shadowing_nodes

        # remember where the node sat so a replacement can occupy its slot
        parent = node.getparent()
        node_index = parent.index(node)
        inherited_node = None
        reinserted_node = new_node
        if node in self._shadowing_nodes:
            # work on a copy so the stored inherited node stays pristine
            inherited_node = copy.deepcopy(self._shadowing_nodes[node])
            if new_node is None:
                # pure delete of a shadowing node: the inherited node comes back
                reinserted_node = inherited_node
                inherited_node = None
        elif node.get('inherited'):
            # inherited nodes cannot be removed -- no-op by design
            return
        else:
            pass

        clean_shadownodes(node)
        # capture the identity before removal; needed to re-link shadows below
        node_id = node_identity_string(node)
        parent.remove(node)

        if inherited_node is not None:
            # update of a shadowing node: merge inherited content into new_node
            assert new_node is not None
            XMLConfiguration._merge_nodes(inherited_node, new_node)
        if new_node is not None:
            self._add_shadowing_nodes(new_node, node_id)
        if reinserted_node is not None:
            parent.insert(node_index, reinserted_node)
        return reinserted_node
 def test_simulation(self):
     ''' Smoke test: run the Eugene gridcell baseline scenario end to end.

     There is no asserted result -- the test passes when the whole run
     pipeline (RunManager setup plus run_run) completes without raising.
     '''
     eugene_dir = __import__('eugene').__path__[0]
     xml_config = XMLConfiguration(os.path.join(eugene_dir, 'configs', 'eugene_gridcell.xml'))
     option_group = StartRunOptionGroup()
     parser = option_group.parser
     # simulate 0 command line arguments by passing in []
     (options, _) = parser.parse_args([])
     run_manager = RunManager(option_group.get_services_database_configuration(options))
     run_section = xml_config.get_run_configuration('Eugene_baseline')
     # presumably fills in a generated cache directory when the config
     # names none -- TODO confirm against the helper's definition
     insert_auto_generated_cache_directory_if_needed(run_section)
     run_manager.setup_new_run(cache_directory = run_section['cache_directory'],
                               configuration = run_section)
     run_manager.run_run(run_section)
    def delete_or_update_node(self, node, new_node):
        '''
        Delete or update a node from the XML DOM. If the node was shadowing an inherited node, the inherited
        node is (re-)inserted into the DOM (after merging with new_node in the case of an update) and returned.
        Calling delete_or_update_node on an inherited node has no effect.
        @node (Element) node to remove
        @new_node (Element) node to insert instead of the removed element; None to remove only 
        @return the (re-inserted) node (Element) or None
        '''
        # The three cases of deleting/updating a node:
        # The node is local (simplest case -- just remove it)
        #   - if updating, simply add the new node
        # The node is inherited (no, wait, this is the simplest case -- do nothing)
        # The node is shadowing an inherited node (remove the node)
        #   - if removing, reinsert the inherited node
        #   - if updating, merge the new node with the inherited node and insert
        #
        # NOTE(review): duplicated definition -- an identical method appears
        # earlier in this file.

        # helper function to clean out all child nodes from shadowing_nodes
        def clean_shadownodes(node):
            # recurse first so the entire subtree leaves the shadowing map
            for child_node in node:
                clean_shadownodes(child_node)
            if node in self._shadowing_nodes:
                del self._shadowing_nodes[node]
            assert node not in self._shadowing_nodes
            
        # record the node's position so a replacement can take its slot
        parent = node.getparent()
        node_index = parent.index(node)
        inherited_node = None
        reinserted_node = new_node
        if node in self._shadowing_nodes:
            # deep copy keeps the stored inherited node untouched
            inherited_node = copy.deepcopy(self._shadowing_nodes[node])
            if new_node is None:
                # deleting a shadowing node restores the inherited one
                reinserted_node = inherited_node
                inherited_node = None
        elif node.get('inherited'):
            # inherited nodes are immutable from here -- intentional no-op
            return
        else:
            pass
            
        clean_shadownodes(node)
        # identity must be captured before the node is detached
        node_id = node_identity_string(node)
        parent.remove(node)
        
        if inherited_node is not None:
            # update of a shadowing node: merge inherited content into new_node
            assert new_node is not None
            XMLConfiguration._merge_nodes(inherited_node, new_node)
        if new_node is not None:
            self._add_shadowing_nodes(new_node, node_id)
        if reinserted_node is not None:
            parent.insert(node_index, reinserted_node)
        return reinserted_node
 def test_simulation(self):
     ''' Smoke test: run the Seattle parcel baseline scenario end to end.

     Passes when the full run pipeline completes without raising; nothing
     about the simulation output itself is asserted.
     '''
     # check that the simulation proceeds without crashing
     # open the configuration for seattle_parcel.xml
     seattle_parcel_dir = __import__('seattle_parcel').__path__[0]
     xml_config = XMLConfiguration(os.path.join(seattle_parcel_dir, 'configs', 'seattle_parcel.xml'))
     option_group = StartRunOptionGroup()
     parser = option_group.parser
     # simulate 0 command line arguments by passing in []
     (options, _) = parser.parse_args([])
     run_manager = RunManager(option_group.get_services_database_configuration(options))
     run_section = xml_config.get_run_configuration('Seattle_baseline')
     insert_auto_generated_cache_directory_if_needed(run_section)
     run_manager.setup_new_run(cache_directory = run_section['cache_directory'],
                               configuration = run_section)
     run_manager.run_run(run_section)
    def _read_xml_from_config(self, xml_config):
        ''' Extract XML information from the XML Configuration object.

        Stores the full tree root and the inherited tree root, builds the
        map of local nodes shadowing inherited ones, and sets the project
        name (also exported via the OPUSPROJECTNAME environment variable).
        '''
        self.xml_config = xml_config
        self._root_node = xml_config.full_tree.getroot()

        # fall back to an empty default configuration when nothing is inherited
        if xml_config.inherited_tree is None:
            self._inherited_root = XMLConfiguration().full_tree.getroot()
        else:
            self._inherited_root = xml_config.inherited_tree
        # map id's to nodes for the inherited and the local nodes
        inherited_ids_to_nodes = dict((node_identity_string(n), n) for
                                      n in self._inherited_root.getiterator())
        local_ids_to_nodes = dict((node_identity_string(n), n) for
                                  n in self._root_node.getiterator() if not n.get('inherited'))

        self._shadowing_nodes = {}
        # join the local and inherited nodes on id-match
        for id_, node in local_ids_to_nodes.items():
            if id_ in inherited_ids_to_nodes:
                self._shadowing_nodes[node] = inherited_ids_to_nodes[id_]

        # project name from the XML, with a default when the element is absent
        if self.find('./general/project_name') is not None:
            self.name = self.find('./general/project_name').text
        else:
            self.name = 'unnamed_project'
        os.environ['OPUSPROJECTNAME'] = self.name
        self.dirty = False
Beispiel #11
0
    def init_run(self, create_baseyear_cache=True):
        ''' init run, get run_id & cache_directory.

        Registers a new run with the RunManager for self.scenario from
        self.xml_config, optionally creates the base-year cache, and
        returns the (run_id, cache_directory) pair.
        @param create_baseyear_cache: when True, build the base-year cache
            for the new run's resources.
        '''
        ##avoid invoking start_run from cmd line -
        option_group = StartRunOptionGroup()
        option_group.parser.set_defaults(xml_configuration=self.xml_config,
                                         scenario_name=self.scenario)
        #run_id, cache_directory = start_run(option_group)

        options, args = option_group.parse()
        self.run_manager = RunManager(
            option_group.get_services_database_configuration(options))

        # build the run resources from the scenario's XML configuration
        resources = XMLConfiguration(self.xml_config).get_run_configuration(
            self.scenario)
        insert_auto_generated_cache_directory_if_needed(resources)
        cache_directory = resources['cache_directory']
        self.run_manager.setup_new_run(cache_directory, resources)
        # the run manager assigns the final run_id / cache directory
        run_id, cache_directory = self.run_manager.run_id, self.run_manager.get_current_cache_directory(
        )
        self.run_manager.add_row_to_history(run_id, resources, "done")

        if create_baseyear_cache:
            self.run_manager.create_baseyear_cache(resources)

        ## good for testing
        #run_id = 275
        #cache_directory = '/home/lmwang/opus/data/paris_zone/runs/run_275.2012_05_26_00_20'
        assert run_id is not None
        assert cache_directory is not None
        return run_id, cache_directory
Beispiel #12
0
    def test_run(self):
        ''' Run the "Test" scenario from opus_matsim's test_config.xml end
        to end inside a temporary cache directory.

        Passes when the whole run completes without raising; the trailing
        assert is a no-op placeholder.
        '''

        # The paths work as follows: opus_matsim.__path__ is the path of the opus_matsim python module.  So we can use that
        # as anchor ...
        config_location = os.path.join(opus_matsim.__path__[0], 'tests')
        print "location: ", config_location
        run_config = XMLConfiguration(
            os.path.join(config_location,
                         "test_config.xml")).get_run_configuration("Test")

        # redirect the cache into this test's temp dir and point the
        # base-year cache at the bundled test data
        run_config[
            'creating_baseyear_cache_configuration'].cache_directory_root = self.temp_dir
        run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = \
            os.path.join(opus_matsim.__path__[0], 'tests', 'testdata', 'base_year_data')

        # insert_auto_generated_cache_directory... does things I don't understand.  Need to do the following to obtain consistent
        # behavior independent from the file root:
        run_config['cache_directory'] = None

        insert_auto_generated_cache_directory_if_needed(run_config)
        run_manager = RunManager(ServicesDatabaseConfiguration())

        run_manager.setup_new_run(
            cache_directory=run_config['cache_directory'],
            configuration=run_config)

        run_manager.run_run(run_config, run_as_multiprocess=True)

        # no-op assertion: reaching this line without an exception is the test
        self.assert_(True)

        self.cleanup_test_run()
    def setUp(self):
        ''' Prepare a MATSim export/import test run.

        Resolves the test-data root, creates a temporary destination
        directory for the generated MATSim config, and loads the "Test"
        scenario from the UrbanSim XML configuration.  Raises
        StandardError when the test-data root is missing.
        NOTE(review): near-duplicate of an earlier setUp in this file.
        '''
        print "entering setUp"

        logger.log_status("Testing UrbanSim export and import functionallity for MATSim...")

        # get root path to test cases
        self.path = test_path.__path__[0]
        logger.log_status("Set root path for MATSim config file to: %s" % self.path)
        if not os.path.exists(self.path):
            raise StandardError("Root path doesn't exist: %s" % self.path)

        # get path to MATSim source files and base_year_cache
        self.matsim_source, self.base_year_data_source = self.get_source_files()
        self.destination = tempfile.mkdtemp(prefix="opus_tmp")
        # self.destination = '/Users/thomas/Desktop/x'    # for debugging
        # if not os.path.exists(self.destination):        # for debugging
        #    os.mkdir(self.destination)                  # for debugging

        # load UrbanSim config to run MATSim test
        urbansim_config_location = os.path.join(
            self.path, "configs", "urbansim_config"
        )  #'/Users/thomas/Development/workspace/urbansim_trunk/opus_matsim/tests/test_config.xml'
        logger.log_status("Loading UrbanSim config: %s" % urbansim_config_location)
        # self.run_config = XMLConfiguration( urbansim_config_location ).get_run_configuration("Test")
        urbansim_config_name = "urbansim_config_for_matsim_run_test.xml"
        self.run_config = XMLConfiguration(
            os.path.join(urbansim_config_location, urbansim_config_name)
        ).get_run_configuration("Test")

        # set destination for MATSim config file
        self.matsim_config_full = os.path.join(self.destination, "test_matsim_config.xml")

        print "leaving setUp"
Beispiel #14
0
 def open(self, filename):
     '''
     Load a project file from XML.
     @return: flag and message (tuple(boolean, String))
     The flag is only True if the project loaded without problems.
     '''
     # Always close the project before loading another one to avoid mixing
     # data if the load is only partly successful
     self.close()
     filename = str(filename)
     if not os.path.exists(filename):
         return (
             False,
             "Tried to load project from file '%s', but that file does not exist"
             % filename)
     default_path = os.path.dirname(filename)
     filename = os.path.basename(filename)
     try:
         xml_config = XMLConfiguration(filename, default_path)
         self._read_xml_from_config(xml_config)
         self.filename = os.path.normpath(
             os.path.join(default_path, filename))
         return (True, 'Project %s loaded OK' % filename)
     # Catch only the errors that XMLConfiguration is known to throw
     except (IOError, SyntaxError, ValueError, SyntaxError,
             XMLVersionException), ex:
         self.close()
         return (False, str(ex))
def main():
    """Parse estimation command-line options and run an EstimationRunner.

    Requires a model name (-m) and at least one of an opus configuration
    path (-c) or an XML configuration file (-x); raises StandardError when
    either requirement is unmet.  Returns the EstimationRunner after
    estimation has finished.
    """
    option_group = EstimationOptionGroup()
    parser = option_group.parser
    (options, args) = parser.parse_args()
    if options.model_name is None:
        raise StandardError, "Model name (argument -m) must be given."
    if (options.configuration_path is None) and (options.xml_configuration is
                                                 None):
        raise StandardError, "Configuration path (argument -c) or XML configuration (argument -x) must be given."
    if (options.specification is None) and (options.xml_configuration is None):
        logger.log_warning(
            "No specification given (arguments -s or -x). Specification taken from the cache."
        )
    # load the XML configuration when one was supplied
    if options.xml_configuration is not None:
        xconfig = XMLConfiguration(options.xml_configuration)
    else:
        xconfig = None
    if options.configuration_path is None:
        config = None
    else:
        config = get_config_from_opus_path(options.configuration_path)
    estimator = EstimationRunner(model=options.model_name,
                                 specification_module=options.specification,
                                 xml_configuration=xconfig,
                                 model_group=options.model_group,
                                 configuration=config,
                                 save_estimation_results=options.save_results)
    estimator.estimate()
    return estimator
    def __init__(self,
                 config_path=None,
                 config_file_name=None,
                 destination=None,
                 testRun=True):
        ''' Constructor
        '''
        print "Entering __init__"

        # load test urbansim config
        self.config_path = config_path
        if self.config_path == None:
            self.config_path = pyxb_test.__path__[0]
        self.config_name = config_file_name
        if self.config_name == None:
            self.config_name = 'test_urbansim_config.xml'
        self.matsim_config_location = destination
        if self.matsim_config_location == None:
            self.matsim_config_destination = os.path.join(
                os.environ['OPUS_HOME'], "opus_matsim", "matsim_config")
            if not self.test_path(self.matsim_config_destination):
                return False
            self.matsim_config_location = os.path.join(
                self.matsim_config_destination, "test_matsim_config.xml")

        self.config_location = os.path.join(self.config_path, self.config_name)
        print "Loding test config file: %s" % self.config_location
        self.config = XMLConfiguration(
            self.config_location).get_run_configuration("Test")

        # get travel model parameter from the opus dictionary
        self.travel_model_configuration = self.config[
            'travel_model_configuration']
        # get years parameter
        self.years = self.config['years']

        # gather all parameters for the MATSim config file
        # network parameter
        self.network_file = self.travel_model_configuration[
            'matsim_network_file']
        self.network_file = os.path.join(os.environ['OPUS_HOME'],
                                         "opus_matsim", self.network_file)
        # controler parameter:
        self.first_iteration = self.travel_model_configuration[
            'first_iteration']
        self.last_iteration = self.travel_model_configuration['last_iteration']
        # planCalcScoreType
        self.activityType_0 = self.travel_model_configuration['activityType_0']
        self.activityType_1 = self.travel_model_configuration['activityType_1']
        # urbansim parameter
        self.year = self.years[0]
        self.samplingRate = self.travel_model_configuration['sampling_rate']
        self.temp_directory = self.travel_model_configuration['temp_directory']
        self.isTestRun = testRun
        self.opus_home = os.environ['OPUS_HOME']
        self.firstRun = "FALSE"

        # create xml config file
        # return self.build_xml_config()
        print "Leaving __init__"
Beispiel #17
0
 def test_simulation(self):
     ''' Smoke test: run the Eugene gridcell baseline scenario end to end.

     Passes when the run pipeline completes without raising; no output is
     asserted.  NOTE(review): a near-identical test appears earlier in
     this file.
     '''
     eugene_dir = __import__('eugene').__path__[0]
     xml_config = XMLConfiguration(
         os.path.join(eugene_dir, 'configs', 'eugene_gridcell.xml'))
     option_group = StartRunOptionGroup()
     parser = option_group.parser
     # simulate 0 command line arguments by passing in []
     (options, _) = parser.parse_args([])
     run_manager = RunManager(
         option_group.get_services_database_configuration(options))
     run_section = xml_config.get_run_configuration('Eugene_baseline')
     insert_auto_generated_cache_directory_if_needed(run_section)
     run_manager.setup_new_run(
         cache_directory=run_section['cache_directory'],
         configuration=run_section)
     run_manager.run_run(run_section)
Beispiel #18
0
 def setUp(self):
     """
     set up opus data path, a base_year_data cache directory needs to exists 
     or be created through downloading and unzipping etc
     
     """
     self.opus_home = os.environ["OPUS_HOME"]
     # prefer an explicit OPUS_DATA_PATH; fall back to $OPUS_HOME/data
     if os.environ.has_key('OPUS_DATA_PATH'):
         self.data_path = os.path.join(os.environ['OPUS_DATA_PATH'], 'psrc_parcel')
     else:
         self.data_path = os.path.join(self.opus_home, 'data', 'psrc_parcel')
     
     self.xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'psrc_parcel_test.xml'))
     
     # make sure the base-year cache directory exists (contents are not checked here)
     base_year_data_path = os.path.join(self.data_path, 'base_year_data')        
     if not os.path.exists(base_year_data_path):
         os.makedirs(base_year_data_path)
def _get_run_config(temp_dir):
    """Write a test configuration XML into temp_dir and load it.

    Renders the module-level config_template with temp_dir, saves it as
    'testconfig.xml', and returns the 'Eugene_baseline' run configuration
    parsed from that file.
    @param temp_dir: directory receiving the generated config file.
    """
    config_path = os.path.join(temp_dir, 'testconfig.xml')
    # Fix: use a context manager so the file handle is closed even when
    # the write raises (the original open/write/close leaked on error).
    with open(config_path, 'w') as f:
        f.write(config_template % temp_dir)
    run_configuration = XMLConfiguration(config_path).get_run_configuration(
        'Eugene_baseline')

    return run_configuration
Beispiel #20
0
def prepare_run_manager(option_group=None):
    '''Parse command-line options and prepare a new simulation run.

    Returns an (options, config, run_manager) triple. The run configuration
    is taken from, in priority order: a pickled resource file, an opus
    configuration path, or an XML configuration plus scenario name.
    Prints help and exits with status 1 when no usable source is given.
    '''
    if option_group is None:
        option_group = StartRunOptionGroup()
    parser = option_group.parser
    options, args = option_group.parse()

    run_manager = RunManager(
        option_group.get_services_database_configuration(options))

    if options.pickled_resource_file is not None:
        # 'with open' replaces the Python-2-only file() builtin and closes
        # the handle even when unpickling raises. NOTE: unpickling is only
        # safe for trusted, locally produced resource files.
        with open(options.pickled_resource_file, 'r') as f:
            config = pickle.load(f)
    elif options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
        insert_auto_generated_cache_directory_if_needed(config)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(
            options.xml_configuration).get_run_configuration(
                options.scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)

    if options.existing_cache_to_copy is not None:
        # copy an existing base-year cache instead of rebuilding it from
        # the scenario database
        config[
            'creating_baseyear_cache_configuration'].cache_from_database = False
        config[
            'creating_baseyear_cache_configuration'].baseyear_cache = BaseyearCacheConfiguration(
                existing_cache_to_copy=options.existing_cache_to_copy, )
        if options.years_to_cache is not None:
            config[
                'creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(
                    options.years_to_cache)

    if options.profile_filename is not None:
        config["profile_filename"] = options.profile_filename

    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)

    return options, config, run_manager
    def test_simulation(self):
        """
        Integration test: refresh the base_year_data cache from an FTP
        archive, then run the 'washtenaw_baseline_test' scenario end-to-end
        through the run manager.
        """
        base_year_data_path = os.path.join(self.data_path, 'base_year_data')        
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)

        # FTP credentials and archive location come from the environment;
        # a missing variable raises KeyError and fails the test early
        ftp_url = os.environ["FTP_URL"]
        file_name = os.path.split(ftp_url)[1]
        ftp_user = os.environ["FTP_USERNAME"]
        ftp_password = os.environ["FTP_PASSWORD"]
        
        #stdout, stderr = Popen("ls -la %s" % base_year_data_path, shell=True).communicate()
        #stdout, stderr = Popen("echo '%s'" % (base_year_data_path), stdout=PIPE).communicate()
        #print stdout
        
        # NOTE(review): the shell command is built by string interpolation
        # from environment values (shell-injection prone), and the wget line
        # is prefixed with 'echo' so the download itself appears to be
        # disabled -- only the unzip of an already-present archive runs.
        # Confirm whether re-enabling the download is intended.
        try:
            Popen( """
                        cd %s;
                        pwd;
                        ls -la;
                        echo wget --timestamping %s --ftp-user=%s --ftp-password=%s > /dev/null 2>&1;
                        rm -rf 2008;
                        unzip -o %s
                        """ % (base_year_data_path, ftp_url, ftp_user, ftp_password, file_name),
                        shell = True
                        ).communicate()
        except:
            print "Error when downloading and unzipping file from %s." % ftp_url
            raise

        # run the washtenaw baseline scenario through the services database
        services_db = ServicesDatabaseConfiguration( database_name = 'services',                         
                                                     database_configuration = 'services_database_server' )
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'washtenaw_parcel.xml'))
        for scenario_name in ['washtenaw_baseline_test']:
            config = xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
#            base_year = config['base_year']
#            config['years_to_run'] = (base_year+1, base_year+2)
            run_manager.setup_new_run(cache_directory = config['cache_directory'],
                                      configuration = config)
            run_manager.run_run(config, run_as_multiprocess = run_as_multiprocess)
Beispiel #22
0
    def run(self, config, executable):
        '''Load scenario *executable* from the XML file *config* and run it.

        Example arguments:
        --config=opus_matsim/sustain_city/configs/seattle_parcel.xml
        --executable=Seattle_baseline
        '''
        run_config = XMLConfiguration(config).get_run_configuration(executable)
        insert_auto_generated_cache_directory_if_needed(run_config)

        manager = RunManager(ServicesDatabaseConfiguration())
        manager.setup_new_run(cache_directory=run_config['cache_directory'],
                              configuration=run_config)
        manager.run_run(run_config, run_as_multiprocess=True)
Beispiel #23
0
    def _read_xml_from_config(self, xml_config):
        '''Initialize this project from the given XML Configuration object.'''
        self.xml_config = xml_config
        self._root_node = xml_config.full_tree.getroot()

        # when nothing is inherited, fall back to an empty configuration's root
        inherited = xml_config.inherited_tree
        if inherited is None:
            inherited = XMLConfiguration().full_tree.getroot()
        self._inherited_root = inherited

        self._init_shadowing_nodes()
        self._set_project_name()
Beispiel #24
0
    def __init__(self,
                 xml_config,
                 scenario,
                 calib_datasets,
                 target_expression,
                 target_file,
                 subset=None,
                 subset_patterns=None,
                 skip_cache_cleanup=False,
                 log_directory=None):
        """
        Set up a calibration run and read its parameters from the scenario
        configuration.

        - xml_config: xml configuration file, for ex '/home/atschirhar/opus/project_configs/paris_zone.xml'
        - scenario: name of scenario to run for calibration, where models_to_run and simulation years are specified
        - calib_datasets: dictionary specifying dataset names and attributes to be calibrated, e.g.
                  {'establishment_location_choice_model_coefficients': 'estimate'}
        - target_expression: opus expression computing values from prediction to be compared with targets 
        - target_file: name of csv file providing targets 
        - subset: dictionary specifying the dataset to be calibrated,
                  {'establishment_location_choice_model_coefficients': ['coefficient_name', ['paris_celcm, 'biotech_celcm']]}
          subset and subset_patterns can not be both specified for the same dataset
        - subset_patterns: dictionary specifying the dataset to be calibrated through a regular expression (re) pattern
                  {'establishment_location_choice_model_coefficients': ['coefficient_name', '*_celcm']} 
          subset and subset_patterns can not be both specified for the same dataset
        - skip_cache_cleanup: when True, leave existing run caches in place
        - log_directory: where calibration.log is written (defaults to the
          run's cache directory)
        """
        self.target_expression = target_expression
        self.target = self.read_target(target_file)

        self.run_manager = None
        self.xml_config = xml_config
        self.scenario = scenario
        self.skip_cache_cleanup = skip_cache_cleanup
        # init_run() must happen before logging setup: the default log
        # directory is the cache directory it creates
        self.run_id, self.cache_directory = self.init_run()
        self.run_ids = [
            self.run_id
        ]  #allow starting of multiple runs for parallel optimization
        self.log_directory = log_directory
        if self.log_directory is None:
            self.log_directory = self.cache_directory  #legacy

        log_file = os.path.join(self.log_directory, "calibration.log")
        logger.enable_file_logging(log_file)

        # re-parse the scenario to pull run parameters out of the config
        dict_config = XMLConfiguration(self.xml_config).get_run_configuration(
            self.scenario)
        ## get parameters from config
        self.base_year = dict_config['base_year']
        self.start_year, self.end_year = dict_config['years']
        self.project_name = dict_config['project_name']
        self.package_order = dict_config[
            'dataset_pool_configuration'].package_order
Beispiel #25
0
 def setUp(self):
     """
     Resolve the opus data path for the sanfrancisco project and ensure
     its base_year_data cache directory exists (created if missing).
     """
     self.opus_home = paths.OPUS_HOME
     self.data_path = paths.get_opus_data_path_path('sanfrancisco')

     project_file = os.path.join(self.opus_home, 'project_configs', 'sanfrancisco.xml')
     self.xml_config = XMLConfiguration(project_file)

     cache_dir = os.path.join(self.data_path, 'base_year_data')
     if not os.path.exists(cache_dir):
         os.makedirs(cache_dir)
Beispiel #26
0
    def setUp(self):
        """
        Resolve the opus data path for the psrc_parcel project and make
        sure its base_year_data cache directory is present.
        """
        self.opus_home = paths.OPUS_HOME
        self.data_path = paths.get_opus_data_path_path("psrc_parcel")

        project_file = os.path.join(self.opus_home, "project_configs", "psrc_parcel_test.xml")
        self.xml_config = XMLConfiguration(project_file)

        base_dir = os.path.join(self.data_path, "base_year_data")
        if not os.path.exists(base_dir):
            os.makedirs(base_dir)
Beispiel #27
0
class StartRunOptionGroup(object):
    """ Helper class to start model from an xml config file.

    NOTE: the body below executes at class-definition (import) time, not
    when an instance is created -- importing this module parses sys.argv
    and starts a run directly.
    """

    logger.start_block("Starting UrbanSim")

    # get program arguments from the command line
    program_arguments = sys.argv[1:]

    # default parameters are:
    # --config=opus_matsim/sustain_city/configs/seattle_parcel_prescheduled_events.xml
    # --executable=Seattle_baseline
    parser = optparse.OptionParser()
    parser.add_option("-c",
                      "--config",
                      dest="config_file_name",
                      action="store",
                      type="string",
                      help="Name of file containing urbansim config")
    parser.add_option("-e",
                      "--executable",
                      dest="scenario_executable",
                      action="store",
                      type="string",
                      help="Model to execute")
    (options, args) = parser.parse_args()

    # 'is None' is the idiomatic identity test (was '== None'); execution
    # still continues after logging, matching the original behavior
    if options.config_file_name is None:
        logger.log_error("Missing path to the urbansim config file")
    if options.scenario_executable is None:
        logger.log_error("Missing name of executable scenario")

    config = XMLConfiguration(options.config_file_name).get_run_configuration(
        options.scenario_executable)

    insert_auto_generated_cache_directory_if_needed(config)

    run_manager = RunManager(ServicesDatabaseConfiguration())

    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)

    #try: #tnicolai
    #    import pydevd
    #    pydevd.settrace()
    #except: pass

    run_manager.run_run(config, run_as_multiprocess=True)
 def __init__(self):
     print "Entering setup"
     
     logger.log_status('Running UrbanSim to test the impact of travel costs (provided from an dummy travel model)')
     
     # get sensitivity test path
     self.test_dir_path = test_dir.__path__[0]
     
     self.config_file = os.path.join( self.test_dir_path, "configs", "seattle_parcel_travel_cost_test.xml")
     print 'Loding UrbanSim config file: %s' % self.config_file
     
     # get seattle_parcel configuration
     self.config = XMLConfiguration( self.config_file ).get_run_configuration( "Seattle_baseline" )
     insert_auto_generated_cache_directory_if_needed(self.config)
     
     print "Leaving setup"
 def setUp(self):
     """
     Set up the opus data path for the san_antonio_zone project and make
     sure a base_year_data cache directory exists (created if missing).
     """
     self.opus_home = os.environ["OPUS_HOME"]
     # 'key in dict' replaces dict.has_key(key), which was deprecated in
     # Python 2 and removed in Python 3
     if 'OPUS_DATA_PATH' in os.environ:
         self.data_path = os.path.join(os.environ['OPUS_DATA_PATH'], 'san_antonio_zone')
     else:
         self.data_path = os.path.join(self.opus_home, 'data', 'san_antonio_zone')

     self.xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'san_antonio_zone.xml'))

     base_year_data_path = os.path.join(self.data_path, 'base_year_data')
     if not os.path.exists(base_year_data_path):
         os.makedirs(base_year_data_path)
Beispiel #30
0
class StartRunOptionGroup(object):
    """ Helper class to start model from an xml config file. 
    """
    # NOTE(review): everything below executes at class-definition (import)
    # time, not when an instance is created -- importing this module starts
    # a full Seattle_baseline simulation run.

    config = XMLConfiguration("opus_matsim/configs/seattle_parcel.xml"
                              ).get_run_configuration("Seattle_baseline")
    #    config = XMLConfiguration("opus_matsim/configs/psrc_parcel.xml").get_run_configuration("PSRC_baseline")

    # auto-generate a cache directory for this run if the config lacks one
    insert_auto_generated_cache_directory_if_needed(config)

    run_manager = RunManager(ServicesDatabaseConfiguration())

    # register the run with the services database before executing it
    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)

    #    run_manager.create_baseyear_cache(config)

    run_manager.run_run(config, run_as_multiprocess=True)
Beispiel #31
0
    def test_estimation(self):
        '''Estimate several seattle_parcel models via the EstimationRunner.'''
        # open the configuration for seattle_parcel.xml
        package_dir = __import__('seattle_parcel').__path__[0]
        project = XMLConfiguration(
            os.path.join(package_dir, 'configs', 'seattle_parcel.xml'))

        # ungrouped models
        for name in ('real_estate_price_model', 'household_location_choice_model'):
            EstimationRunner(model=name,
                             xml_configuration=project,
                             configuration=None).estimate()

        # test run with group members
        EstimationRunner(model='employment_location_choice_model',
                         xml_configuration=project,
                         model_group='home_based',
                         configuration=None).estimate()
Beispiel #32
0
 def __init__(self, output_dir=None,  year=None):
     ''' Constructor
     '''
     # get working path as an anchor e.g. to determine the config file location.
     self.working_path = test_path.__path__[0]
     print "Working path: %s" % self.working_path
     # get config file location
     self.config_file = os.path.join( self.working_path, 'configs', 'seattle_parcel_travel_cost_test.xml')
     
     # get seattle_parcel configuration
     config = XMLConfiguration( self.config_file ).get_run_configuration( "Seattle_baseline" )
     
     self.input_storage = None
     
     # get first simulation year
     self.year = year
     if self.year == None:
         self.year = config['base_year']
         base_year_data_path = os.path.join( os.environ['OPUS_DATA_PATH'], 'seattle_parcel', 'base_year_data')
         attribute_cache = AttributeCache(cache_directory=base_year_data_path)
         self.input_storage = attribute_cache.get_flt_storage_for_year(self.year)
     else:
         attribute_cache = AttributeCache().get_flt_storage_for_year(self.year)
         self.input_storage = attribute_cache
     
     # get output dir path
     output_directory = output_dir
     if output_directory == None:
         # set deafult
         output_directory = os.path.join( os.environ['OPUS_HOME'], 'opus_matsim', 'tmp')
     if not os.path.exists( output_directory ):
         try: os.mkdir( output_directory )
         except: pass
     
     # init 
     self.csv_data_path = output_directory # os.path.join(output_directory, 'travel_data_dir')
Beispiel #33
0
                      dest="stdout",
                      default=False,
                      action="store_true",
                      help='print results into stdout')
    parser.add_option("-l",
                      "--latex",
                      dest="latex",
                      default=None,
                      help="latex output file")

    (options, args) = parser.parse_args()

    if options.xml_configuration == None:
        raise "Requires an xml configuration argument."

    chart = DependencyChart(XMLConfiguration(options.xml_configuration),
                            model=options.model,
                            model_group=options.model_group)
    #print chart.model_table(options.model)
    #temp = chart.query.vars_tree(chart.query.var_list)
    #print pretty_tree(chart.query.all_models_tree())

    #auto = AutogenVariableFactory("(urbansim_parcel.parcel.building_sqft/(parcel.parcel_sqft).astype(float32)).astype(float32)")
    #auto._analyze_tree(auto._expr_parsetree)
    #auto._analyze_dataset_names()
    #print(auto._generate_compute_method())

    if options.stdout:
        if options.model != None:
            chart.print_model_dependencies()
        else:
                    action="store", help="Name of the scenario database server configuration in database_server_configurations.xml.")
 
 (options, args) = parser.parse_args()
 
 if options.configuration_path is not None:
     opus_path = options.configuration_path
     try:
         config = get_config_from_opus_path(opus_path)
     except ImportError:
         import_stmt = 'from %s import run_configuration as config' % opus_path
         exec(import_stmt)
 elif options.xml_configuration is not None:
     if options.scenario_name is None:
         parser.print_help()
         sys.exit(1)
     config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
 else:
     parser.print_help()
     sys.exit(1)
 
 if options.cache_directory is not None:
     config['cache_directory'] = options.cache_directory
     
 if options.database_name is not None or options.database_configuration is not None:
     if not config.has_key('scenario_database_configuration'):
         config['scenario_database_configuration'] = ScenarioDatabaseConfiguration(database_name = options.database_name,
                                                                                   database_configuration = options.database_configuration
                                                                                   )
     else:
         if options.database_configuration is not None:
                 config['scenario_database_configuration'] = ScenarioDatabaseConfiguration(database_configuration = options.database_configuration)
Beispiel #35
0
    parser.add_option("-x", "--xml-configuration", dest="xml_configuration", default = None,
                               action="store", help="Full path to an XML configuration file (must also provide a scenario name using -s). Either -x or -r must be given.")
    parser.add_option("-s", "--scenario_name", dest="scenario_name", default=None, 
                                help="Name of the scenario. Must be given if option -x is used.")
    parser.add_option("-d", "--directory", dest="cache_directory", default = None,
                               action="store", help="Cache directory with urbansim output.")
    (options, args) = parser.parse_args()
    if options.year is None:
        raise StandardError, "Year (argument -y) must be given."
    if (options.scenario_name is None) and (options.xml_configuration is not None):
        raise StandardError, "No scenario given (argument -s). Must be specified if option -x is used."
    r = None
    xconfig = None
    if options.resources_file_name is not None:
        r = get_resources_from_file(options.resources_file_name)
        resources = Resources(get_resources_from_file(options.resources_file_name))
    elif options.xml_configuration is not None:
        xconfig = XMLConfiguration(options.xml_configuration)
        resources = xconfig.get_run_configuration(options.scenario_name)
    else:
        raise StandardError, "Either option -r or -x must be used."
        
    files = GetCacheDataIntoDaysim(resources).run(options.year, cache_directory=options.cache_directory)
    if options.output_directory is not None:
        for file in files:
            copy(file, options.output_directory)
        logger.log_status('Files copied into %s' % options.output_directory)
        
# For a test run, use options
# -x opus_daysim/configs/sample_daysim_configuration.xml -s daysim_scenario -d opus_core/data/test_cache -y 1980
Beispiel #36
0
    elif options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
        insert_auto_generated_cache_directory_if_needed(config)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(
            options.xml_configuration).get_run_configuration(
                options.scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)

    if options.existing_cache_to_copy is not None:
        config[
            'creating_baseyear_cache_configuration'].cache_from_database = False
        config[
            'creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = options.existing_cache_to_copy
        if options.years_to_cache is not None:
            config[
                'creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(
                    options.years_to_cache)
Beispiel #37
0
    def copy_to_parent(self, node):
        '''
        Copies a local node to the parent configuration without deleting it.

        Works on a deep copy of this project's tree: strips immutable nodes,
        finds the deepest ancestor of *node* that also exists in the parent
        configuration, merges the pruned subtree into it, and saves the
        parent project file.

        @node (Element) to copy to parent
        '''
        # Helper routines:
        
        # Never copy description and parent nodes to parent
        def delete_immutable():
            id_strings = self.IMMUTABLE_NODE_IDS
            for id_string in id_strings:
                for n in self.find_all_by_id_string(id_string, clone):
                    n.getparent().remove(n)
    
        # Find deepest parent node of to-be-inserted node that is also in the parent config
        # (recurses upward; collects the visited chain in *nodes*)
        def get_insert_node(merge_node, nodes):
            nodes.append(merge_node)
            ins_node = self.find_by_id_string(node_identity_string(merge_node), parent_root) 
            if ins_node is not None:
                return ins_node
            merge_node = merge_node.getparent()
            return get_insert_node(merge_node, nodes)

        # Remove all children for all nodes in the nodes list
        # (keeps only the spine leading to the node being copied)
        def strip_children(nodes):
            for n in nodes[:-1]:
                for subelement in n.getparent().getchildren():
                    if subelement is not n:
                        n.getparent().remove(subelement)
        
        # Remove "inherited" attribute from all nodes below tree_node that were
        # introduced by the immediate parent.  Remove all inherited nodes
        # that were introduced by a grandparent -- copying them to the parent
        # leads to incorrect results.
        def clear_inherited_attribute_or_delete_inherited_nodes(tree_node):
            nodes_to_delete = []
            for node in tree_node.iterchildren():
                if node.get('inherited') is not None:
                    if node.get('inherited') == parent_name:
                        del node.attrib['inherited']
                        clear_inherited_attribute_or_delete_inherited_nodes(node)
                    else:
                        nodes_to_delete.append(node)
                else:
                    clear_inherited_attribute_or_delete_inherited_nodes(node)
            
            for node in nodes_to_delete:
                tree_node.remove(node)


        #work on clone_node
        id_string = node_identity_string(node)
        clone = copy.deepcopy(self.root_node())
        node = self.find_by_id_string(id_string, clone)
        
        #get parent project   
        parent_file = self.get_last_writable_parent_file()
        parent_project = OpusProject()
        parent_project.open(parent_file)
        parent_name = parent_project.xml_config.name
        parent_root = parent_project.xml_config.tree.getroot()
        
        delete_immutable()
  
        # NOTE(review): insert_node is only assigned when node is not the
        # clone root -- presumably copy_to_parent is never called on the
        # root element; confirm with callers.
        parents_to_insert = []
        if node is not clone:
            insert_node = get_insert_node(node, parents_to_insert)

        node = parents_to_insert[-1]
        strip_children(parents_to_insert)
        clear_inherited_attribute_or_delete_inherited_nodes(node)
        
        XMLConfiguration._merge_nodes(insert_node, node)
        
        insert_parent = insert_node.getparent()
        insert_parent.replace(insert_node, node)

        # using parent_project.save() adds unnecessary attributes for some reason.
        parent_project.xml_config.save_as(parent_file)
    elif options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
        insert_auto_generated_cache_directory_if_needed(config)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)

    if options.existing_cache_to_copy is not None:
        config['creating_baseyear_cache_configuration'].cache_from_database = False
        config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = options.existing_cache_to_copy
        if options.years_to_cache is not None:
            config['creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(options.years_to_cache)

    number_of_runs = config.get("number_of_runs", 1)
    number_of_runs_in_parallel = min(config.get("parallel_runs", 1), number_of_runs)
    # generate seeds for multiple runs
    root_seed = config.get("seed", None)
Beispiel #39
0
    
    config = {}
    if options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
    
    from_database_configuration = config.get('scenario_database_configuration', ScenarioDatabaseConfiguration(database_name = options.scenario_database_name,
                                                                                                              database_configuration=options.scenario_database_configuration,
                                                                                                              ))
    to_database_name = options.flattened_database_name or (from_database_configuration.database_name + '_flattened')
    to_database_configuration = ScenarioDatabaseConfiguration(
                                    protocol = from_database_configuration.protocol,
                                    host_name = from_database_configuration.host_name,
                                    user_name = from_database_configuration.user_name,
                                    password = from_database_configuration.password,
                                    database_name = to_database_name)

    if config.get('creating_baseyear_cache_configuration', None):
        tables_to_copy = config['creating_baseyear_cache_configuration'].tables_to_cache
    else:
class TestSimulation(opus_unittest.OpusIntegrationTestCase):
    """ this integration test runs san_antonio_baseline_test in project_configs/san_antonio_zone.xml
    """
    def setUp(self):
        """
        set up opus data path, a base_year_data cache directory needs to exists 
        or be created through downloading and unzipping etc
        """
        self.opus_home = os.environ["OPUS_HOME"]
        # 'key in dict' replaces dict.has_key(key), which was deprecated in
        # Python 2 and removed in Python 3
        if 'OPUS_DATA_PATH' in os.environ:
            self.data_path = os.path.join(os.environ['OPUS_DATA_PATH'], 'san_antonio_zone')
        else:
            self.data_path = os.path.join(self.opus_home, 'data', 'san_antonio_zone')

        self.xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'san_antonio_zone.xml'))

        base_year_data_path = os.path.join(self.data_path, 'base_year_data')
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)

#        ftp_url = os.environ["FTP_URL"]
#        file_name = os.path.split(ftp_url)[1]
#        ftp_user = os.environ["FTP_USERNAME"]
#        ftp_password = os.environ["FTP_PASSWORD"]
#
#        try:
#            Popen( """
#                        cd %s;
#                        pwd;
#                        ls -la;
#                        echo wget --timestamping %s --ftp-user=%s --ftp-password=%s > /dev/null 2>&1;
#                        rm -rf 2008;
#                        unzip -o %s
#                        """ % (base_year_data_path, ftp_url, ftp_user, ftp_password, file_name),
#                        shell = True
#                        ).communicate()
#        except:
#            print "Error when downloading and unzipping file from %s." % ftp_url
#            raise            

    def tearDown(self):
        """
        delete [project_name]/runs directory to free up disk space
        """
        # NOTE(review): the deletion is commented out, so this currently
        # frees nothing -- confirm whether cleanup should be re-enabled
        runs_path = os.path.join(self.data_path, 'runs')
        #if os.path.exists(runs_path):
        #    Popen( "rm -rf %s" % runs_path, shell=True)

    def test_estimation(self):
        """Estimate each configured model (grouped and ungrouped)."""
        for model_name in ['real_estate_price_model', 
                           'household_location_choice_model', 
                           ('employment_location_choice_model', 'home_based'),
                           ('employment_location_choice_model', 'non_home_based'),
                           'residential_development_project_location_choice_model',
                           'non_residential_development_project_location_choice_model',                           
                           ]:
            # tuple entries carry a (model, group_member) pair
            if type(model_name)==tuple:
                model_name, group_member = model_name
            else:
                group_member = None        
                
            estimator = EstimationRunner(model=model_name,  
                                         model_group=group_member,
                                         xml_configuration=self.xml_config,
                                         configuration = None
                                         )
            estimator.estimate()
                            
    def test_simulation(self):
        """Run the san_antonio_baseline_test scenario through the run manager."""
        services_db = ServicesDatabaseConfiguration( database_name = 'services',                         
                                                     database_configuration = 'services_database_server' )
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        for scenario_name in ['san_antonio_baseline_test']:
            config = self.xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
            run_manager.setup_new_run(cache_directory = config['cache_directory'],
                                      configuration = config)
            run_manager.run_run(config, run_as_multiprocess = run_as_multiprocess)
Beispiel #41
0
class TestSimulation(opus_unittest.OpusIntegrationTestCase):
    """ this integration test estimates models and runs the psrc_baseline_test
    scenario from project_configs/psrc_parcel_test.xml
    (NOTE(review): the docstring previously said san_antonio_baseline_test,
    but test_simulation below actually runs psrc_baseline_test)
    """

    def setUp(self):
        """
        set up opus data path, a base_year_data cache directory needs to exists 
        or be created through downloading and unzipping etc
        
        """
        self.opus_home = paths.OPUS_HOME
        # NOTE(review): 'get_opus_data_path_path' has a doubled '_path' suffix;
        # confirm the helper is really named this (vs. get_opus_data_path).
        self.data_path = paths.get_opus_data_path_path("psrc_parcel")

        self.xml_config = XMLConfiguration(os.path.join(self.opus_home, "project_configs", "psrc_parcel_test.xml"))

        # Ensure the base-year cache directory exists; its contents are expected
        # to be provided out of band (see the disabled ftp download code below).
        base_year_data_path = os.path.join(self.data_path, "base_year_data")
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)

    #        ftp_url = os.environ["FTP_URL"]
    #        file_name = os.path.split(ftp_url)[1]
    #        ftp_user = os.environ["FTP_USERNAME"]
    #        ftp_password = os.environ["FTP_PASSWORD"]
    #
    #        try:
    #            Popen( """
    #                        cd %s;
    #                        pwd;
    #                        ls -la;
    #                        echo wget --timestamping %s --ftp-user=%s --ftp-password=%s > /dev/null 2>&1;
    #                        rm -rf 2008;
    #                        unzip -o %s
    #                        """ % (base_year_data_path, ftp_url, ftp_user, ftp_password, file_name),
    #                        shell = True
    #                        ).communicate()
    #        except:
    #            print "Error when downloading and unzipping file from %s." % ftp_url
    #            raise

    def tearDown(self):
        """
        delete [project_name]/runs directory to free up disk space
        """
        runs_path = os.path.join(self.data_path, "runs")
        # if os.path.exists(runs_path):
        #    Popen( "rm -rf %s" % runs_path, shell=True)

    def test_estimation(self):
        """Estimate every listed model; tuple entries also name a model group."""
        for model_name in [
            "residential_building_type_choice_model",
            "tenure_choice_model",
            "work_at_home_choice_model",
            "workplace_choice_model_for_resident",
            "auto_ownership_choice_model",
            "hbw_mode_choice_model",
            "real_estate_price_model",
            "household_location_choice_model",
            "shopping_destination_choice_model",
            ("employment_location_choice_model", "home_based"),
            ("employment_location_choice_model", "non_home_based"),
        ]:
            if type(model_name) == tuple:
                model_name, group_member = model_name
            else:
                group_member = None

            estimator = EstimationRunner(
                model=model_name, model_group=group_member, xml_configuration=self.xml_config, configuration=None
            )
            estimator.estimate()

    def test_simulation(self):
        """Run the psrc_baseline_test scenario through the run manager."""
        services_db = ServicesDatabaseConfiguration(
            database_name="services", database_configuration="services_database_server"
        )
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        for scenario_name in ["psrc_baseline_test"]:
            config = self.xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
            run_manager.setup_new_run(cache_directory=config["cache_directory"], configuration=config)
            run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)
Beispiel #42
0
    (options, args) = parser.parse_args()

    # Resolve the run configuration: either from an opus path (configuration_path)
    # or from an XML configuration plus a scenario name.
    if options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # Fall back to a module that exposes a run_configuration dict.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(
            options.xml_configuration).get_run_configuration(
                options.scenario_name)
    else:
        # Neither source of a configuration was supplied.
        parser.print_help()
        sys.exit(1)

    # Optional command-line overrides of the resolved configuration.
    if options.cache_directory is not None:
        config['cache_directory'] = options.cache_directory

    if options.database_name is not None or options.database_configuration is not None:
        # NOTE(review): dict.has_key() is Python-2-only; 'in' is the modern spelling.
        if not config.has_key('scenario_database_configuration'):
            config[
                'scenario_database_configuration'] = ScenarioDatabaseConfiguration(
                    database_name=options.database_name,
                    database_configuration=options.database_configuration)
        else:
Beispiel #43
0
class OpusProject(object):
    '''
    Methods and attributes for handling an OPUS project file.
    Inheritance is done by letting a XmlConfiguration object handle loading and saving.
    Each project has two root nodes, one for the full tree and one for the tree of all inherited
    values. Nodes that exists in the full tree AND in the inherited tree, but with different values
    are called "shadowing nodes", since they shadow the inherited value with a local variation.
    '''
    def __init__(self):
        self.name = ''              # Project name
        self.filename = ''          # Filename of loaded project
        self.xml_config = None      # XMLConfiguration object (set by _read_xml_from_config)
        self._root_node = None      # Full project tree root
        self._inherited_root = None # Root node of the tree with inherited nodes (original values)
        self._shadowing_nodes = {}  # Mapping of nodes in local tree to nodes in inherited tree
        self._dirty = False         # Dirty flag

    def __is_dirty(self):
        # Property getter for the dirty (unsaved changes) flag.
        return self._dirty

    def __set_dirty(self, dirty):
        # Property setter: also refresh the main window save-state so the UI
        # reflects whether there are unsaved changes.
        self._dirty = dirty
        update_mainwindow_savestate()

    # True when the project has unsaved modifications.
    dirty = property(__is_dirty, __set_dirty)

    def _read_xml_from_config(self, xml_config):
        ''' Pull the project trees out of an XMLConfiguration object and
        refresh the derived state (shadowing map, project name). '''
        self.xml_config = xml_config
        self._root_node = xml_config.full_tree.getroot()

        # When nothing is inherited, use an empty configuration's root so the
        # rest of the code can always assume an inherited tree exists.
        inherited = xml_config.inherited_tree
        if inherited is None:
            inherited = XMLConfiguration().full_tree.getroot()
        self._inherited_root = inherited

        self._init_shadowing_nodes()
        self._set_project_name()
        
    def _init_shadowing_nodes(self):
        # Rebuild the local->inherited shadowing map from scratch for the whole
        # project tree (empty id prefix for the root).
        self._shadowing_nodes = {}
        self._add_shadowing_nodes(self._root_node, '')
        
    def _add_shadowing_nodes(self, root_node, root_node_id):
        ''' Register every node under root_node that shadows a node of the
        inherited tree (same identity string) in self._shadowing_nodes. '''
        # Index the inherited nodes by identity string.
        inherited_by_id = {}
        for inh_node in self._inherited_root.getiterator():
            inherited_by_id[node_identity_string(inh_node)] = inh_node
        # Index the local (non-inherited) nodes, prefixing root_node_id.
        # Building a dict first intentionally collapses duplicate ids: only the
        # last node with a given id takes part in the join, as before.
        local_by_id = {}
        for local_node in root_node.getiterator():
            if not local_node.get('inherited'):
                local_by_id[root_node_id + node_identity_string(local_node)] = local_node
        # Join local and inherited nodes on matching identity.
        for id_, local_node in local_by_id.items():
            if id_ in inherited_by_id:
                shadowed = inherited_by_id[id_]
                assert local_node.tag == shadowed.tag
                self._shadowing_nodes[local_node] = shadowed

    def _set_project_name(self):
        ''' Derive the project name from the ./general/project_name node
        (defaulting to 'unnamed_project'), export it via the OPUSPROJECTNAME
        environment variable and clear the dirty flag. '''
        # Look the node up once instead of searching the tree twice.
        name_node = self.find('./general/project_name')
        if name_node is not None:
            self.name = name_node.text
        else:
            self.name = 'unnamed_project'
        os.environ['OPUSPROJECTNAME'] = self.name
        self.dirty = False
        
    def load_minimal_project(self):
        ''' Initialize the project from an empty, minimal XMLConfiguration,
        as if the smallest possible project file had been opened. '''
        self._read_xml_from_config(XMLConfiguration())

    def open(self, filename):
        '''
        Load a project file from XML.
        @return: flag and message (tuple(boolean, String))
        The flag is only True if the project loaded without problems.
        '''
        # Always close the project before loading another one to avoid mixing
        # data if the load is only partly successful
        self.close()
        filename = str(filename)
        if not os.path.exists(filename):
            return (False, "Tried to load project from file '%s', but that file does not exist"
                    %filename)
        default_path = os.path.dirname(filename)
        filename = os.path.basename(filename)
        try:
            xml_config = XMLConfiguration(filename, default_path)
            self._read_xml_from_config(xml_config)
            self.filename = os.path.normpath(os.path.join(default_path, filename))
            return (True, 'Project %s loaded OK' % filename)
        # Catch only the errors that XMLConfiguration is known to throw
        except (IOError, SyntaxError, ValueError, SyntaxError, XMLVersionException), ex:
            self.close()
            return (False, str(ex))
 def __init__(self, xml = '<opus_project />'):
     # Build a project directly from an XML string (defaults to an empty
     # <opus_project /> document) instead of loading it from a file.
     # NOTE(review): the enclosing class is not visible in this fragment;
     # presumably a test/dummy subclass of OpusProject -- confirm.
     OpusProject.__init__(self)
     xml_config = XMLConfiguration()
     xml_config.update(xml)
     self._read_xml_from_config(xml_config)
Beispiel #45
0
                      help="model for which you want to chart dependencies")
    parser.add_option("--group", dest="model_group", default = None,
                               action="store", help="name of the model group")
    parser.add_option("-o", "--output", dest="output", default=None,
                      help="output image")
    parser.add_option("-p",  dest="stdout",default=False, action="store_true",
                      help='print results into stdout')
    parser.add_option("-l", "--latex", dest="latex", default=None,
                      help="latex output file")

    (options, args) = parser.parse_args()

    if options.xml_configuration == None:
        # NOTE(review): raising a plain string is not valid in modern Python
        # (string exceptions were removed); should raise an exception instance.
        raise "Requires an xml configuration argument."

    # Build the dependency chart for the selected model / model group from
    # the XML configuration.
    chart = DependencyChart(XMLConfiguration(options.xml_configuration), model=options.model, model_group=options.model_group)
    #print chart.model_table(options.model)
    #temp = chart.query.vars_tree(chart.query.var_list)
    #print pretty_tree(chart.query.all_models_tree())

    #auto = AutogenVariableFactory("(urbansim_parcel.parcel.building_sqft/(parcel.parcel_sqft).astype(float32)).astype(float32)")
    #auto._analyze_tree(auto._expr_parsetree)
    #auto._analyze_dataset_names()
    #print(auto._generate_compute_method())

    # Emit the requested output: stdout dump, or latex (continuation cut off).
    if options.stdout:
        if options.model != None:
            chart.print_model_dependencies()
        else:
            chart.print_dependencies(options.variable)
    elif options.latex != None:
Beispiel #46
0
    config = {}
    # Resolve the scenario configuration from either an opus path or an XML
    # configuration plus scenario name; otherwise keep the empty dict.
    if options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            #       get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(
            options.xml_configuration).get_run_configuration(
                options.scenario_name)

    # Source database: from the config when present, else built from options.
    from_database_configuration = config.get(
        'scenario_database_configuration',
        ScenarioDatabaseConfiguration(
            database_name=options.scenario_database_name,
            database_configuration=options.scenario_database_configuration,
        ))
    # Target database name defaults to '<source>_flattened'.
    to_database_name = options.flattened_database_name or (
        from_database_configuration.database_name + '_flattened')
    to_database_configuration = ScenarioDatabaseConfiguration(
        protocol=from_database_configuration.protocol,
        host_name=from_database_configuration.host_name,
        user_name=from_database_configuration.user_name,
        password=from_database_configuration.password,
Beispiel #47
0
    option_group = ModelExplorerOptionGroup()
    parser = option_group.parser
    (options, args) = parser.parse_args()

    # Validate the mandatory command-line arguments (Python-2 'raise Class, msg'
    # syntax; a modern rewrite would use raise StandardError("...")).
    if options.year is None:
        raise StandardError, "Year (argument -y) must be given."
    if options.cache_directory is None:
        raise StandardError, "Cache directory (argument -d) must be given."
    if (options.configuration_path is None) and (options.xml_configuration is
                                                 None):
        raise StandardError, "Configuration path (argument -c) or XML configuration (argument -x) must be given."
    if (options.scenario_name is None) and (options.xml_configuration
                                            is not None):
        raise StandardError, "No scenario given (argument -s). Must be specified if option -x is used."
    if options.xml_configuration is not None:
        xconfig = XMLConfiguration(options.xml_configuration)
    else:
        xconfig = None
    if options.configuration_path is None:
        config = None
    else:
        config = get_config_from_opus_path(options.configuration_path)

    # The literal 'BASE' is translated to None -- presumably meaning "use the
    # base-year cache"; confirm against ModelExplorer's handling of None.
    if options.cache_directory == 'BASE':
        cache_directory = None
    else:
        cache_directory = options.cache_directory
    explorer = ModelExplorer(model=options.model_name,
                             year=int(options.year),
                             scenario_name=options.scenario_name,
                             model_group=options.model_group,
Beispiel #48
0
    seed = 100
    nchunks = 1 ##TODO

if __name__ == '__main__':
    # Optional Wing IDE debugger hook; ignore when not installed.
    try: import wingdbstub
    except: pass
    # Register extra estimation/run filter expressions as household aliases
    # before any estimation touches the dataset pool.
    from psrc_parcel.household.aliases import aliases
    aliases += ["customized_est_filter= (household.household_id > 6000000) * " + \
                "numpy.logical_or(household.tenure_id==1, household.tenure_id==3) * " + \
                #numpy.setmember1d(household.tenure, (1,3)) * \ ## buggy
                "(( (psrc_parcel.household.building_type_id==4) + (psrc_parcel.household.building_type_id==11) + (psrc_parcel.household.building_type_id==12) + (psrc_parcel.household.building_type_id==19)) >= 1) * " + \
                #numpy.setmember1d(psrc_parcel.household.building_type_id, (4, 11, 12, 19)) * \ ##ideally the above line can be replaced with this, but this is buggy
                "(household.aggregate((person.worker1==1)*(urbansim_parcel.person.is_non_home_based_worker_with_job)))",
               "customized_run_filter= household.aggregate(urbansim_parcel.person.is_non_home_based_worker)>=1"
               ]
    # NOTE(review): 'options' is referenced here but defined outside this
    # fragment (option parsing is not visible in this excerpt).
    xmlconfig = XMLConfiguration(options.xml_configuration)
    ## training data (start_estimation)
    training_data = []
    # Estimate each model hierarchy in turn, logging a block per model.
    for h, hierarchy in enumerate(options.meta_models):
        model_data = []
        for i, model_name in enumerate(hierarchy):
            logger.start_block('%s' % model_name)
            estimator = EstimationRunner(model=model_name, 
                                         specification_module=None, 
                                         xml_configuration=xmlconfig, 
                                         model_group=None,
                                         configuration=None,
                                         save_estimation_results=True)
            #estimator = EstimationRunner(model=model_name,
                                         #xml_configuration=xmlconfig,
                                         #save_estimation_results=False)
Beispiel #49
0
class IOCacheMATSimTestRun(opus_unittest.OpusTestCase):
    ''' Tests the UrbanSim export and import functionallity for the travel model.
    '''

    def setUp(self):
        ''' Unzip locations are resolved, a temp destination directory is
            created and the UrbanSim run configuration for the "Test"
            scenario is loaded. '''
        print "entering setUp"
        
        logger.log_status('Testing UrbanSim export and import functionallity for MATSim...')

        # get root path to test cases
        self.path = test_path.__path__[0]
        logger.log_status('Set root path for MATSim config file to: %s' % self.path)
        if not os.path.exists(self.path):
            raise StandardError("Root path doesn't exist: %s" % self.path)
        
        # get path to MATSim source files and base_year_cache
        self.matsim_source, self.base_year_data_source = self.get_source_files() 
        self.destination = tempfile.mkdtemp(prefix='opus_tmp')
        #self.destination = '/Users/thomas/Desktop/x'    # for debugging
        #if not os.path.exists(self.destination):        # for debugging
        #    os.mkdir(self.destination)                  # for debugging
                    
        # load UrbanSim config to run MATSim test
        urbansim_config_location = os.path.join( self.path, 'configs', 'urbansim_config') #'/Users/thomas/Development/workspace/urbansim_trunk/opus_matsim/tests/test_config.xml'
        logger.log_status('Loading UrbanSim config: %s' % urbansim_config_location)
        #self.run_config = XMLConfiguration( urbansim_config_location ).get_run_configuration("Test")
        urbansim_config_name = "urbansim_config_for_matsim_run_test.xml"
        self.run_config = XMLConfiguration( os.path.join(urbansim_config_location, urbansim_config_name)).get_run_configuration("Test")
        
        # set destination for MATSim config file
        self.matsim_config_full = os.path.join( self.destination, "test_matsim_config.xml" )
        
        print "leaving setUp"


    def tearDown(self):
        ''' Remove the temporary extraction directory created in setUp. '''
        print "entering tearDown"
        logger.log_status('Removing extracted MATSim files...')
        if os.path.exists(self.destination):
            rmtree(self.destination)
        logger.log_status('... cleaning up finished.')
        print "leaving tearDown"


    def testName(self):
        ''' Extracts the MATSim fixtures and base-year cache into the temp
            directory, points the run configuration at them and executes a
            full UrbanSim run via the RunManager. '''
        print "entering test_run"
        
        logger.log_status('Preparing MATsim test run ...')
        # unzip MATSim files
        matsim_zip = ExtractZipFile(self.matsim_source, self.destination)
        matsim_zip.extract()
        matsim_extracted_files = os.path.join(self.destination, 'MATSimTestClasses') # location of unziped MATSim files
        # unzip base_year_cache
        base_year_data_zip = ExtractZipFile(self.base_year_data_source, self.destination)
        base_year_data_zip.extract()
        base_year_data_extracted_files = os.path.join(self.destination, 'base_year_data') # location of unziped base_year_cache
        
                
        # updating location of base_year_data
        self.run_config['creating_baseyear_cache_configuration'].cache_directory_root = self.destination
        self.run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = base_year_data_extracted_files
        self.run_config['cache_directory'] = base_year_data_extracted_files
        self.run_config.add('matsim_files', matsim_extracted_files)
        self.run_config.add('matsim_config', self.matsim_config_full)
        self.run_config.add('root', self.destination)
        
        insert_auto_generated_cache_directory_if_needed(self.run_config)
        run_manager = RunManager(ServicesDatabaseConfiguration())
    
        run_manager.setup_new_run(cache_directory = self.run_config['cache_directory'],
                                  configuration = self.run_config)

        logger.log_status('Strating UrbanSim run ... ')
        run_manager.run_run(self.run_config, run_as_multiprocess = True )
        # after the UrbanSim run the travel data sets schould be equal
        # self.assertTrue( self.compare_travel_data_sets() )
        logger.log_status('... UrbanSim run finished.')
        
        print "leaving test_run"
    
    def get_source_files(self):
        ''' Returns the path to the MATSim and base_year_data
            source files
        '''
        
        matsim_source_files = os.path.join( self.path, 'data', 'MATSimTestClasses.zip')
        if not os.path.exists(matsim_source_files):
            raise StandardError("MATSim source file not found: %s" % matsim_source_files)
        logger.log_status('Referering to MATSim source file: %s' % matsim_source_files)
        
        base_year_data_source_files = os.path.join( self.path, 'data', 'base_year_data.zip')
        if not os.path.exists(base_year_data_source_files):
            raise StandardError("Base year data zip file not found: %s" % base_year_data_source_files)
        logger.log_status('Referering to base year cache file: %s' % base_year_data_source_files)
       
        return matsim_source_files, base_year_data_source_files

    def compare_travel_data_sets(self):
        ''' Compare the copied MATSim travel-data CSV against the travel data
            cached by the UrbanSim run for base_year+2; returns True only when
            every matrix cell matches (missing cells filled with 999 on both
            sides). '''
        
        # get copied travel data csv
        copied_travel_data_location = os.path.join( self.destination, 'opus_matsim', 'tmp')
        if not os.path.exists(copied_travel_data_location):
            raise StandardError('Travel data not found: %s' % copied_travel_data_location)
        logger.log_status('Get copied travel data: %s' % copied_travel_data_location)
        # convert travel data csv into travel data set matrix
        in_storage = csv_storage(storage_location = copied_travel_data_location)
        table_name = "travel_data"
        travel_data_attribute = 'single_vehicle_to_work_travel_cost'
        travel_data_set = TravelDataDataset( in_storage=in_storage, in_table_name=table_name )
        travel_data_attribute_mat = travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
        # get exsisting travel data set and convert it also into travel data set matrix
        year = self.run_config['base_year']+2
        attribute_cache = AttributeCache(cache_directory=self.run_config['cache_directory'])
        cache_storage = attribute_cache.get_flt_storage_for_year(year)
        existing_travel_data_set = TravelDataDataset( in_storage=cache_storage, in_table_name=table_name )
        existing_travel_data_attribute_mat = existing_travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
        
        from numpy import savetxt # for debugging
        savetxt( os.path.join(self.destination, 'origin_travel_data.txt'), travel_data_attribute_mat , fmt="%f")
        savetxt( os.path.join(self.destination, 'existing_travel_data') , existing_travel_data_attribute_mat, fmt="%f")
        
        # compare both data set matices
        compare = travel_data_attribute_mat == existing_travel_data_attribute_mat
        # return result
        return compare.all()     
Beispiel #50
0
 def load_minimal_project(self):
     ''' Setup the project as if it was loaded with an absolute minimal project config file '''
     # NOTE(review): this fragment is indented with a single space and its
     # enclosing class is not visible here (presumably OpusProject).
     minimal_config = XMLConfiguration()
     self._read_xml_from_config(minimal_config)
class IOCacheMATSimTestRun(opus_unittest.OpusTestCase):
    """ Tests the UrbanSim export and import functionallity for the travel model.
    (Black-formatted duplicate of the class above.)
    """

    def setUp(self):
        """Resolve fixture locations, create a temp destination directory and
        load the UrbanSim run configuration for the "Test" scenario."""
        print "entering setUp"

        logger.log_status("Testing UrbanSim export and import functionallity for MATSim...")

        # get root path to test cases
        self.path = test_path.__path__[0]
        logger.log_status("Set root path for MATSim config file to: %s" % self.path)
        if not os.path.exists(self.path):
            raise StandardError("Root path doesn't exist: %s" % self.path)

        # get path to MATSim source files and base_year_cache
        self.matsim_source, self.base_year_data_source = self.get_source_files()
        self.destination = tempfile.mkdtemp(prefix="opus_tmp")
        # self.destination = '/Users/thomas/Desktop/x'    # for debugging
        # if not os.path.exists(self.destination):        # for debugging
        #    os.mkdir(self.destination)                  # for debugging

        # load UrbanSim config to run MATSim test
        urbansim_config_location = os.path.join(
            self.path, "configs", "urbansim_config"
        )  #'/Users/thomas/Development/workspace/urbansim_trunk/opus_matsim/tests/test_config.xml'
        logger.log_status("Loading UrbanSim config: %s" % urbansim_config_location)
        # self.run_config = XMLConfiguration( urbansim_config_location ).get_run_configuration("Test")
        urbansim_config_name = "urbansim_config_for_matsim_run_test.xml"
        self.run_config = XMLConfiguration(
            os.path.join(urbansim_config_location, urbansim_config_name)
        ).get_run_configuration("Test")

        # set destination for MATSim config file
        self.matsim_config_full = os.path.join(self.destination, "test_matsim_config.xml")

        print "leaving setUp"

    def tearDown(self):
        """Remove the temporary extraction directory created in setUp."""
        print "entering tearDown"
        logger.log_status("Removing extracted MATSim files...")
        if os.path.exists(self.destination):
            rmtree(self.destination)
        logger.log_status("... cleaning up finished.")
        print "leaving tearDown"

    def testName(self):
        """Extract the MATSim fixtures and base-year cache, point the run
        configuration at them and execute a full UrbanSim run."""
        print "entering test_run"

        logger.log_status("Preparing MATsim test run ...")
        # unzip MATSim files
        matsim_zip = ExtractZipFile(self.matsim_source, self.destination)
        matsim_zip.extract()
        matsim_extracted_files = os.path.join(self.destination, "MATSimTestClasses")  # location of unziped MATSim files
        # unzip base_year_cache
        base_year_data_zip = ExtractZipFile(self.base_year_data_source, self.destination)
        base_year_data_zip.extract()
        base_year_data_extracted_files = os.path.join(
            self.destination, "base_year_data"
        )  # location of unziped base_year_cache

        # updating location of base_year_data
        self.run_config["creating_baseyear_cache_configuration"].cache_directory_root = self.destination
        self.run_config[
            "creating_baseyear_cache_configuration"
        ].baseyear_cache.existing_cache_to_copy = base_year_data_extracted_files
        self.run_config["cache_directory"] = base_year_data_extracted_files
        self.run_config.add("matsim_files", matsim_extracted_files)
        self.run_config.add("matsim_config", self.matsim_config_full)
        self.run_config.add("root", self.destination)

        insert_auto_generated_cache_directory_if_needed(self.run_config)
        run_manager = RunManager(ServicesDatabaseConfiguration())

        run_manager.setup_new_run(cache_directory=self.run_config["cache_directory"], configuration=self.run_config)

        logger.log_status("Strating UrbanSim run ... ")
        run_manager.run_run(self.run_config, run_as_multiprocess=True)
        # after the UrbanSim run the travel data sets schould be equal
        # self.assertTrue( self.compare_travel_data_sets() )
        logger.log_status("... UrbanSim run finished.")

        print "leaving test_run"

    def get_source_files(self):
        """ Returns the path to the MATSim and base_year_data
            source files
        """

        matsim_source_files = os.path.join(self.path, "data", "MATSimTestClasses.zip")
        if not os.path.exists(matsim_source_files):
            raise StandardError("MATSim source file not found: %s" % matsim_source_files)
        logger.log_status("Referering to MATSim source file: %s" % matsim_source_files)

        base_year_data_source_files = os.path.join(self.path, "data", "base_year_data.zip")
        if not os.path.exists(base_year_data_source_files):
            raise StandardError("Base year data zip file not found: %s" % base_year_data_source_files)
        logger.log_status("Referering to base year cache file: %s" % base_year_data_source_files)

        return matsim_source_files, base_year_data_source_files

    def compare_travel_data_sets(self):
        """Compare the copied MATSim travel-data CSV against the travel data
        cached by the run for base_year+2; True only when every matrix cell
        matches (missing cells filled with 999 on both sides)."""

        # get copied travel data csv
        copied_travel_data_location = os.path.join(self.destination, "opus_matsim", "tmp")
        if not os.path.exists(copied_travel_data_location):
            raise StandardError("Travel data not found: %s" % copied_travel_data_location)
        logger.log_status("Get copied travel data: %s" % copied_travel_data_location)
        # convert travel data csv into travel data set matrix
        in_storage = csv_storage(storage_location=copied_travel_data_location)
        table_name = "travel_data"
        travel_data_attribute = "single_vehicle_to_work_travel_cost"
        travel_data_set = TravelDataDataset(in_storage=in_storage, in_table_name=table_name)
        travel_data_attribute_mat = travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
        # get exsisting travel data set and convert it also into travel data set matrix
        year = self.run_config["base_year"] + 2
        attribute_cache = AttributeCache(cache_directory=self.run_config["cache_directory"])
        cache_storage = attribute_cache.get_flt_storage_for_year(year)
        existing_travel_data_set = TravelDataDataset(in_storage=cache_storage, in_table_name=table_name)
        existing_travel_data_attribute_mat = existing_travel_data_set.get_attribute_as_matrix(
            travel_data_attribute, fill=999
        )

        from numpy import savetxt  # for debugging

        savetxt(os.path.join(self.destination, "origin_travel_data.txt"), travel_data_attribute_mat, fmt="%f")
        savetxt(os.path.join(self.destination, "existing_travel_data"), existing_travel_data_attribute_mat, fmt="%f")

        # compare both data set matices
        compare = travel_data_attribute_mat == existing_travel_data_attribute_mat
        # return result
        return compare.all()