# Example #1 (scrape artifact; vote count: 0)
def make_zone_dbfs(cache_directory):
    """Write zone-level DBF indicator tables for every travel model year.

    Loads the San Francisco project configuration, collects the travel model
    years (the integer keys of the travel model configuration that carry a
    'year_dir' entry), assembles the indicator attribute expressions (the
    configured urbansim-to-travel-model variable mapping plus a fixed set of
    accessibility / travel-time variables), and hands everything to
    IndicatorFactory to produce one 'zone Indicators' DBF per year.

    cache_directory -- path of the UrbanSim cache holding the run output.
    """
    xmlconfig = XMLConfiguration(filename="sanfrancisco.xml", 
                                 default_directory=r'C:\opus\project_configs',
                                 is_parent=False)
    runconfig = xmlconfig.get_run_configuration("sanfrancisco_baseline2009", merge_controllers=True)
    tm_config = runconfig['travel_model_configuration']
    # print() call form works on both Python 2 (single argument) and
    # Python 3, unlike the original print statement.
    print(tm_config['urbansim_to_tm_variable_mapping'])

    travel_model_years = []
    # Plain iteration and the 'in' operator replace iterkeys()/has_key(),
    # both of which were removed in Python 3.
    for key in tm_config:
        if isinstance(key, int) and 'year_dir' in tm_config[key]:
            travel_model_years.append(key)
    travel_model_years.sort()

    zonedbfs_source_data = SourceData(
        cache_directory = cache_directory,
        run_description = "Run description is used for what?",
        years = travel_model_years,
        dataset_pool_configuration = DatasetPoolConfiguration(
            package_order=['sanfrancisco','urbansim','opus_core'],
        ),
    )

    # Start from the configured urbansim -> travel-model variable mapping;
    # '.' is replaced with '_' so each name is a legal DBF column name.
    attrs = []
    for key, val in tm_config['urbansim_to_tm_variable_mapping'].items():
        key = key.replace(".", "_")
        attrs.append("%s=%s" % (key, val))

    # Fixed set of accessibility and travel-time indicator expressions.
    attrs.extend([
      "pwac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_by_population",
      "pwac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_by_population",
      "pwac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_by_population",
      "pwac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_by_population",
      "pwac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_by_population",
      "ewac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_to_employment",
      "ewac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_to_employment",
      "ewac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_to_employment",
      "ewac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_to_employment",
      "ewac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_to_employment",
      "ttpw_bus=sanfrancisco.zone.bus_travel_time_to_751",
      "ttpw_exp=sanfrancisco.zone.exp_travel_time_to_751",
      "ttpw_lrt=sanfrancisco.zone.lrt_travel_time_to_751",
      "ttpw_bart=sanfrancisco.zone.bart_travel_time_to_751",
      "ttpw_hwy=sanfrancisco.zone.hwy_travel_time_to_751",
      "d2powell=sanfrancisco.zone.dist_travel_time_to_751"
    ])

    zonedbf_indicators = [ DatasetTable(
        source_data = zonedbfs_source_data,
        dataset_name = 'zone',
        name = 'zone Indicators',
        output_type='dbf',
        attributes = attrs
        ) ]

    IndicatorFactory().create_indicators(indicators = zonedbf_indicators,
                                         display_error_box = False,
                                         show_results = False)
def make_zone_dbfs(cache_directory):
    """Write zone-level DBF indicator tables for every travel model year.

    Loads the San Francisco project configuration, collects the travel model
    years (the integer keys of the travel model configuration that carry a
    'year_dir' entry), assembles the indicator attribute expressions, and
    hands everything to IndicatorFactory to produce one 'zone Indicators'
    DBF per year.

    cache_directory -- path of the UrbanSim cache holding the run output.
    """
    xmlconfig = XMLConfiguration(filename="sanfrancisco.xml",
                                 default_directory=r'C:\opus\project_configs',
                                 is_parent=False)
    runconfig = xmlconfig.get_run_configuration("sanfrancisco_baseline2009",
                                                merge_controllers=True)
    tm_config = runconfig['travel_model_configuration']
    # print() call form works on both Python 2 (single argument) and
    # Python 3, unlike the original print statement.
    print(tm_config['urbansim_to_tm_variable_mapping'])

    travel_model_years = []
    # Plain iteration and the 'in' operator replace iterkeys()/has_key(),
    # both of which were removed in Python 3.
    for key in tm_config:
        if isinstance(key, int) and 'year_dir' in tm_config[key]:
            travel_model_years.append(key)
    travel_model_years.sort()

    zonedbfs_source_data = SourceData(
        cache_directory=cache_directory,
        run_description="Run description is used for what?",
        years=travel_model_years,
        dataset_pool_configuration=DatasetPoolConfiguration(
            package_order=['sanfrancisco', 'urbansim', 'opus_core'], ),
    )

    # Start from the configured urbansim -> travel-model variable mapping;
    # '.' is replaced with '_' so each name is a legal DBF column name.
    attrs = []
    for key, val in tm_config['urbansim_to_tm_variable_mapping'].items():
        key = key.replace(".", "_")
        attrs.append("%s=%s" % (key, val))

    # Fixed set of accessibility and travel-time indicator expressions.
    attrs.extend([
      "pwac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_by_population",
      "pwac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_by_population",
      "pwac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_by_population",
      "pwac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_by_population",
      "pwac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_by_population",
      "ewac_bus=sanfrancisco.zone.bus_travel_time_weighted_access_to_employment",
      "ewac_exp=sanfrancisco.zone.exp_travel_time_weighted_access_to_employment",
      "ewac_lrt=sanfrancisco.zone.lrt_travel_time_weighted_access_to_employment",
      "ewac_bart=sanfrancisco.zone.bart_travel_time_weighted_access_to_employment",
      "ewac_hwy=sanfrancisco.zone.hwy_travel_time_weighted_access_to_employment",
      "ttpw_bus=sanfrancisco.zone.bus_travel_time_to_751",
      "ttpw_exp=sanfrancisco.zone.exp_travel_time_to_751",
      "ttpw_lrt=sanfrancisco.zone.lrt_travel_time_to_751",
      "ttpw_bart=sanfrancisco.zone.bart_travel_time_to_751",
      "ttpw_hwy=sanfrancisco.zone.hwy_travel_time_to_751",
      "d2powell=sanfrancisco.zone.dist_travel_time_to_751"
    ])

    zonedbf_indicators = [
        DatasetTable(source_data=zonedbfs_source_data,
                     dataset_name='zone',
                     name='zone Indicators',
                     output_type='dbf',
                     attributes=attrs)
    ]

    IndicatorFactory().create_indicators(indicators=zonedbf_indicators,
                                         display_error_box=False,
                                         show_results=False)
 def test_simulation(self):
     """Smoke test: run the Eugene_baseline scenario end to end."""
     # Locate the installed 'eugene' package to find its project config.
     project_dir = __import__('eugene').__path__[0]
     xml_cfg = XMLConfiguration(
         os.path.join(project_dir, 'configs', 'eugene_gridcell.xml'))
     group = StartRunOptionGroup()
     # Parse an empty list to simulate zero command-line arguments.
     opts, _ = group.parser.parse_args([])
     manager = RunManager(group.get_services_database_configuration(opts))
     scenario = xml_cfg.get_run_configuration('Eugene_baseline')
     insert_auto_generated_cache_directory_if_needed(scenario)
     manager.setup_new_run(cache_directory=scenario['cache_directory'],
                           configuration=scenario)
     manager.run_run(scenario)
 def test_simulation(self):
     """Smoke test: run the Seattle_baseline scenario without crashing."""
     # Locate the installed 'seattle_parcel' package to find its config.
     project_dir = __import__('seattle_parcel').__path__[0]
     xml_cfg = XMLConfiguration(
         os.path.join(project_dir, 'configs', 'seattle_parcel.xml'))
     group = StartRunOptionGroup()
     # Parse an empty list to simulate zero command-line arguments.
     opts, _ = group.parser.parse_args([])
     manager = RunManager(group.get_services_database_configuration(opts))
     scenario = xml_cfg.get_run_configuration('Seattle_baseline')
     insert_auto_generated_cache_directory_if_needed(scenario)
     manager.setup_new_run(cache_directory=scenario['cache_directory'],
                           configuration=scenario)
     manager.run_run(scenario)
# Example #5 (scrape artifact; vote count: 0)
 def test_simulation(self):
     """Run the Eugene_baseline scenario via the RunManager as a smoke test."""
     # The 'eugene' package's install location holds the XML project config.
     pkg_root = __import__('eugene').__path__[0]
     configuration = XMLConfiguration(
         os.path.join(pkg_root, 'configs', 'eugene_gridcell.xml'))
     option_grp = StartRunOptionGroup()
     # An empty argv list means every option takes its default value.
     parsed, _ignored = option_grp.parser.parse_args([])
     services_cfg = option_grp.get_services_database_configuration(parsed)
     mgr = RunManager(services_cfg)
     run_cfg = configuration.get_run_configuration('Eugene_baseline')
     insert_auto_generated_cache_directory_if_needed(run_cfg)
     mgr.setup_new_run(
         cache_directory=run_cfg['cache_directory'],
         configuration=run_cfg)
     mgr.run_run(run_cfg)
    def test_simulation(self):
        """Refresh base_year_data via shell commands, then run the
        washtenaw_baseline_test scenario end to end.

        Requires the FTP_URL, FTP_USERNAME and FTP_PASSWORD environment
        variables and a reachable services database.
        """
        base_year_data_path = os.path.join(self.data_path, 'base_year_data')
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)

        ftp_url = os.environ["FTP_URL"]
        file_name = os.path.split(ftp_url)[1]
        ftp_user = os.environ["FTP_USERNAME"]
        ftp_password = os.environ["FTP_PASSWORD"]

        # SECURITY NOTE: the URL and credentials are interpolated into a
        # shell string (shell=True). Acceptable for trusted env vars only;
        # do not reuse this pattern with untrusted input.
        # NOTE(review): the wget command is prefixed with 'echo', so no
        # download actually happens here -- only rm/unzip run. Confirm this
        # is intentional before relying on a fresh download.
        try:
            Popen( """
                        cd %s;
                        pwd;
                        ls -la;
                        echo wget --timestamping %s --ftp-user=%s --ftp-password=%s > /dev/null 2>&1;
                        rm -rf 2008;
                        unzip -o %s
                        """ % (base_year_data_path, ftp_url, ftp_user, ftp_password, file_name),
                        shell = True
                        ).communicate()
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are not swallowed; the failure is reported and re-raised.
            print("Error when downloading and unzipping file from %s." % ftp_url)
            raise

        services_db = ServicesDatabaseConfiguration(
            database_name='services',
            database_configuration='services_database_server')
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'washtenaw_parcel.xml'))
        for scenario_name in ['washtenaw_baseline_test']:
            config = xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
            # Restricting the run to (base_year+1, base_year+2) was disabled:
            # config['years_to_run'] = (base_year+1, base_year+2)
            run_manager.setup_new_run(cache_directory=config['cache_directory'],
                                      configuration=config)
            run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)
# Example #7 (scrape artifact; vote count: 0)
                # NOTE(review): fragment -- 'm', 'i', 'model_data',
                # 'model_system', 'dataset_pool', 'training_data', 'options'
                # and 'xmlconfig' are defined in enclosing code not visible
                # here.
                ms_variablename = VariableName(ms_expression)
        
                # Resolve the dataset the market-share expression refers to;
                # fall back to the 'datasets' sub-namespace when it is not a
                # top-level entry of the run-year namespace.
                dataset_name = ms_variablename.get_dataset_name()
                ds = model_system.run_year_namespace[dataset_name] or model_system.run_year_namespace['datasets'][dataset_name]
                id_name = ds.get_id_name()[0]
                ds.compute_variables([ms_variablename], dataset_pool=dataset_pool)
                # (id, market-share) pairs for every member of the dataset.
                ms = ds.get_multiple_attributes([id_name, ms_variablename.get_alias()])
                
                # Market id per choice, indexed into the agents-x-choices
                # layout of the model interaction.
                market_ids = m.choice_set.compute_one_variable_with_unknown_package( id_name, dataset_pool=dataset_pool)
                market_ids_2d = market_ids[m.model_interaction.get_choice_index()]
                model_data[i].update({'market_id':market_ids_2d, 'market_share':ms})

            logger.end_block()
        training_data.append(model_data)
        
    config = xmlconfig.get_run_configuration(options.scenario_name)
    # If no agent indices were supplied, draw a sample: a weighted sample
    # without replacement over agents passing the filter, otherwise a
    # uniform random draw over all agents.
    if not options.agents_index:
        agent_set = dataset_pool.get_dataset(options.agent_set)
        agents_size = agent_set.size()
        if options.agents_filter:
            is_valid = agent_set.compute_variables(options.agents_filter)
            options.agents_index = probsample_noreplace(arange(agents_size),
                                                        options.sample_size,
                                                        prob_array=is_valid
                                                       ).tolist()
        else:
            options.agents_index = randint(0, agents_size, size=options.sample_size).tolist()

    ## regularization data
    population_data = []
    for h, hierarchy in enumerate(options.meta_models):
# Example #8 (scrape artifact; vote count: 0)
    # NOTE(review): fragment of a command-line entry point; 'parser' and the
    # options it already defines (-y year, -r resources_file_name,
    # output_directory) come from code not visible here.
    parser.add_option("-x", "--xml-configuration", dest="xml_configuration", default = None,
                               action="store", help="Full path to an XML configuration file (must also provide a scenario name using -s). Either -x or -r must be given.")
    parser.add_option("-s", "--scenario_name", dest="scenario_name", default=None, 
                                help="Name of the scenario. Must be given if option -x is used.")
    parser.add_option("-d", "--directory", dest="cache_directory", default = None,
                               action="store", help="Cache directory with urbansim output.")
    (options, args) = parser.parse_args()
    # Validate option combinations (Python 2 raise syntax; StandardError was
    # removed in Python 3).
    if options.year is None:
        raise StandardError, "Year (argument -y) must be given."
    if (options.scenario_name is None) and (options.xml_configuration is not None):
        raise StandardError, "No scenario given (argument -s). Must be specified if option -x is used."
    r = None
    xconfig = None
    # Build run resources either from a resources file (-r) or from an XML
    # configuration plus scenario name (-x/-s).
    if options.resources_file_name is not None:
        r = get_resources_from_file(options.resources_file_name)
        # NOTE(review): the resources file is parsed twice (once into 'r'
        # above, once here) -- presumably redundant; confirm before
        # simplifying.
        resources = Resources(get_resources_from_file(options.resources_file_name))
    elif options.xml_configuration is not None:
        xconfig = XMLConfiguration(options.xml_configuration)
        resources = xconfig.get_run_configuration(options.scenario_name)
    else:
        raise StandardError, "Either option -r or -x must be used."
        
    # Export the cache data for the requested year into Daysim input files,
    # then optionally copy them to the output directory.
    files = GetCacheDataIntoDaysim(resources).run(options.year, cache_directory=options.cache_directory)
    if options.output_directory is not None:
        for file in files:
            copy(file, options.output_directory)
        logger.log_status('Files copied into %s' % options.output_directory)
        
# For a test run, use options
# -x opus_daysim/configs/sample_daysim_configuration.xml -s daysim_scenario -d opus_core/data/test_cache -y 1980
# Example #9 (scrape artifact; vote count: 0)
class TestSimulation(opus_unittest.OpusIntegrationTestCase):
    """Integration test: estimate models and run sanfrancisco_baseline_test
    from project_configs/sanfrancisco.xml.
    """

    def setUp(self):
        """Set up the opus data path and XML configuration.

        A base_year_data cache directory must exist; it is created here if
        missing, but populating it (e.g. downloading and unzipping a
        snapshot) is assumed to have happened out of band.
        """
        self.opus_home = os.environ["OPUS_HOME"]
        # 'in' membership test instead of dict.has_key(), which was removed
        # in Python 3.
        if 'OPUS_DATA_PATH' in os.environ:
            self.data_path = os.path.join(os.environ['OPUS_DATA_PATH'],
                                          'sanfrancisco')
        else:
            self.data_path = os.path.join(self.opus_home, 'data',
                                          'sanfrancisco')

        self.xml_config = XMLConfiguration(
            os.path.join(self.opus_home, 'project_configs',
                         'sanfrancisco.xml'))

        base_year_data_path = os.path.join(self.data_path, 'base_year_data')
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)
        # NOTE(review): a commented-out FTP download/unzip bootstrap for
        # base_year_data used to live here; the cache is now expected to be
        # populated beforehand.

    def tearDown(self):
        """Placeholder for deleting [project_name]/runs to free disk space.

        The actual deletion is intentionally disabled; only the path is
        computed.
        """
        runs_path = os.path.join(self.data_path, 'runs')
        #if os.path.exists(runs_path):
        #    Popen( "rm -rf %s" % runs_path, shell=True)

    def test_estimation(self):
        """Estimate each configured model.

        Entries may be either a bare model name or a (model_name,
        group_member) tuple.
        """
        for model_name in [
                'real_estate_price_model', 'household_location_choice_model',
                'business_location_choice_model',
                'building_location_choice_model'
        ]:
            # isinstance() instead of type() == tuple: idiomatic and
            # subclass-safe.
            if isinstance(model_name, tuple):
                model_name, group_member = model_name
            else:
                group_member = None

            estimator = EstimationRunner(model=model_name,
                                         model_group=group_member,
                                         xml_configuration=self.xml_config,
                                         configuration=None,
                                         save_estimation_results=True)
            estimator.estimate()

    def test_simulation(self):
        """Run the sanfrancisco_baseline_test scenario via the RunManager."""
        services_db = ServicesDatabaseConfiguration(
            database_name='services',
            database_configuration='services_database_server')
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        for scenario_name in ['sanfrancisco_baseline_test']:
            config = self.xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
            run_manager.setup_new_run(
                cache_directory=config['cache_directory'],
                configuration=config)
            run_manager.run_run(config,
                                run_as_multiprocess=run_as_multiprocess)
class TestSimulation(opus_unittest.OpusIntegrationTestCase):
    """Integration test: estimate models and run san_antonio_baseline_test
    from project_configs/san_antonio_zone.xml.
    """

    def setUp(self):
        """Set up the opus data path and XML configuration.

        A base_year_data cache directory must exist; it is created here if
        missing, but populating it is assumed to have happened out of band.
        """
        self.opus_home = os.environ["OPUS_HOME"]
        # 'in' membership test instead of dict.has_key(), which was removed
        # in Python 3.
        if 'OPUS_DATA_PATH' in os.environ:
            self.data_path = os.path.join(os.environ['OPUS_DATA_PATH'], 'san_antonio_zone')
        else:
            self.data_path = os.path.join(self.opus_home, 'data', 'san_antonio_zone')

        self.xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'san_antonio_zone.xml'))

        base_year_data_path = os.path.join(self.data_path, 'base_year_data')
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)
        # NOTE(review): a commented-out FTP download/unzip bootstrap for
        # base_year_data used to live here; the cache is now expected to be
        # populated beforehand.

    def tearDown(self):
        """Placeholder for deleting [project_name]/runs to free disk space;
        the actual deletion is intentionally disabled."""
        runs_path = os.path.join(self.data_path, 'runs')
        #if os.path.exists(runs_path):
        #    Popen( "rm -rf %s" % runs_path, shell=True)

    def test_estimation(self):
        """Estimate each configured model.

        Entries may be either a bare model name or a (model_name,
        group_member) tuple.
        """
        for model_name in ['real_estate_price_model',
                           'household_location_choice_model',
                           ('employment_location_choice_model', 'home_based'),
                           ('employment_location_choice_model', 'non_home_based'),
                           'residential_development_project_location_choice_model',
                           'non_residential_development_project_location_choice_model',
                           ]:
            # isinstance() instead of type() == tuple: idiomatic and
            # subclass-safe.
            if isinstance(model_name, tuple):
                model_name, group_member = model_name
            else:
                group_member = None

            estimator = EstimationRunner(model=model_name,
                                         model_group=group_member,
                                         xml_configuration=self.xml_config,
                                         configuration=None
                                         )
            estimator.estimate()

    def test_simulation(self):
        """Run the san_antonio_baseline_test scenario via the RunManager."""
        services_db = ServicesDatabaseConfiguration(database_name='services',
                                                    database_configuration='services_database_server')
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        for scenario_name in ['san_antonio_baseline_test']:
            config = self.xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
            run_manager.setup_new_run(cache_directory=config['cache_directory'],
                                      configuration=config)
            run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)
# Example #11 (scrape artifact; vote count: 0)
class TestSimulation(opus_unittest.OpusIntegrationTestCase):
    """Integration test: estimate models and run psrc_baseline_test from
    project_configs/psrc_parcel_test.xml.
    """

    def setUp(self):
        """Set up the opus data path and XML configuration.

        A base_year_data cache directory must exist; it is created here if
        missing, but populating it is assumed to have happened out of band.
        """
        self.opus_home = paths.OPUS_HOME
        # NOTE(review): 'get_opus_data_path_path' looks like a typo'd helper
        # name, but it is what the paths module exposes here -- confirm
        # before renaming.
        self.data_path = paths.get_opus_data_path_path("psrc_parcel")

        self.xml_config = XMLConfiguration(os.path.join(self.opus_home, "project_configs", "psrc_parcel_test.xml"))

        base_year_data_path = os.path.join(self.data_path, "base_year_data")
        if not os.path.exists(base_year_data_path):
            os.makedirs(base_year_data_path)
        # NOTE(review): a commented-out FTP download/unzip bootstrap for
        # base_year_data used to live here; the cache is now expected to be
        # populated beforehand.

    def tearDown(self):
        """Placeholder for deleting [project_name]/runs to free disk space;
        the actual deletion is intentionally disabled."""
        runs_path = os.path.join(self.data_path, "runs")
        # if os.path.exists(runs_path):
        #    Popen( "rm -rf %s" % runs_path, shell=True)

    def test_estimation(self):
        """Estimate each configured model.

        Entries may be either a bare model name or a (model_name,
        group_member) tuple.
        """
        for model_name in [
            "residential_building_type_choice_model",
            "tenure_choice_model",
            "work_at_home_choice_model",
            "workplace_choice_model_for_resident",
            "auto_ownership_choice_model",
            "hbw_mode_choice_model",
            "real_estate_price_model",
            "household_location_choice_model",
            "shopping_destination_choice_model",
            ("employment_location_choice_model", "home_based"),
            ("employment_location_choice_model", "non_home_based"),
        ]:
            # isinstance() instead of type() == tuple: idiomatic and
            # subclass-safe.
            if isinstance(model_name, tuple):
                model_name, group_member = model_name
            else:
                group_member = None

            estimator = EstimationRunner(
                model=model_name, model_group=group_member, xml_configuration=self.xml_config, configuration=None
            )
            estimator.estimate()

    def test_simulation(self):
        """Run the psrc_baseline_test scenario via the RunManager."""
        services_db = ServicesDatabaseConfiguration(
            database_name="services", database_configuration="services_database_server"
        )
        run_manager = RunManager(services_db)
        run_as_multiprocess = True
        for scenario_name in ["psrc_baseline_test"]:
            config = self.xml_config.get_run_configuration(scenario_name)
            insert_auto_generated_cache_directory_if_needed(config)
            run_manager.setup_new_run(cache_directory=config["cache_directory"], configuration=config)
            run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)