def test_run(self): # The paths work as follows: opus_matsim.__path__ is the path of the opus_matsim python module. So we can use that # as anchor ... config_location = os.path.join(opus_matsim.__path__[0], 'tests') print "location: ", config_location run_config = XMLConfiguration( os.path.join(config_location,"test_config.xml")).get_run_configuration("Test") run_config['creating_baseyear_cache_configuration'].cache_directory_root = self.temp_dir run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = \ os.path.join(opus_matsim.__path__[0], 'tests', 'testdata', 'base_year_data') # insert_auto_generated_cache_directory... does things I don't understand. Need to do the following to obtain consistent # behavior independent from the file root: run_config['cache_directory'] = None insert_auto_generated_cache_directory_if_needed(run_config) run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run(cache_directory = run_config['cache_directory'], configuration = run_config) run_manager.run_run(run_config, run_as_multiprocess = True ) self.assert_(True) self.cleanup_test_run()
def testName(self): print "entering test_run" logger.log_status('Preparing MATsim test run ...') # unzip MATSim files matsim_zip = ExtractZipFile(self.matsim_source, self.destination) matsim_zip.extract() matsim_extracted_files = os.path.join(self.destination, 'MATSimTestClasses') # location of unziped MATSim files # unzip base_year_cache base_year_data_zip = ExtractZipFile(self.base_year_data_source, self.destination) base_year_data_zip.extract() base_year_data_extracted_files = os.path.join(self.destination, 'base_year_data') # location of unziped base_year_cache # updating location of base_year_data self.run_config['creating_baseyear_cache_configuration'].cache_directory_root = self.destination self.run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = base_year_data_extracted_files self.run_config['cache_directory'] = base_year_data_extracted_files self.run_config.add('matsim_files', matsim_extracted_files) self.run_config.add('matsim_config', self.matsim_config_full) self.run_config.add('root', self.destination) insert_auto_generated_cache_directory_if_needed(self.run_config) run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run(cache_directory = self.run_config['cache_directory'], configuration = self.run_config) logger.log_status('Strating UrbanSim run ... ') run_manager.run_run(self.run_config, run_as_multiprocess = True ) # after the UrbanSim run the travel data sets schould be equal # self.assertTrue( self.compare_travel_data_sets() ) logger.log_status('... UrbanSim run finished.') print "leaving test_run"
def _do_run_simple_test_run(caller, temp_dir, config, end_year=None):
    """Runs model system with a single model (for speed).

    Sets the .resources property of the caller before starting the run.

    caller   -- test object that receives the final configuration in .resources
    temp_dir -- root directory for the generated base-year cache
    config   -- services-database configuration handed to RunManager
    end_year -- optional last simulated year (defaults to the config's own range)
    """
    runs_manager = RunManager(config)
    run_configuration = _get_run_config(temp_dir=temp_dir)
    run_configuration[
        'creating_baseyear_cache_configuration'].cache_directory_root = temp_dir
    # Restrict the run to a single model so the test stays fast.
    run_configuration['models'] = ['land_price_model']
    if end_year is not None:
        run_configuration['years'] = (run_configuration['years'][0], end_year)
    SessionConfiguration(
        new_instance=True,
        package_order=run_configuration['dataset_pool_configuration'].package_order,
        in_storage=AttributeCache())
    # BUGFIX: generate the cache directory only once, AFTER the configuration is
    # complete. The original called insert_auto_generated_cache_directory_if_needed
    # a first time before cache_directory_root was set and then again here; the
    # sibling variant of this helper calls it exactly once at this point.
    insert_auto_generated_cache_directory_if_needed(run_configuration)
    caller.resources = run_configuration
    runs_manager.setup_new_run(
        cache_directory=run_configuration['cache_directory'],
        configuration=run_configuration)
    runs_manager.run_run(run_configuration)
def test_run(self):
    """Run the 'Test' scenario from test_config.xml through RunManager, then clean up."""
    # The paths work as follows: opus_matsim.__path__ is the path of the opus_matsim
    # python module. So we can use that as anchor ...
    config_location = os.path.join(opus_matsim.__path__[0], 'tests')
    print "location: ", config_location
    run_config = XMLConfiguration(
        os.path.join(config_location, "test_config.xml")).get_run_configuration("Test")
    run_config[
        'creating_baseyear_cache_configuration'].cache_directory_root = self.temp_dir
    run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = \
        os.path.join(opus_matsim.__path__[0], 'tests', 'testdata', 'base_year_data')
    # insert_auto_generated_cache_directory... does things I don't understand. Need to
    # do the following to obtain consistent behavior independent from the file root:
    run_config['cache_directory'] = None
    insert_auto_generated_cache_directory_if_needed(run_config)
    run_manager = RunManager(ServicesDatabaseConfiguration())
    run_manager.setup_new_run(
        cache_directory=run_config['cache_directory'],
        configuration=run_config)
    run_manager.run_run(run_config, run_as_multiprocess=True)
    # NOTE(review): this assertion is vacuous — the test effectively passes
    # whenever run_run returns without raising.
    self.assert_(True)
    self.cleanup_test_run()
def test_run(self): print "Entering test run" run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run(cache_directory = self.config['cache_directory'],configuration = self.config) run_manager.run_run(self.config, run_as_multiprocess = True ) print "Leaving test run"
def test_run(self): print "Entering test run" run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run(cache_directory = self.config['cache_directory'],configuration = self.config) run_manager.run_run(self.config, run_as_multiprocess = True ) print "Leaving test run"
def run(self, config, executable):
    """Load scenario `executable` from the XML project file `config` and run it.

    Example CLI: --config=opus_matsim/sustain_city/configs/seattle_parcel.xml
                 --executable=Seattle_baseline
    """
    run_cfg = XMLConfiguration(config).get_run_configuration(executable)
    insert_auto_generated_cache_directory_if_needed(run_cfg)
    manager = RunManager(ServicesDatabaseConfiguration())
    manager.setup_new_run(cache_directory=run_cfg['cache_directory'],
                          configuration=run_cfg)
    manager.run_run(run_cfg, run_as_multiprocess=True)
def test_simulation(self):
    """Run each San Antonio test scenario end to end against the services DB."""
    services_db = ServicesDatabaseConfiguration(
        database_name='services',
        database_configuration='services_database_server')
    manager = RunManager(services_db)
    multiprocess = True
    for scenario_name in ['san_antonio_baseline_test']:
        scenario_config = self.xml_config.get_run_configuration(scenario_name)
        insert_auto_generated_cache_directory_if_needed(scenario_config)
        manager.setup_new_run(cache_directory=scenario_config['cache_directory'],
                              configuration=scenario_config)
        manager.run_run(scenario_config, run_as_multiprocess=multiprocess)
def run(self):
    """Execute self.config as a freshly registered multiprocess run."""
    logger.start_block()
    insert_auto_generated_cache_directory_if_needed(self.config)
    manager = RunManager(ServicesDatabaseConfiguration())
    manager.setup_new_run(cache_directory=self.config['cache_directory'],
                          configuration=self.config)
    manager.run_run(self.config, run_as_multiprocess=True)
    logger.end_block()
def run(self):
    """Resolve a run configuration from VIBe options and launch it.

    The configuration comes from, in priority order: a pickled resource file,
    an opus configuration path, or an XML configuration plus scenario name.
    With none of these, usage is printed and the process exits. Simulates
    self.YearsToRun years starting at 1981.
    """
    option_group = StartRunOptionGroup()
    options = VIBeOptions(self.YearsToRun)
    # BUGFIX: `parser` was used below without ever being bound, so the usage
    # branches raised NameError instead of printing help. Bind it from the
    # option group (as the sibling test_simulation helpers do).
    parser = option_group.parser
    run_manager = RunManager(
        option_group.get_services_database_configuration(options))
    run_as_multiprocess = not options.run_as_single_process
    if options.pickled_resource_file is not None:
        # `open` replaces the Py2-only `file` builtin; behavior is identical.
        f = open(options.pickled_resource_file, 'r')
        try:
            config = pickle.load(f)
        finally:
            f.close()
    elif options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            # get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
        insert_auto_generated_cache_directory_if_needed(config)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(
            options.xml_configuration).get_run_configuration(
                options.scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)
    config['years'] = range(1981, 1981 + self.YearsToRun)
    if options.existing_cache_to_copy is not None:
        # Copy an existing base-year cache instead of rebuilding from the database.
        config[
            'creating_baseyear_cache_configuration'].cache_from_database = False
        config[
            'creating_baseyear_cache_configuration'].baseyear_cache = BaseyearCacheConfiguration(
                existing_cache_to_copy=options.existing_cache_to_copy,
            )
        if options.years_to_cache is not None:
            # NOTE(review): eval() of a command-line value — acceptable only for
            # trusted operator input; never expose this to untrusted callers.
            config[
                'creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(
                    options.years_to_cache)
    if options.profile_filename is not None:
        config["profile_filename"] = options.profile_filename
    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)
    run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)
def test_simulation(self):
    """Exercise the PSRC baseline test scenario through RunManager."""
    db_config = ServicesDatabaseConfiguration(
        database_name='services',
        database_configuration='services_database_server')
    run_manager = RunManager(db_config)
    run_as_multiprocess = True
    for name in ['psrc_baseline_test']:
        cfg = self.xml_config.get_run_configuration(name)
        insert_auto_generated_cache_directory_if_needed(cfg)
        run_manager.setup_new_run(
            cache_directory=cfg['cache_directory'], configuration=cfg)
        run_manager.run_run(cfg, run_as_multiprocess=run_as_multiprocess)
def test_simulation(self):
    """Run the Eugene_baseline scenario from the eugene package's gridcell config."""
    eugene_dir = __import__('eugene').__path__[0]
    xml_config = XMLConfiguration(
        os.path.join(eugene_dir, 'configs', 'eugene_gridcell.xml'))
    option_group = StartRunOptionGroup()
    # simulate 0 command line arguments by passing in []
    options, _ = option_group.parser.parse_args([])
    manager = RunManager(option_group.get_services_database_configuration(options))
    run_section = xml_config.get_run_configuration('Eugene_baseline')
    insert_auto_generated_cache_directory_if_needed(run_section)
    manager.setup_new_run(cache_directory=run_section['cache_directory'],
                          configuration=run_section)
    manager.run_run(run_section)
def test_simulation(self):
    """Check that the Seattle_baseline simulation proceeds without crashing."""
    # open the configuration for seattle_parcel.xml
    seattle_parcel_dir = __import__('seattle_parcel').__path__[0]
    xml_config = XMLConfiguration(
        os.path.join(seattle_parcel_dir, 'configs', 'seattle_parcel.xml'))
    option_group = StartRunOptionGroup()
    # simulate 0 command line arguments by passing in []
    options, _ = option_group.parser.parse_args([])
    manager = RunManager(option_group.get_services_database_configuration(options))
    scenario = xml_config.get_run_configuration('Seattle_baseline')
    insert_auto_generated_cache_directory_if_needed(scenario)
    manager.setup_new_run(cache_directory=scenario['cache_directory'],
                          configuration=scenario)
    manager.run_run(scenario)
def run(self):
    """Resolve a run configuration from VIBe options and launch it.

    The configuration comes from, in priority order: a pickled resource file,
    an opus configuration path, or an XML configuration plus scenario name.
    With none of these, usage is printed and the process exits. Simulates
    self.YearsToRun years starting at 2012.
    """
    option_group = StartRunOptionGroup()
    options = VIBeOptions(self.YearsToRun)
    # BUGFIX: `parser` was used below without ever being bound, so the usage
    # branches raised NameError instead of printing help. Bind it from the
    # option group (as the sibling test_simulation helpers do).
    parser = option_group.parser
    run_manager = RunManager(option_group.get_services_database_configuration(options))
    run_as_multiprocess = not options.run_as_single_process
    if options.pickled_resource_file is not None:
        # `open` replaces the Py2-only `file` builtin; behavior is identical.
        f = open(options.pickled_resource_file, 'r')
        try:
            config = pickle.load(f)
        finally:
            f.close()
    elif options.configuration_path is not None:
        opus_path = options.configuration_path
        try:
            config = get_config_from_opus_path(opus_path)
        except ImportError:
            # TODO: Once all fully-specified configurations are stored as classes,
            # get rid of this use.
            import_stmt = 'from %s import run_configuration as config' % opus_path
            exec(import_stmt)
        insert_auto_generated_cache_directory_if_needed(config)
    elif options.xml_configuration is not None:
        if options.scenario_name is None:
            parser.print_help()
            sys.exit(1)
        config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
    else:
        parser.print_help()
        sys.exit(1)
    config['years'] = range(2012, 2012 + self.YearsToRun)
    if options.existing_cache_to_copy is not None:
        # Copy an existing base-year cache instead of rebuilding from the database.
        config['creating_baseyear_cache_configuration'].cache_from_database = False
        config['creating_baseyear_cache_configuration'].baseyear_cache = BaseyearCacheConfiguration(
            existing_cache_to_copy=options.existing_cache_to_copy,
        )
        if options.years_to_cache is not None:
            # NOTE(review): eval() of a command-line value — acceptable only for
            # trusted operator input; never expose this to untrusted callers.
            config['creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(options.years_to_cache)
    if options.profile_filename is not None:
        config["profile_filename"] = options.profile_filename
    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)
    run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)
def test_simulation(self):
    """Run the Eugene_baseline gridcell scenario as a smoke test."""
    package_dir = __import__('eugene').__path__[0]
    config_path = os.path.join(package_dir, 'configs', 'eugene_gridcell.xml')
    xml_configuration = XMLConfiguration(config_path)
    option_group = StartRunOptionGroup()
    parser = option_group.parser
    # Parse an empty argument list so every option keeps its default value.
    options, _ = parser.parse_args([])
    manager = RunManager(option_group.get_services_database_configuration(options))
    scenario_config = xml_configuration.get_run_configuration('Eugene_baseline')
    insert_auto_generated_cache_directory_if_needed(scenario_config)
    manager.setup_new_run(cache_directory=scenario_config['cache_directory'],
                          configuration=scenario_config)
    manager.run_run(scenario_config)
def test_simulation(self):
    """Fetch and unpack base-year data over FTP, then run the Washtenaw test scenario."""
    base_year_data_path = os.path.join(self.data_path, 'base_year_data')
    if not os.path.exists(base_year_data_path):
        os.makedirs(base_year_data_path)
    # FTP source and credentials are supplied through the environment
    # (presumably set by the CI job — TODO confirm).
    ftp_url = os.environ["FTP_URL"]
    file_name = os.path.split(ftp_url)[1]
    ftp_user = os.environ["FTP_USERNAME"]
    ftp_password = os.environ["FTP_PASSWORD"]
    #stdout, stderr = Popen("ls -la %s" % base_year_data_path, shell=True).communicate()
    #stdout, stderr = Popen("echo '%s'" % (base_year_data_path), stdout=PIPE).communicate()
    #print stdout
    try:
        # Download the archive quietly, remove any stale '2008' cache directory,
        # and unzip in place.
        # NOTE(review): shell=True with %-interpolated values is injection-prone;
        # tolerated here only because inputs come from the CI environment.
        Popen( """ cd %s; pwd; ls -la; echo wget --timestamping %s --ftp-user=%s --ftp-password=%s > /dev/null 2>&1; rm -rf 2008; unzip -o %s """ % (base_year_data_path, ftp_url, ftp_user, ftp_password, file_name), shell = True ).communicate()
    except:
        # Bare except is acceptable here: the error is logged and re-raised.
        print "Error when downloading and unzipping file from %s." % ftp_url
        raise
    services_db = ServicesDatabaseConfiguration( database_name = 'services', database_configuration = 'services_database_server' )
    run_manager = RunManager(services_db)
    run_as_multiprocess = True
    xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'washtenaw_parcel.xml'))
    for scenario_name in ['washtenaw_baseline_test']:
        config = xml_config.get_run_configuration(scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
        # base_year = config['base_year']
        # config['years_to_run'] = (base_year+1, base_year+2)
        run_manager.setup_new_run(cache_directory = config['cache_directory'], configuration = config)
        run_manager.run_run(config, run_as_multiprocess = run_as_multiprocess)
def _do_run_simple_test_run(caller, temp_dir, config, end_year=None):
    """Runs model system with a single model (for speed).

    Sets the .resources property of the caller before starting the run.
    """
    runs_manager = RunManager(config)
    run_configuration = SubsetConfiguration()
    run_configuration['creating_baseyear_cache_configuration'].cache_directory_root = temp_dir
    # A single model keeps the test run short.
    run_configuration['models'] = ['land_price_model']
    if end_year is not None:
        run_configuration['years'] = (run_configuration['years'][0], end_year)
    SessionConfiguration(
        new_instance=True,
        package_order=run_configuration['dataset_pool_configuration'].package_order,
        in_storage=AttributeCache())
    # Generate the cache directory after the configuration is fully assembled.
    insert_auto_generated_cache_directory_if_needed(run_configuration)
    caller.resources = run_configuration
    runs_manager.setup_new_run(
        cache_directory=run_configuration['cache_directory'],
        configuration=run_configuration)
    runs_manager.run_run(run_configuration)
def testName(self):
    """Unzip MATSim fixtures and the base-year cache, then drive one UrbanSim run."""
    print "entering test_run"
    logger.log_status("Preparing MATsim test run ...")
    # unzip MATSim files
    matsim_zip = ExtractZipFile(self.matsim_source, self.destination)
    matsim_zip.extract()
    matsim_extracted_files = os.path.join(self.destination, "MATSimTestClasses")  # location of unziped MATSim files
    # unzip base_year_cache
    base_year_data_zip = ExtractZipFile(self.base_year_data_source, self.destination)
    base_year_data_zip.extract()
    base_year_data_extracted_files = os.path.join(
        self.destination, "base_year_data"
    )  # location of unziped base_year_cache
    # updating location of base_year_data
    self.run_config["creating_baseyear_cache_configuration"].cache_directory_root = self.destination
    self.run_config[
        "creating_baseyear_cache_configuration"
    ].baseyear_cache.existing_cache_to_copy = base_year_data_extracted_files
    self.run_config["cache_directory"] = base_year_data_extracted_files
    # Register the extracted MATSim inputs with the run configuration.
    self.run_config.add("matsim_files", matsim_extracted_files)
    self.run_config.add("matsim_config", self.matsim_config_full)
    self.run_config.add("root", self.destination)
    insert_auto_generated_cache_directory_if_needed(self.run_config)
    run_manager = RunManager(ServicesDatabaseConfiguration())
    run_manager.setup_new_run(cache_directory=self.run_config["cache_directory"], configuration=self.run_config)
    logger.log_status("Strating UrbanSim run ... ")
    run_manager.run_run(self.run_config, run_as_multiprocess=True)
    # after the UrbanSim run the travel data sets schould be equal
    # self.assertTrue( self.compare_travel_data_sets() )
    logger.log_status("... UrbanSim run finished.")
    print "leaving test_run"
# generate seeds for multiple runs root_seed = config.get("seed", None) seed(root_seed) # generate different seed for each run (each seed contains 1 number) seed_array = randint(1,2**30, number_of_runs) list_of_cache_directories = [] for irun in range(number_of_runs): config['seed']= (seed_array[irun],) this_config = config.copy() if ((irun + 1) % number_of_runs_in_parallel) == 0: run_in_background = False else: run_in_background = True run_manager.setup_new_run(cache_directory = this_config['cache_directory'], configuration = this_config) run_manager.run_run(this_config, run_as_multiprocess=False, run_in_background=run_in_background) if irun == 0: # log file for the multiple runs will be located in the first cache first_cache_directory = this_config['cache_directory'] log_file = os.path.join(first_cache_directory, 'multiple_runs.log') logger.enable_file_logging(log_file) logger.log_status("Multiple runs: %s replications" % number_of_runs) logger.log_status("root random seed = %s" % str(root_seed)) else: logger.enable_file_logging(log_file, verbose=False) logger.log_status("Run %s: %s" % (irun+1, this_config['cache_directory'])) logger.disable_file_logging(log_file) list_of_cache_directories.append(this_config['cache_directory']) write_to_text_file(os.path.join(first_cache_directory,"cache_directories"), list_of_cache_directories)
# NOTE(review): fragment — this span begins inside an option-dispatch chain
# whose opening `if`/`elif` branches lie above this excerpt; `options`,
# `parser`, `run_manager`, and `run_as_multiprocess` are bound earlier in the
# enclosing function, so this block is not valid standalone Python.
    insert_auto_generated_cache_directory_if_needed(config)
elif options.xml_configuration is not None:
    # Build the run configuration from an XML project file plus scenario name.
    if options.scenario_name is None:
        parser.print_help()
        sys.exit(1)
    config = XMLConfiguration(
        options.xml_configuration).get_run_configuration(
            options.scenario_name)
    insert_auto_generated_cache_directory_if_needed(config)
else:
    # No configuration source supplied: show usage and exit.
    parser.print_help()
    sys.exit(1)
if options.existing_cache_to_copy is not None:
    # Copy an existing base-year cache instead of rebuilding from the database.
    config[
        'creating_baseyear_cache_configuration'].cache_from_database = False
    config[
        'creating_baseyear_cache_configuration'].baseyear_cache = BaseyearCacheConfiguration(
            existing_cache_to_copy=options.existing_cache_to_copy,
        )
    if options.years_to_cache is not None:
        # NOTE(review): eval() of a command-line value — trusted operator input only.
        config[
            'creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(
                options.years_to_cache)
if options.profile_filename is not None:
    config["profile_filename"] = options.profile_filename
run_manager.setup_new_run(cache_directory=config['cache_directory'],
                          configuration=config)
run_manager.run_run(config, run_as_multiprocess=run_as_multiprocess)
seed(root_seed) # generate different seed for each run (each seed contains 1 number) seed_array = randint(1, 2**30, number_of_runs) list_of_cache_directories = [] for irun in range(number_of_runs): config['seed'] = (seed_array[irun], ) this_config = config.copy() if ((irun + 1) % number_of_runs_in_parallel) == 0: run_in_background = False else: run_in_background = True run_manager.setup_new_run( cache_directory=this_config['cache_directory'], configuration=this_config) run_manager.run_run(this_config, run_as_multiprocess=False, run_in_background=run_in_background) if irun == 0: # log file for the multiple runs will be located in the first cache first_cache_directory = this_config['cache_directory'] log_file = os.path.join(first_cache_directory, 'multiple_runs.log') logger.enable_file_logging(log_file) logger.log_status("Multiple runs: %s replications" % number_of_runs) logger.log_status("root random seed = %s" % str(root_seed)) else: logger.enable_file_logging(log_file, verbose=False) logger.log_status("Run %s: %s" % (irun + 1, this_config['cache_directory'])) logger.disable_file_logging(log_file)
# NOTE(review): fragment — begins inside a try/except whose `try:` lies above
# this excerpt; `opus_path`, `options`, `parser`, `run_manager`, and
# `run_as_multiprocess` are bound earlier in the enclosing function, so this
# block is not valid standalone Python.
except ImportError:
    # TODO: Once all fully-specified configurations are stored as classes,
    # get rid of this use.
    import_stmt = 'from %s import run_configuration as config' % opus_path
    exec(import_stmt)
insert_auto_generated_cache_directory_if_needed(config)
elif options.xml_configuration is not None:
    # Build the run configuration from an XML project file plus scenario name.
    if options.scenario_name is None:
        parser.print_help()
        sys.exit(1)
    config = XMLConfiguration(options.xml_configuration).get_run_configuration(options.scenario_name)
    insert_auto_generated_cache_directory_if_needed(config)
else:
    # No configuration source supplied: show usage and exit.
    parser.print_help()
    sys.exit(1)
if options.existing_cache_to_copy is not None:
    # Copy an existing base-year cache instead of rebuilding from the database.
    config['creating_baseyear_cache_configuration'].cache_from_database = False
    config['creating_baseyear_cache_configuration'].baseyear_cache = BaseyearCacheConfiguration(
        existing_cache_to_copy = options.existing_cache_to_copy,
    )
    if options.years_to_cache is not None:
        # NOTE(review): eval() of a command-line value — trusted operator input only.
        config['creating_baseyear_cache_configuration'].baseyear_cache.years_to_cache = eval(options.years_to_cache)
if options.profile_filename is not None:
    config["profile_filename"] = options.profile_filename
run_manager.setup_new_run(cache_directory = config['cache_directory'], configuration = config)
run_manager.run_run(config, run_as_multiprocess = run_as_multiprocess)