def __init__(self, urbansim_server_config, travelmodel_server_config, services_db_config, run_manager=None, plink=False):
    """Set up SSH clients for any non-local server and a services-DB configuration.

    An SSH client is created only for hosts that are not localhost; the
    client implementation is plink when requested, paramiko otherwise.
    An externally supplied run manager is kept; otherwise one is created
    lazily elsewhere.
    """
    self.urbansim_server_config = urbansim_server_config
    self.travelmodel_server_config = travelmodel_server_config

    client_type = 'plink' if plink else 'paramiko'

    # Only remote hosts need an SSH connection; local ones are driven directly.
    self.ssh = {}
    for server_key, server_config in (('urbansim_server', self.urbansim_server_config),
                                      ('travelmodel_server', self.travelmodel_server_config)):
        if not self.is_localhost(server_config['hostname']):
            self.ssh[server_key] = self.get_ssh_client(
                None, server_config, client_type=client_type)

    self.services_db_config = ServicesDatabaseConfiguration(
        host_name=services_db_config['hostname'],
        user_name=services_db_config['username'],
        password=services_db_config['password'],
        database_name=services_db_config['database_name'])

    # Keep a caller-provided run manager; falsy values collapse to None.
    self._run_manager = run_manager if run_manager else None
def testName(self): print "entering test_run" logger.log_status('Preparing MATsim test run ...') # unzip MATSim files matsim_zip = ExtractZipFile(self.matsim_source, self.destination) matsim_zip.extract() matsim_extracted_files = os.path.join(self.destination, 'MATSimTestClasses') # location of unziped MATSim files # unzip base_year_cache base_year_data_zip = ExtractZipFile(self.base_year_data_source, self.destination) base_year_data_zip.extract() base_year_data_extracted_files = os.path.join(self.destination, 'base_year_data') # location of unziped base_year_cache # updating location of base_year_data self.run_config['creating_baseyear_cache_configuration'].cache_directory_root = self.destination self.run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = base_year_data_extracted_files self.run_config['cache_directory'] = base_year_data_extracted_files self.run_config.add('matsim_files', matsim_extracted_files) self.run_config.add('matsim_config', self.matsim_config_full) self.run_config.add('root', self.destination) insert_auto_generated_cache_directory_if_needed(self.run_config) run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run(cache_directory = self.run_config['cache_directory'], configuration = self.run_config) logger.log_status('Strating UrbanSim run ... ') run_manager.run_run(self.run_config, run_as_multiprocess = True ) # after the UrbanSim run the travel data sets schould be equal # self.assertTrue( self.compare_travel_data_sets() ) logger.log_status('... UrbanSim run finished.') print "leaving test_run"
def test_run(self): # The paths work as follows: opus_matsim.__path__ is the path of the opus_matsim python module. So we can use that # as anchor ... config_location = os.path.join(opus_matsim.__path__[0], 'tests') print "location: ", config_location run_config = XMLConfiguration( os.path.join(config_location, "test_config.xml")).get_run_configuration("Test") run_config[ 'creating_baseyear_cache_configuration'].cache_directory_root = self.temp_dir run_config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = \ os.path.join(opus_matsim.__path__[0], 'tests', 'testdata', 'base_year_data') # insert_auto_generated_cache_directory... does things I don't understand. Need to do the following to obtain consistent # behavior independent from the file root: run_config['cache_directory'] = None insert_auto_generated_cache_directory_if_needed(run_config) run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run( cache_directory=run_config['cache_directory'], configuration=run_config) run_manager.run_run(run_config, run_as_multiprocess=True) self.assert_(True) self.cleanup_test_run()
def get_run_manager():
    ''' Get an instance of a valid run manager
    @return: a run manager instance (RunManager)
    '''
    # The default ServicesDatabaseConfiguration picks up its settings from
    # the environment / standard configuration.
    return RunManager(ServicesDatabaseConfiguration())
def setUp(self):
    """Prepare a test services-DB config and a temp cache dir, then execute
    a short simulation run ending in 1984."""
    # Scratch directory for the simulation cache.
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
    # Dedicated test database so the real services DB is untouched.
    self.config = ServicesDatabaseConfiguration(database_name='services_test')
    _do_run_simple_test_run(self, self.temp_dir, self.config, end_year=1984)
def test_run(self): print "Entering test run" run_manager = RunManager(ServicesDatabaseConfiguration()) run_manager.setup_new_run(cache_directory = self.config['cache_directory'],configuration = self.config) run_manager.run_run(self.config, run_as_multiprocess = True ) print "Leaving test run"
def on_buttonBox_accepted(self):
    """Import an existing run directory into the run_activity table.

    Validates that the chosen path exists and contains year sub-directories
    (4-digit names), then registers the run with the run manager and
    refreshes the GUI's list of available runs.  Errors are logged and shown
    to the user via MessageBox; the dialog always closes at the end.
    """
    path = str(self.lePath.text())
    if not os.path.exists(path):
        msg = 'Cannot import, %s does not exist' % path
        logger.log_warning(msg)
        MessageBox.warning(mainwindow=self, text=msg, detailed_text='')
    else:
        cache_directory = path
        # Year caches are sub-directories named with exactly four digits.
        years = []
        for entry in os.listdir(cache_directory):  # renamed from 'dir': avoid shadowing the builtin
            if len(entry) == 4 and entry.isdigit():
                years.append(int(entry))
        if not years:
            msg = 'Cannot import, %s has no run data' % path
            logger.log_warning(msg)
            MessageBox.warning(mainwindow=self, text=msg, detailed_text='')
        else:
            start_year = min(years)
            end_year = max(years)
            project_name = os.environ['OPUSPROJECTNAME']
            run_name = os.path.basename(path)
            server_config = ServicesDatabaseConfiguration()
            run_manager = RunManager(server_config)
            run_id = run_manager._get_new_run_id()
            resources = {
                'cache_directory': cache_directory,
                'description': '',
                'years': (start_year, end_year),
                'project_name': project_name
            }
            try:
                run_manager.add_row_to_history(run_id=run_id,
                                               resources=resources,
                                               status='done',
                                               run_name=run_name)
                update_available_runs(self.project)
                logger.log_status(
                    'Added run %s of project %s to run_activity table'
                    % (run_name, project_name))
            except Exception:  # was a bare except; keep GUI alive but surface the error
                errorInfo = formatExceptionInfo()
                logger.log_error(errorInfo)
                MessageBox.error(
                    mainwindow=self,
                    text='Could not add run %s of project %s to run_activity table'
                    % (run_name, project_name),
                    detailed_text=errorInfo)
    self.close()
def test_simulation(self):
    """Run the psrc_baseline_test scenario end to end against the services DB."""
    services_db = ServicesDatabaseConfiguration(
        database_name='services',
        database_configuration='services_database_server')
    manager = RunManager(services_db)
    for scenario_name in ['psrc_baseline_test']:
        scenario = self.xml_config.get_run_configuration(scenario_name)
        insert_auto_generated_cache_directory_if_needed(scenario)
        manager.setup_new_run(cache_directory=scenario['cache_directory'],
                              configuration=scenario)
        manager.run_run(scenario, run_as_multiprocess=True)
def run(self, config, executable):
    """Load the scenario named *executable* from the XML file *config* and
    execute it as a new run.

    Example: --config=opus_matsim/sustain_city/configs/seattle_parcel.xml
             --executable=Seattle_baseline
    """
    # Keep the derived run configuration in its own name instead of
    # clobbering the 'config' parameter.
    run_config = XMLConfiguration(config).get_run_configuration(executable)
    insert_auto_generated_cache_directory_if_needed(run_config)
    manager = RunManager(ServicesDatabaseConfiguration())
    manager.setup_new_run(cache_directory=run_config['cache_directory'],
                          configuration=run_config)
    manager.run_run(run_config, run_as_multiprocess=True)
def write_computed_indicators_to_db(self, computed_indicator_group, project_name):
    """Persist metadata for each computed indicator in the group to the
    services database.

    computed_indicator_group -- mapping of name -> computed indicator objects
    project_name -- project the indicators belong to
    """
    options = ServicesDatabaseConfiguration()
    results_manager = ResultsManager(options)
    try:
        for name, indicator in computed_indicator_group.items():
            results_manager.add_computed_indicator(
                indicator_name=indicator.indicator.name,
                dataset_name=indicator.dataset_name,
                expression=indicator.indicator.attribute,
                run_id=indicator.source_data.run_id,
                data_path=indicator.get_file_path(),
                project_name=project_name)
    finally:
        # Always release the DB connection, even if one of the inserts fails.
        results_manager.close()
class StartRunOptionGroup(object):
    """ Helper class to start model from an xml config file.

    NOTE: the class body executes at import time -- it parses the command
    line, loads the requested scenario, and launches the run immediately.
    """
    logger.start_block("Starting UrbanSim")

    # get program arguments from the command line
    program_arguments = sys.argv[1:]

    # default parameters are:
    # --config=opus_matsim/sustain_city/configs/seattle_parcel_prescheduled_events.xml
    # --executable=Seattle_baseline
    parser = optparse.OptionParser()
    parser.add_option("-c", "--config", dest="config_file_name",
                      action="store", type="string",
                      help="Name of file containing urbansim config")
    parser.add_option("-e", "--executable", dest="scenario_executable",
                      action="store", type="string",
                      help="Model to execute")
    (options, args) = parser.parse_args()

    # 'is None' instead of '== None' for identity comparison; errors are
    # logged but execution continues (and will fail below if options are missing).
    if options.config_file_name is None:
        logger.log_error("Missing path to the urbansim config file")
    if options.scenario_executable is None:
        logger.log_error("Missing name of executable scenario")

    config = XMLConfiguration(options.config_file_name).get_run_configuration(
        options.scenario_executable)
    insert_auto_generated_cache_directory_if_needed(config)

    run_manager = RunManager(ServicesDatabaseConfiguration())
    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)
    run_manager.run_run(config, run_as_multiprocess=True)
def create_dataset_from_sql_storage():
    """Return an opus Dataset wrapping the 'available_runs' table of the
    services database."""
    from opus_core.database_management.configurations.services_database_configuration import ServicesDatabaseConfiguration
    from opus_core.database_management.database_server import DatabaseServer

    # Connection settings come from environment variables / defaults; make
    # sure they are set, or replace them with appropriate values.
    db_server = DatabaseServer(ServicesDatabaseConfiguration())
    database = db_server.get_database('services')  # name of the database

    storage = StorageFactory().get_storage('sql_storage',
                                           storage_location=database)
    # The table has no unique identifier, hence the empty id_name list.
    return Dataset(in_storage=storage,
                   in_table_name='available_runs',
                   id_name=[])
class StartRunOptionGroup(object):
    """ Helper class to start model from an xml config file.
    """
    # NOTE: this class body runs at import time -- loading the Seattle_baseline
    # scenario and launching it immediately. 'config' and 'run_manager' become
    # class attributes as a side effect.
    config = XMLConfiguration("opus_matsim/configs/seattle_parcel.xml"
                              ).get_run_configuration("Seattle_baseline")
    # Alternative scenario, kept for reference:
    # config = XMLConfiguration("opus_matsim/configs/psrc_parcel.xml").get_run_configuration("PSRC_baseline")
    insert_auto_generated_cache_directory_if_needed(config)
    run_manager = RunManager(ServicesDatabaseConfiguration())
    run_manager.setup_new_run(cache_directory=config['cache_directory'],
                              configuration=config)
    # run_manager.create_baseyear_cache(config)
    run_manager.run_run(config, run_as_multiprocess=True)
def __init__(self, hostname, username, password, services_hostname, services_dbname,
             skip_travel_model=False, skip_urbansim=False, run_manager=None):
    """Store remote-host credentials, build a services DB configuration, and
    optionally adopt an externally supplied run manager."""
    # Credentials for the remote machine.
    self.hostname = hostname
    self.username = username
    self.password = password
    # Location of the services database.
    self.services_hostname = services_hostname
    self.services_dbname = services_dbname
    self.remote_communication_path = None
    # Stage-skipping flags.
    self.skip_travel_model = skip_travel_model
    self.skip_urbansim = skip_urbansim
    self.services_db_config = ServicesDatabaseConfiguration(
        host_name=services_hostname,
        user_name=username,
        password=password,
        database_name=services_dbname)
    # Falsy run_manager values collapse to None.
    self._run_manager = run_manager if run_manager else None
def test_simulation(self):
    """Fetch/unpack base-year data via a shell pipeline, then run the
    washtenaw_baseline_test scenario end to end.

    Requires FTP_URL, FTP_USERNAME and FTP_PASSWORD in the environment.
    """
    base_year_data_path = os.path.join(self.data_path, 'base_year_data')
    if not os.path.exists(base_year_data_path):
        os.makedirs(base_year_data_path)
    # Credentials and source archive for the base-year data download.
    ftp_url = os.environ["FTP_URL"]
    file_name = os.path.split(ftp_url)[1]
    ftp_user = os.environ["FTP_USERNAME"]
    ftp_password = os.environ["FTP_PASSWORD"]
    #stdout, stderr = Popen("ls -la %s" % base_year_data_path, shell=True).communicate()
    #stdout, stderr = Popen("echo '%s'" % (base_year_data_path), stdout=PIPE).communicate()
    #print stdout
    try:
        # NOTE(review): the wget command is prefixed with 'echo', so the
        # download is only printed, never executed -- confirm this is
        # intentional (the unzip below assumes the archive already exists).
        Popen( """
                cd %s;
                pwd;
                ls -la;
                echo wget --timestamping %s --ftp-user=%s --ftp-password=%s > /dev/null 2>&1;
                rm -rf 2008;
                unzip -o %s
                """ % (base_year_data_path, ftp_url, ftp_user, ftp_password, file_name),
                shell = True
                ).communicate()
    except:
        print "Error when downloading and unzipping file from %s." % ftp_url
        raise
    services_db = ServicesDatabaseConfiguration( database_name = 'services',
                                                 database_configuration = 'services_database_server' )
    run_manager = RunManager(services_db)
    run_as_multiprocess = True
    xml_config = XMLConfiguration(os.path.join(self.opus_home, 'project_configs', 'washtenaw_parcel.xml'))
    for scenario_name in ['washtenaw_baseline_test']:
        config = xml_config.get_run_configuration(scenario_name)
        insert_auto_generated_cache_directory_if_needed(config)
        # base_year = config['base_year']
        # config['years_to_run'] = (base_year+1, base_year+2)
        run_manager.setup_new_run(cache_directory = config['cache_directory'],
                                  configuration = config)
        run_manager.run_run(config, run_as_multiprocess = run_as_multiprocess)
def _get_initial_config(self):
    """Encapsulate dirty inner workings.

    Builds and returns the full run-configuration dict: model controllers
    ('models_configuration'), the ordered model list ('models'), the year
    range, chunking/caching options, and dataset preload settings.
    """
    debuglevel = 4
    config = {
        # Controllers for every model, built via the *ConfigurationCreator helpers.
        'models_configuration': {
            'development_project_types': DevelopmentProjectTypesConfigurationCreator(
                commercial = DevelopmentProjectTypeConfigurationCreator(
                    categories = [1000, 2000, 5000, 10000],
                    #categories = [],
                    developable_maximum_unit_variable_full_name = 'urbansim.gridcell.developable_maximum_commercial_sqft',
                    developable_minimum_unit_variable_full_name = 'urbansim.gridcell.developable_minimum_commercial_sqft',
                    residential = False,
                    units = 'commercial_sqft',
                    ),
                industrial = DevelopmentProjectTypeConfigurationCreator(
                    categories = [1000,2000,5000,10000],
                    #categories = [],
                    developable_maximum_unit_variable_full_name = 'urbansim.gridcell.developable_maximum_industrial_sqft',
                    developable_minimum_unit_variable_full_name = 'urbansim.gridcell.developable_minimum_industrial_sqft',
                    residential = False,
                    units = 'industrial_sqft',
                    ),
                residential = DevelopmentProjectTypeConfigurationCreator(
                    categories = [1,2,3,5,10,20],
                    #categories = [],
                    developable_maximum_unit_variable_full_name = 'urbansim.gridcell.developable_maximum_residential_units',
                    developable_minimum_unit_variable_full_name = 'urbansim.gridcell.developable_minimum_residential_units',
                    residential = True,
                    units = 'residential_units',
                    )
                ).execute(),
            'residential_development_project_location_choice_model':{
                'controller': DevelopmentProjectLocationChoiceModelConfigurationCreator(
                    project_type = 'residential',
                    coefficients_table = 'residential_development_location_choice_model_coefficients',
                    specification_table = 'residential_development_location_choice_model_specification',
                    #submodel_string = None
                    ).execute(),
                },
            'commercial_development_project_location_choice_model':{
                'controller': DevelopmentProjectLocationChoiceModelConfigurationCreator(
                    project_type = 'commercial',
                    coefficients_table = 'commercial_development_location_choice_model_coefficients',
                    specification_table = 'commercial_development_location_choice_model_specification',
                    #submodel_string = None
                    ).execute(),
                },
            'industrial_development_project_location_choice_model':{
                'controller': DevelopmentProjectLocationChoiceModelConfigurationCreator(
                    project_type = 'industrial',
                    coefficients_table = 'industrial_development_location_choice_model_coefficients',
                    specification_table = 'industrial_development_location_choice_model_specification',
                    #submodel_string = None
                    ).execute(),
                },
            'prescheduled_events': {
                'controller': PrescheduledEventsConfigurationCreator(
                    output_events = 'development_events',
                    ).execute(),
                },
            'events_coordinator': {
                'controller': EventsCoordinatorConfigurationCreator(
                    input_events = 'development_events',
                    output_changed_indices = 'changed_indices',
                    ).execute(),
                'default_type_of_change':DevelopmentEventTypeOfChange.ADD,
                },
            'home_based_choice_model': {
                'controller': HomeBasedChoiceModelConfigurationCreator().execute(),
                },
            'auto_ownership_choice_model': {
                'controller': AutoOwnershipChoiceModelConfigurationCreator().execute(),
                },
            'residential_land_share_model': {
                'controller': ResidentialLandShareModelConfigurationCreator(
                    debuglevel = debuglevel,
                    input_changed_indices = 'changed_indices',
                    ).execute(),
                },
            'land_price_model': {
                'controller': LandPriceModelConfigurationCreator(
                    debuglevel = debuglevel,
                    ).execute(),
                },
            'development_project_transition_model': {
                'controller': DevelopmentProjectTransitionModelConfigurationCreator(
                    debuglevel = debuglevel,
                    output_results = 'dptm_results',
                    ).execute(),
                },
            'development_event_transition_model': {
                'controller': DevelopmentEventTransitionModelConfigurationCreator(
                    input_projects = 'dptm_results',
                    output_events = 'development_events',
                    ).execute(),
                },
            'household_transition_model': {
                'controller': HouseholdTransitionModelConfigurationCreator().execute(),
                },
            'employment_transition_model': {
                'controller': EmploymentTransitionModelConfigurationCreator().execute(),
                },
            # Relocation models feed their output index into the matching
            # location-choice model below (hrm_index / erm_index / brm_index).
            'household_relocation_model': {
                'controller': HouseholdRelocationModelConfigurationCreator(
                    output_index = 'hrm_index',
                    ).execute(),
                },
            'household_location_choice_model': {
                'controller': HouseholdLocationChoiceModelConfigurationCreator(
                    input_index = 'hrm_index',
                    ).execute(),
                },
            'employment_relocation_model': {
                'controller': EmploymentRelocationModelConfigurationCreator(
                    output_index = 'erm_index',
                    ).execute(),
                },
            'employment_location_choice_model': {
                'controller': EmploymentLocationChoiceModelConfigurationCreator(
                    input_index = 'erm_index',
                    ).execute(),
                },
            # These are deviations from the general ELCM for the two home_based ELCMs (sfh, mfh)
            'home_based_employment_location_choice_model': {
                'controller': HomeBasedEmploymentLocationChoiceModelConfigurationCreator(
                    input_index = 'erm_index',
                    ).execute(),
                },
            'governmental_employment_location_choice_model': {
                'controller': GovernmentalEmploymentLocationChoiceModelConfigurationCreator(
                    input_index = 'erm_index',
                    ).execute(),
                },
            'distribute_unplaced_jobs_model': {
                'controller': DistributeUnplacedJobsModelConfigurationCreator().execute(),
                },
            'real_estate_price_model': {
                'controller': RealEstatePriceModelConfigurationCreator().execute(),
                },
            'building_transition_model': {
                'controller': BuildingTransitionModelConfigurationCreator().execute(),
                },
            'building_relocation_model': {
                'controller': BuildingRelocationModelConfigurationCreator(
                    output_index = 'brm_index',
                    ).execute(),
                },
            'building_location_choice_model': {
                'controller': BuildingLocationChoiceModelConfigurationCreator(
                    input_index = 'brm_index',
                    ).execute(),
                },
            },
        'model_system':'urbansim.model_coordinators.model_system',
        'models':[ # models are executed in the same order as in this list
            "prescheduled_events",
            "events_coordinator",
            "residential_land_share_model",
            'land_price_model',
            'development_project_transition_model',
            'residential_development_project_location_choice_model',
            'commercial_development_project_location_choice_model',
            'industrial_development_project_location_choice_model',
            "development_event_transition_model",
            "events_coordinator",
            "residential_land_share_model",
            "household_transition_model",
            "employment_transition_model",
            "household_relocation_model",
            "household_location_choice_model",
            "employment_relocation_model",
            {"employment_location_choice_model": {"group_members": "_all_"}},
            "distribute_unplaced_jobs_model"
            ],
        'years':(2001, 2030),
        'debuglevel':debuglevel,
        'flush_variables':False,
        'seed':None,#(0,0),
        'chunk_specification':{ # Default value
            'nchunks':1,
            },
        'datasets_to_cache_after_each_model':[ # datasets to be cached after each model,
            'gridcell',
            'household',
            'job'],
        'datasets_to_preload': { # Datasets that should be loaded before each year, e.g. in order to pass them as model arguments.
            'gridcell':{         # All remaining datasets are used via SessionConfiguration
                'nchunks':2      # linked to the cache.
                },
            'household':{
                },
            'job':{
                },
            'zone':{
                },
            'development_type':{},
            'target_vacancy':{},
            'development_event_history':{},
            'development_constraint':{},
            'job_building_type':{},
            # 'building_type':{},
            # 'building':{},
            'vacant_land_and_building_type':{},
            'urbansim_constant':{},
            },
        'dataset_pool_configuration': DatasetPoolConfiguration(
            package_order=['urbansim', 'opus_core'],
            ),
        'services_database_configuration':ServicesDatabaseConfiguration(),
        }
    return config
def get_services_database_configuration(self, options):
    """Build a ServicesDatabaseConfiguration from parsed command-line options."""
    kwargs = {
        'database_name': options.database_name,
        'database_configuration': options.database_configuration,
    }
    return ServicesDatabaseConfiguration(**kwargs)
def on_pbnStartModel_released(self):
    """Start/pause/resume button handler for the simulation run.

    Behavior depends on the (running, paused) state:
    - running & not paused: pause the run thread and stop the status timer;
    - running & paused: resume the run thread and restart the timer;
    - not running: optionally overwrite a duplicate-named run, then start a
      new RunModelThread with a 1-second status-poll timer.
    """
    duplicate = False
    self.diagnostic_go_button.setEnabled(True)
    if self.running and not self.paused:
        # Take care of pausing a run
        success = self.runThread.pause()
        if success:
            self.paused = True
            self.timer.stop()
            self.pbnStartModel.setText(QString("Resume simulation run..."))
    elif self.running and self.paused:
        # Need to resume a paused run
        success = self.runThread.resume()
        if success:
            self.paused = False
            self.timer.start(1000)
            self.pbnStartModel.setText(QString("Pause simulation run..."))
    elif not self.running:
        run_name = str(self.leRunName.text())
        if run_name == '':
            run_name = None
        else:
            # Look for an existing run with the same name; remember its run_id
            # so its DB rows can be purged before starting over.
            run_id = None
            run_nodes = get_available_run_nodes(self.project)
            for run_node in run_nodes:
                existing_run_name = run_node.tag
                if run_name == existing_run_name:
                    duplicate = True
                    r = run_node.get('run_id')
                    if r is not None:
                        run_id = int(r)
                    break
            if duplicate:
                dlg_dup = OverwriteRunDialog(self)
                if dlg_dup.exec_() == QDialog.Rejected:
                    return
                delete_simulation_run(self.project,
                                      run_node.tag)  # todo change to run_node.get('name')
        # Update the XML
        self.project.update_xml_config()
        self.updateConfigAndGuiForRun()
        # Fire up a new thread and run the model
        self.pbnStartModel.setText(QString("Pause simulation run..."))
        # References to the GUI elements for status for this run...
        self.progressBarTotal = self.runProgressBarTotal
        self.progressBarYear = self.runProgressBarYear
        self.progressBarModel = self.runProgressBarModel
        #self.pbnRemoveModel.setEnabled(False)
        #self.pbnStartModel.setEnabled(False)
        # Initializing values
        self.progressBarTotal.setValue(0)
        self.progressBarYear.setValue(0)
        self.progressBarModel.setValue(0)
        self.progressBarTotal.setRange(0, 0)
        self.progressBarYear.setRange(0, 0)
        self.progressBarModel.setRange(0, 0)
        batch_name = str(self.cboOptionalIndicatorBatch.currentText())
        if batch_name == '(None)':
            batch_name = None
        self.runThread = RunModelThread(get_mainwindow_instance(),
                                        self,
                                        batch_name,
                                        run_name)
        if duplicate and run_id is not None:
            # Purge all DB records of the overwritten run before restarting.
            from opus_core.services.run_server.run_manager import RunManager as ServicesRunManager
            run_manager = ServicesRunManager(ServicesDatabaseConfiguration())
            run_manager.delete_everything_for_this_run(run_id=run_id)
            run_manager.close()
        # Use this signal from the thread if it is capable of producing its own status signal
        QObject.connect(self.runThread,
                        SIGNAL("runFinished(PyQt_PyObject)"),
                        self.runFinishedFromThread)
        QObject.connect(self.runThread,
                        SIGNAL("runError(PyQt_PyObject)"),
                        self.runErrorFromThread)
        # Use this timer to call a function in the thread to check status if the thread is unable
        # to produce its own signal above
        self.timer = QTimer()
        QObject.connect(self.timer,
                        SIGNAL("timeout()"),
                        self.runStatusFromThread)
        self.timer.start(1000)
        self.running = True
        self.paused = False
        self.runThread.start()
    else:
        print "Unexpected state in the model run..."
def add_runs_to_services_db_from_disk(projects = None):
    """Rebuild the run_activity table from run data found on disk.

    For each project under the opus data path (optionally restricted to
    *projects*), drops and re-creates the run_activity records for the
    base_year_data cache and every run directory under 'runs/'.

    Only applies when the services database uses the sqlite protocol.
    """
    server_config = ServicesDatabaseConfiguration()
    if server_config.protocol != 'sqlite':
        return

    def _years_in(directory):
        # Year caches are sub-directories named with exactly four digits.
        return [int(entry) for entry in os.listdir(directory)
                if len(entry) == 4 and entry.isdigit()]

    datapath = paths.OPUS_DATA_PATH
    for project_name in os.listdir(datapath):
        if projects is not None and project_name not in projects:
            continue
        if not os.path.isdir(os.path.join(datapath, project_name)):
            continue
        os.environ['OPUSPROJECTNAME'] = project_name
        # Start from a clean run_activity table for this project.
        server = DatabaseServer(server_config)
        server.drop_database(database_name = 'run_activity')
        server.close()
        run_manager = RunManager(server_config)

        # Register the base-year cache, if present and non-empty.
        baseyear_directory = os.path.join(datapath, project_name, 'base_year_data')
        if os.path.exists(baseyear_directory):
            years = _years_in(baseyear_directory)
            if years:  # guard: min()/max() would raise on an empty cache
                run_name = 'base_year_data'
                run_id = run_manager._get_new_run_id()
                resources = {
                    'cache_directory': baseyear_directory,
                    'description': 'base year data',
                    'years': (min(years), max(years))
                }
                logger.log_status('Adding run %s of project %s to run_activity table'
                                  % (run_name, project_name))
                run_manager.add_row_to_history(run_id = run_id,
                                               resources = resources,
                                               status = 'done',
                                               run_name = run_name)

        # Register each run directory.
        data_directory = os.path.join(datapath, project_name, 'runs')
        if not os.path.exists(data_directory):
            continue
        for run_name in os.listdir(data_directory):
            cache_directory = os.path.join(data_directory, run_name)
            if not os.path.isdir(cache_directory):
                continue
            try:
                years = _years_in(cache_directory)
                run_id = run_manager._get_new_run_id()
                resources = {
                    'cache_directory': cache_directory,
                    'description': '',
                    'years': (min(years), max(years))
                }
                logger.log_status('Adding run %s of project %s to run_activity table'
                                  % (run_name, project_name))
                run_manager.add_row_to_history(run_id = run_id,
                                               resources = resources,
                                               status = 'done',
                                               run_name = run_name)
            except Exception:
                # Best effort as before (was a bare 'except: pass'), but leave
                # a trace instead of hiding the failure entirely.
                logger.log_warning('Could not add run %s of project %s to run_activity table'
                                   % (run_name, project_name))