Example #1
    def prepare_for_simulation(self, run_configuration, cache_directory=None):
        self.config = Resources(run_configuration)
        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=cache_directory)

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config['cache_directory'] = self.simulation_state.get_cache_directory()

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].package_order,
            in_storage=AttributeCache())

        ForkProcess().fork_new_process(
            self.config['creating_baseyear_cache_configuration'].cache_scenario_database,
            self.config)

        # Create output database (normally done by run manager)
        if 'estimation_database_configuration' in self.config:
            db_server = DatabaseServer(
                self.config['estimation_database_configuration'])
            if not db_server.has_database(
                    self.config['estimation_database_configuration'].database_name):
                db_server.create_database(
                    self.config['estimation_database_configuration'].database_name)
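Example #1 already shows the basic calling convention used throughout this page: fork_new_process takes the Opus path of the module to run and a resources/configuration object that is handed to the child process. A minimal sketch of that pattern follows; the import paths are assumptions about the usual Opus layout, and the configuration content is a placeholder that would not be enough for a real run.

# Minimal sketch, assuming ForkProcess and Resources live at their usual Opus paths.
from opus_core.fork_process import ForkProcess
from opus_core.resources import Resources

resources = Resources({'cache_directory': '/tmp/opus_cache'})  # placeholder configuration

# Run the module identified by its Opus path in a separate Python process,
# passing it the resources object.
ForkProcess().fork_new_process('opus_core.tools.start_run', resources)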
Example #2
    def prepare_for_simulation(self, config, cache_directory=None):
        self.config = Resources(config)
        base_cache_dir = self.config['creating_baseyear_cache_configuration'].cache_directory_root

        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=base_cache_dir,
                                                start_time=self.config.get('base_year', 0))

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config['cache_directory'] = self.simulation_state.get_cache_directory()

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].package_order,
            in_storage=AttributeCache())

        if config['creating_baseyear_cache_configuration'].cache_from_database:
            ForkProcess().fork_new_process(
                self.config['creating_baseyear_cache_configuration'].cache_scenario_database,
                self.config)
        else:
            CacheFltData().run(self.config)
Example #3
    def create_baseyear_cache(self, resources):
        if resources['creating_baseyear_cache_configuration'].cache_from_database:
            ForkProcess().fork_new_process(
                resources['creating_baseyear_cache_configuration'].cache_scenario_database,
                resources)
        else:
            CacheFltData().run(resources)
Example #4
    def _test_export_one_table(self, table_name):
        output_temp_dir = tempfile.mkdtemp(dir=self.temp_dir)
        optional_args = [
            '-c',
            os.path.join(self.temp_dir, str(self.year)), '-o', output_temp_dir,
            '-t', table_name
        ]
        ForkProcess().fork_new_process(self.export_from_cache_opus_path,
                                       resources=None,
                                       optional_args=optional_args)

        files = [
            os.path.splitext(os.path.split(f)[1])[0]
            for f in glob(output_temp_dir + '/*')
        ]
        self.assertEqual(set(files), set([table_name]))

        export_year = str(self.year + 100)
        optional_args = [
            '-d', output_temp_dir, '-c', self.temp_dir, '-y', export_year,
            '-t', table_name
        ]
        ForkProcess().fork_new_process(self.export_to_cache_opus_path,
                                       resources=None,
                                       optional_args=optional_args)

        exported_datasets = [
            os.path.split(f)[1]
            for f in glob(os.path.join(self.temp_dir, export_year) + '/*')
        ]
        self.assertEqual(set(exported_datasets), set([table_name]))

        org_dir = os.path.join(self.temp_dir, str(self.year))
        exp_dir = os.path.join(self.temp_dir, export_year)
        flt_file_names = os.listdir(os.path.join(org_dir, table_name))
        self.assertEqual(
            cmpfiles(os.path.join(org_dir, table_name),
                     os.path.join(exp_dir, table_name), flt_file_names),
            (flt_file_names, [], []))
        rmtree(output_temp_dir)
        rmtree(exp_dir)
Example #5
    def test_export_all_tables(self):
        output_temp_dir = tempfile.mkdtemp(dir=self.temp_dir)
        optional_args = [
            '-c',
            os.path.join(self.temp_dir, str(self.year)), '-o', output_temp_dir
        ]
        ForkProcess().fork_new_process(self.export_from_cache_opus_path,
                                       resources=None,
                                       optional_args=optional_args)

        table_names = self.test_data[self.year].keys()
        files = [
            os.path.splitext(os.path.split(f)[1])[0]
            for f in glob(output_temp_dir + '/*')
        ]
        self.assertEqual(set(files), set(table_names))
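When the forked module is a command-line tool rather than a full simulation run, the tests above pass resources=None and hand the tool its arguments through optional_args. A hedged sketch of that usage follows; the Opus path is a hypothetical stand-in for the paths the tests keep in self.export_from_cache_opus_path and self.export_to_cache_opus_path, and the flags are only illustrative.

# Illustrative only: fork a command-line style Opus tool and pass its argv via optional_args.
from opus_core.fork_process import ForkProcess

optional_args = ['-c', '/tmp/opus_cache/2000',   # cache directory to export from (illustrative)
                 '-o', '/tmp/csv_output']        # output directory (illustrative)
ForkProcess().fork_new_process('opus_core.tools.some_export_tool',  # hypothetical Opus path
                               resources=None,
                               optional_args=optional_args)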
Example #6
    def _fork_new_process(self,
                          module_name,
                          resources,
                          run_in_background=False,
                          **key_args):
        self.running_conditional.acquire()
        self.running = True
        self.forked_processes.append(ForkProcess())
        key_args["run_in_background"] = True
        self.forked_processes[-1].fork_new_process(module_name, resources,
                                                   **key_args)
        self.running_conditional.notifyAll()
        self.running_conditional.release()
        if not run_in_background:
            self.forked_processes[-1].wait()
            self.forked_processes[-1].cleanup()
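Example #6 shows the foreground/background pattern: the child is always forked with run_in_background=True, and the caller decides whether to block by calling wait() and then cleanup() on the ForkProcess instance. A condensed sketch of that idea, using only the API calls visible in the example (the helper name is hypothetical):

# Condensed from Example #6; fork_and_maybe_wait is a hypothetical helper name.
from opus_core.fork_process import ForkProcess

def fork_and_maybe_wait(module_name, resources, run_in_background=False):
    process = ForkProcess()
    process.fork_new_process(module_name, resources, run_in_background=True)
    if not run_in_background:
        process.wait()      # block until the forked process exits
        process.cleanup()   # clean up after the forked process, as in Example #6
    return process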
Example #7
    config = ImportedConfiguration()

    base_year = config['base_year']

    config['creating_baseyear_cache_configuration'].cache_from_database = False
    config['creating_baseyear_cache_configuration'].baseyear_cache.existing_cache_to_copy = cache_dir
    config['cache_directory'] = output_dir
    config['years'] = (base_year + 1, base_year + years_to_run)

    if config['years'][1] < config['years'][0]:
        display_message_dialog('No years to simulate!')
        sys.exit(1)

    # sanity check on the cache directory -- make sure it includes a subdirectory whose name is the base year
    base_year_directory = os.path.join(cache_dir, str(base_year))
    if not os.path.exists(base_year_directory):
        msg = 'Invalid cache directory: %s\nThe cache directory should have a subdirectory %d for the base year' % (
            cache_dir, base_year)
        display_message_dialog(msg)
        sys.exit(1)

    # sanity check on the output directory
    if output_dir_container == '':
        msg = 'Output directory not specified'
        display_message_dialog(msg)
        sys.exit(1)

    ForkProcess().fork_new_process('opus_core.tools.start_run',
                                   resources=config)
Example #8
    def _do_run(self, start_year, end_year, urbansim_resources, background=False):
        travel_model_resources = Configuration(urbansim_resources)
        if start_year is None:
            start_year = travel_model_resources['years'][0]
        if end_year is None:
            end_year = travel_model_resources['years'][1]
        if end_year < start_year:
            logger.log_warning("In run %s end_year (%s) is smaller than start_year (%s)." % (self.run_id, end_year, start_year))
            sys.exit(1)
        # keep only the travel model years that fall within [start_year, end_year]; sorted below
        travel_model_years = []
        if not self.skip_travel_model:
            for key in travel_model_resources['travel_model_configuration'].keys():
                if type(key) == int and start_year <= key <= end_year:
                    travel_model_years.append(key)
        if end_year not in travel_model_years:
            travel_model_years.append(end_year)
        travel_model_years.sort()
        this_start_year = start_year
        bg = ''
        if background:
            bg = '&'
        for travel_model_year in travel_model_years:
            this_end_year = travel_model_year
            if this_end_year > end_year:
                sys.exit(1) #run finished

            if this_start_year <= this_end_year:
                urbansim_resources['years'] = (this_start_year, this_end_year)

                run_manager = self.get_run_manager()                
                run_manager.add_row_to_history(run_id=self.run_id, resources=urbansim_resources, status="started")
                
                if not self.skip_urbansim:
                    self.run_remote_python_process("%s/urbansim/tools/restart_run.py" % self.remote_opus_path, 
                                               "%s %s --skip-cache-cleanup --skip-travel-model %s" % (
                                                         self.run_id, this_start_year, bg),
                                                   )
                    if not background:               
                        if not self.has_urbansim_finished(urbansim_resources):
                            raise StandardError, "There was an error in the urbansim run."

            # run travel models
            if not self.skip_travel_model:
                if background: # wait until urbansim finishes; check every 60 seconds
                    while True:
                        time.sleep(60)
                        runs_by_status = self.get_run_manager().get_runs_by_status([self.run_id])
                        if self.run_id in runs_by_status.get('done', []):
                            break
                        if self.run_id in runs_by_status.get('failed', []):
                            raise StandardError, "There was an error in the urbansim run."

                max_zone_id = 0
                if travel_model_resources['travel_model_configuration'].has_key(this_end_year):
                    tm = AbstractEmme2TravelModel(travel_model_resources)
                    for full_model_path in travel_model_resources['travel_model_configuration'][this_end_year].get('models'):
                        if full_model_path in self.remote_travel_models:
                            # run this model remotely
                            self.run_remote_python_process(full_model_path, 
                                                           '-y %d -d %s' % (this_end_year, self.remote_communication_path),
                                                           config=travel_model_resources,
                                                           is_opus_path=True)
                            tripgen_dir = tm.get_emme2_dir(this_end_year, 'tripgen')
                            max_zone_id = self.copy_file_from_remote_host_and_get_max_zone('TAZDATA.MA2', '%s/inputtg' % tripgen_dir)
                        else:
                            # build optional_args as a list of command-line tokens for the forked tool
                            optional_args = ['-y', this_end_year]
                            if full_model_path == 'opus_emme2.models.get_emme2_data_into_cache':
                                optional_args += ['-m', '-z', max_zone_id]
                            elif full_model_path == 'opus_emme2.models.run_travel_model':
                                optional_args += ['-o', os.path.join(self.local_output_path, 'emme2_%d_log.txt' % this_end_year)]
                            elif full_model_path == 'opus_emme2.models.run_export_macros':
                                optional_args += ['-o', os.path.join(self.local_output_path, 'emme2_export_macros_%d_log.txt' % this_end_year)]
                            ForkProcess().fork_new_process(full_model_path, 
                                                           travel_model_resources, optional_args=optional_args)
                    reports = travel_model_resources['travel_model_configuration'].get('reports_to_copy', [])
                    for x in self.banks:
                        bank_dir = tm.get_emme2_dir(this_end_year, "bank%i" % x)
                        self.copy_file_to_remote_host("%s/*_one_matrix.txt" % bank_dir, subdirectory="bank%i" % x)
                        node_map = travel_model_resources['travel_model_configuration'].get('node_matrix_variable_map', {})
                        node_files = []
                        if "bank%i" % x in node_map.keys():
                            node_files = node_map["bank%i" % x].keys()
                        for report in reports+node_files:
                            report_name = os.path.join(bank_dir, report)
                            if os.path.exists(report_name):
                                self.copy_file_to_remote_host(report_name, subdirectory="bank%i" % x)
                        
                    self.run_remote_python_process(
                        'opus_emme2.models.get_emme2_data_into_cache',
                        '-y %d --matrix_directory=%s' % (this_end_year, self.remote_communication_path),
                        config=travel_model_resources,
                        is_opus_path=True)
                    
            this_start_year = travel_model_year + 1  # the next run starts in the year after this travel model year
                      dest="cache_directory",
                      action="store",
                      type="string",
                      help="Year in which to 'run' the travel model")
    parser.add_option("-y",
                      "--year",
                      dest="year",
                      action="store",
                      type="int",
                      help="Year in which to 'run' the travel model")
    parser.add_option("-t",
                      "--travel-model",
                      dest="travel_model",
                      default="baseline",
                      help="which travel model data to use")

    (options, args) = parser.parse_args()
    travel_models = {
        "baseline": "baseline_travel_model_psrc",
        "baseline2": "baseline_travel_model_psrc2",
        "no_build": "baseline_travel_model_psrc_no_build",
        "one_half_highway": "baseline_travel_model_psrc_highway_x_1.5",
    }
    travel_model_path = travel_models[options.travel_model]
    config = ModelConfig(options.cache_directory, options.year,
                         travel_model_path)

    ForkProcess().fork_new_process('opus_core.tools.start_run',
                                   resources=config,
                                   optional_args=["--hostname", "None"])
Example #10
            else:
                this_end_year = end_year
        urbansim_resources['years'] = (this_start_year, this_end_year)
    
        run_manager.services_db.execute(
               run_manager.services_db.delete(run_manager.services_db.c.run_id == run_id))
        run_manager.add_row_to_history(run_id, urbansim_resources, "started")
        
        try:
            os.system("%s -ssh -l %s -pw %s %s python %s %s %s --skip-cache-cleanup --skip-travel-model" % \
                       (plink, username, password, hostname, restart_run_py, run_id, this_start_year))
            
        except:
            raise StandardError, "problem running urbansim remotely"

        if not os.path.exists(os.path.join(local_cache_directory, str(this_end_year))):
            raise StandardError, "cache for year %s doesn't exist in directory %s; there may be problem with urbansim run" % \
                                (this_end_year, local_cache_directory)
        
        if travel_model_resources is not None:
            if travel_model_resources['travel_model_configuration'].has_key(this_end_year):
                for full_model_path in travel_model_resources['travel_model_configuration']['models']:
                    ForkProcess().fork_new_process(full_model_path, 
                        travel_model_resources, optional_args=['-y', this_end_year])

        if not os.path.exists(os.path.join(local_cache_directory, str(this_end_year+1))):
            raise StandardError, "travel model didn't create any output for year %s in directory %s; there may be problem with travel model run" % \
                                (this_end_year+1, local_cache_directory)
            
        this_start_year = travel_model_year + 1  # the next run starts in the year after this travel model year
Example #11
    def test_start_run_via_pickle(self):
        """A weak test of start_run - does it crash?"""
        ForkProcess().fork_new_process('opus_core.tools.start_run',
                                       self.config)