def __init__(self, config):
        ss = SimulationState(new_instance=True)
        ss.set_current_time(config['base_year'])
        ss.set_cache_directory(config['cache_directory'])

        SessionConfiguration(new_instance=True,
                             package_order=config['dataset_pool_configuration'].package_order,
                             in_storage=AttributeCache())
        #if not os.path.exists(config['cache_directory']):  ## if cache exists, it will automatically skip
        cacher = CreateBaseyearCache()
        cache_dir = cacher.run(config)

        if 'estimation_database_configuration' in config:
            db_server = DatabaseServer(config['estimation_database_configuration'])
            db = db_server.get_database(config['estimation_database_configuration'].database_name)
            out_storage = StorageFactory().get_storage(
                'sql_storage', storage_location=db)
        else:
            output_cache = os.path.join(config['cache_directory'], str(config['base_year']+1))
            out_storage = StorageFactory().get_storage('flt_storage', storage_location=output_cache)

        dataset_pool = SessionConfiguration().get_dataset_pool()
        households = dataset_pool.get_dataset("household")
        buildings = dataset_pool.get_dataset("building")
        zones = dataset_pool.get_dataset("zone")
        zone_ids = zones.get_id_attribute()
        capacity_attribute_name = "residential_units"  #_of_use_id_%s" % id
        capacity_variable_name = "%s=sanfrancisco.zone.aggregate_%s_from_building" % \
                                 (capacity_attribute_name, capacity_attribute_name)
        buildings.compute_variables("sanfrancisco.building.zone_id", dataset_pool=dataset_pool)
        zones.compute_variables(capacity_variable_name, dataset_pool=dataset_pool)

        building_zone_id = buildings.get_attribute('zone_id')
        
#        is_household_unplace = datasets['household'].get_attribute("building_id") <= 0
        is_household_unplaced = 1 #all households are unplaced
        household_building_id = zeros(households.size(), dtype='int32')-1 #datasets['household'].get_attribute("building_id")
        
        for zone_id in zone_ids:
            capacity = zones.get_attribute_by_id(capacity_attribute_name, zone_id)
            is_household_in_this_zone = (households.get_attribute('zone_id') == zone_id)
            is_unplaced_household_in_this_zone = is_household_in_this_zone * is_household_unplaced
            is_building_in_this_zone = (building_zone_id == zone_id)
#            if not is_household_in_this_zone.sum() <= capacity:
            if capacity == 0 or is_household_in_this_zone.sum() == 0:
                print "WARNING: skipping zone %s (%s households, %s residential units)" % (zone_id, is_household_in_this_zone.sum(), capacity)
                continue
                        
            prob = buildings.get_attribute(capacity_attribute_name) * is_building_in_this_zone / array(capacity, dtype=float64)

            r = random(sum(is_unplaced_household_in_this_zone))
            prob_cumsum = ncumsum(prob)
            index_to_bldg = searchsorted(prob_cumsum, r)

            household_building_id[where(is_unplaced_household_in_this_zone)] = buildings.get_attribute_by_index('building_id', index_to_bldg)

#        import pdb;pdb.set_trace()
        households.set_values_of_one_attribute('building_id', household_building_id)
        households.write_dataset(out_table_name='households', out_storage=out_storage)
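
The loop above does weighted sampling: per-building capacities are normalized into probabilities, and searchsorted over their cumulative sum maps uniform draws to building indices. A minimal, runnable sketch of just that technique (plain numpy; the weights and draw count are illustrative, not taken from the model):

from numpy import array, cumsum, searchsorted, float64
from numpy.random import random

weights = array([5, 0, 3, 2], dtype=float64)  # e.g. residential_units per building
prob = weights / weights.sum()                # normalize to sampling probabilities
r = random(10)                                # one uniform draw per household
chosen = searchsorted(cumsum(prob), r)        # sampled building index per draw;
                                              # zero-weight buildings are never hit
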
Example #2
    def prepare_for_run(self,
                        scheduled_events_dataset_name=None,
                        scheduled_events_table=None,
                        scheduled_events_storage=None):
        if (scheduled_events_storage is None) or (
            (scheduled_events_table is None) and
            (scheduled_events_dataset_name is None)):
            ## this should not happen
            dataset_pool = SessionConfiguration().get_dataset_pool()
            self.scheduled_events = dataset_pool.get_dataset(
                'scheduled_%s_events' % self.dataset.get_dataset_name())
            return self.scheduled_events

        if not scheduled_events_dataset_name:
            scheduled_events_dataset_name = \
                DatasetFactory().dataset_name_for_table(scheduled_events_table)

        self.scheduled_events = DatasetFactory().search_for_dataset(
            scheduled_events_dataset_name,
            package_order=SessionConfiguration().package_order,
            arguments={
                'in_storage': scheduled_events_storage,
                'in_table_name': scheduled_events_table,
                'id_name': []
            })
        return self.scheduled_events
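
A hypothetical call of the method above; the model instance and the storage object holding the events table are placeholders, not part of the example:

events = model.prepare_for_run(
    scheduled_events_table='scheduled_building_events',  # placeholder table name
    scheduled_events_storage=events_storage)             # e.g. an flt or sql storage
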
Example #3
 def _do_flush_dependent_variables_if_required(self):
     try:
         if not SessionConfiguration().get('flush_variables', False):
             return
     except:
         return
     from opus_core.datasets.interaction_dataset import InteractionDataset
     dataset = self.get_dataset()
     dependencies = self.get_current_dependencies()
     my_dataset_name = dataset.get_dataset_name()
     for iattr in range(len(dependencies)):  # iterate over dependent variables
         dep_item = dependencies[iattr][0]
         if isinstance(dep_item, str):
             depvar_name = VariableName(dep_item)
         else:
             # dep_item should be an instance of AttributeBox
             depvar_name = dep_item.get_variable_name()
         dataset_name = depvar_name.get_dataset_name()
         if dataset_name == my_dataset_name:
             ds = dataset
         else:
             ds = SessionConfiguration().get_dataset_from_pool(dataset_name)
             #ds = dataset_pool.get_dataset('dataset_name')
         if not isinstance(ds, InteractionDataset):
             short_name = depvar_name.get_alias()
             if short_name not in ds.get_id_name():
                 ds.flush_attribute(depvar_name)
Example #4
    def unroll_gridcells_to_cache_from_buildings(self, gridcells, buildings, 
                                  cache_directory, base_year):
        """Populate the cache with the unrolled gridcells info derived
        from the buildings table.
        """
        logger.start_block('Unrolling gridcell data from buildings')

        try:
            storage = AttributeCache().get_flt_storage_for_year(base_year)
            
            urbansim_constant = SessionConfiguration().get_dataset_from_pool('urbansim_constant')
            print "recent_years = %s" % urbansim_constant['recent_years']
            
            recent_years = urbansim_constant['recent_years']
            roller = RollbackGridcellsFromBuildings()
            for year in range(base_year, base_year-recent_years-1, -1):
                logger.start_block('Unrolling gridcells into year %d' % (year-1))
                try:
                    roller.unroll_gridcells_for_one_year(gridcells, 
                                                         buildings, 
                                                         year,
                                                         dataset_pool=SessionConfiguration().get_dataset_pool())
                    flt_directory = os.path.join(cache_directory, str(year-1))
                    flt_storage = StorageFactory().get_storage(
                        type='flt_storage', subdir='store', 
                        storage_location=flt_directory)
                    gridcells.write_dataset(out_storage=flt_storage)
                finally:
                    logger.end_block()
                
        finally:
            logger.end_block()
Example #5
    def _make_all_indicators(self, indicators, source_data):

        computed_indicators = {}
        for year in source_data.years:
            SimulationState().set_current_time(year)
            SessionConfiguration(new_instance=True,
                                 package_order=self.package_order,
                                 in_storage=AttributeCache())

            # Rebuild the per-dataset grouping each year so indicators are not
            # appended (and recomputed) once per preceding year.
            indicators_by_dataset = {}
            for name, indicator in indicators.items():
                dataset_name = indicator.dataset_name
                if dataset_name not in indicators_by_dataset:
                    indicators_by_dataset[dataset_name] = [(name, indicator)]
                else:
                    indicators_by_dataset[dataset_name].append(
                        (name, indicator))

            for dataset_name, indicators_in_dataset in indicators_by_dataset.items():
                dataset = SessionConfiguration().get_dataset_from_pool(
                    dataset_name)

                self._make_indicators_for_dataset(
                    dataset=dataset,
                    indicators_in_dataset=indicators_in_dataset,
                    source_data=source_data,
                    computed_indicators=computed_indicators,
                    year=year)

        self.computed_indicators[source_data.name] = computed_indicators
        return computed_indicators
Example #6
    def tmp_skip_test_gridcell_unrolling_changes_development_type_id(self):
        """Does unrolling update development_type_id?
        """
        # Force one grid cell to be "vacant", so can check that development_type_id changes.
        cache_directory = SimulationState().get_cache_directory()
        flt_directory = os.path.join(cache_directory, str(self.base_year))
        development_event_history = DatasetFactory().get_dataset(
            'development_event_history',
            package='urbansim',
            subdir='datasets',
            arguments={
                'in_storage': StorageFactory().get_storage(
                    'flt_storage', storage_location=flt_directory)
            })
        changed_grid_id = 10123
        new_row = {
            'grid_id': array([changed_grid_id]),
            'scheduled_year': array([self.base_year - 1]),
            'residential_units': array([1000]),
            'commercial_sqft': array([10000000]),
            'industrial_sqft': array([10000000]),
            'governmental_sqft': array([10000000]),
            'starting_development_type_id': array([1000]),
        }
        development_event_history.add_elements(new_row,
                                               require_all_attributes=False)
        development_event_history.flush_dataset()

        gridcells = SessionConfiguration().get_dataset_from_pool('gridcell')
        development_event_history = SessionConfiguration().get_dataset_from_pool(
            'development_event_history')
        unroller = UnrollGridcells()
        unroller.unroll_gridcells_to_cache(gridcells,
                                           development_event_history,
                                           cache_directory, self.base_year)

        cache_directory = SimulationState().get_cache_directory()
        self.assertEqual(self.temp_dir, os.path.split(cache_directory)[0])

        gridcell = {}
        for year in [1978, 1979]:
            flt_directory = os.path.join(cache_directory, str(year))
            gridcell[year] = DatasetFactory().get_dataset(
                'gridcell',
                package='urbansim',
                subdir='datasets',
                arguments={
                    'in_storage': StorageFactory().get_storage(
                        'flt_storage', storage_location=flt_directory)
                })
        self.assertEqual(
            gridcell[1978].get_attribute_by_id('development_type_id',
                                               changed_grid_id), 1000)
        self.assertNotEqual(
            gridcell[1979].get_attribute_by_id('development_type_id',
                                               changed_grid_id),
            gridcell[1978].get_attribute_by_id('development_type_id',
                                               changed_grid_id))
Example #7
    def _test_generate_results(self, indicator_name, dataset_name, expression,
                               source):

        # grab the first base_year_data in results_manager/simulation_runs and
        # fetch the year for it
        base_year = self.project.find(
            "results_manager/simulation_runs/run[@name='base_year_data']/end_year"
        )
        if base_year is None:
            return False, "Project doesn't have any base year data to check against"

        start_year = int(base_year.text)
        result_generator = OpusResultGenerator(self.project)
        result_generator.set_data(source_data_name='base_year_data',
                                  indicator_name=indicator_name,
                                  dataset_name=dataset_name,
                                  years=[start_year],
                                  indicator_definition=(expression, source))

        interface = IndicatorFrameworkInterface(self.project)
        src_data = interface.get_source_data(source_data_name='base_year_data',
                                             years=[start_year])
        SimulationState().set_current_time(start_year)
        SimulationState().set_cache_directory(src_data.cache_directory)
        SessionConfiguration(
            new_instance=True,
            package_order=src_data.dataset_pool_configuration.package_order,
            in_storage=AttributeCache())

        dataset = SessionConfiguration().get_dataset_from_pool(dataset_name)
        if isinstance(dataset, InteractionDataset):
            # create a subset if it's an interaction dataset...
            dataset_arguments = {
                'index1': numpy.random.randint(0, dataset.dataset1.size(), size=100),
                'index2': numpy.random.randint(0, dataset.dataset2.size(), size=100),
            }
            SessionConfiguration().delete_datasets()
            dataset = SessionConfiguration().get_dataset_from_pool(
                dataset_name, dataset_arguments=dataset_arguments)

        try:
            dataset.compute_variables(names=[expression])
            return True, None
        except Exception, e:
            # Avoid shadowing the builtin 'type'.
            exc_type, exc_value, exc_tb = sys.exc_info()
            stack_dump = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
            errors = "{}\n\n{}".format(e, stack_dump)
            return False, errors
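
A hypothetical caller of the check above; the indicator name and expression are placeholder opus-style inputs, not taken from a real project:

ok, errors = self._test_generate_results(
    indicator_name='population',                     # placeholder indicator
    dataset_name='zone',
    expression='zone.aggregate(household.persons)',  # placeholder expression
    source='expression')
if not ok:
    print errors
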
Example #8
    def prepare_session_configuration(self, force_reload=False, debuglevel=4):
        MySettings.prepare_session_configuration(self, force_reload=force_reload, debuglevel=debuglevel)

        if self.use_flt_for_big_datasets:                
            if force_reload or "neighborhood" not in SessionConfiguration().get_dataset_pool():
                if os.path.exists(os.path.join(self.dir, self.nbsubdir)):
                    nbs = NeighborhoodDataset(in_storage=StorageFactory().get_storage('flt_storage', storage_location=self.dir), 
                                      in_table_name=self.nbsubdir,
                                      out_storage=StorageFactory().get_storage('flt_storage', storage_location=self.outputdir), 
                                      debuglevel=debuglevel)
                    SessionConfiguration().get_dataset_pool()._add_dataset(self.location_set_dataset, nbs)
                else:
                    logger.log_warning(os.path.join(self.dir, self.nbsubdir) + " doesn't exist; try to load neighborhood set from database %s" % self.db)                        
Example #9
    def _compute_variable_for_prior_year(self,
                                         dataset,
                                         full_name,
                                         time,
                                         resources=None):
        """Create a new dataset for this variable, compute the variable, and then return
        the values for this variable."""
        calling_dataset_pool = SessionConfiguration().get_dataset_pool()
        calling_time = SimulationState().get_current_time()
        SimulationState().set_current_time(time)
        try:
            # Get an empty dataset pool with same search paths.
            my_dataset_pool = DatasetPool(
                package_order=calling_dataset_pool.get_package_order(),
                storage=AttributeCache())

            ds = dataset.empty_dataset_like_me(in_storage=AttributeCache())

            # Don't pass any datasets via resources, since they may be from a different time.
            my_resources = Resources(resources)
            for key in my_resources:
                if isinstance(key, Dataset):
                    del my_resources[key]

            ds.compute_variables(full_name,
                                 my_dataset_pool,
                                 resources=my_resources)
            values = ds.get_attribute(full_name)
            return values
        finally:
            SimulationState().set_current_time(calling_time)
Example #10
    def get_needed_matrices_from_emme2(self,
                                       year,
                                       cache_directory,
                                       bank_dir,
                                       matrix_variable_map,
                                       matrices_created=False):
        """Copies the specified emme/2 matrices into the specified travel_data variable names.
        """
        logger.start_block('Getting matrices from emme2')
        try:
            zone_set = SessionConfiguration().get_dataset_from_pool('zone')
            zone_set.load_dataset()
            travel_data_set = self.get_travel_data_from_emme2(
                zone_set, bank_dir, matrix_variable_map, matrices_created)
        finally:
            logger.end_block()

        logger.start_block('Writing data to cache')
        try:
            next_year = year + 1
            out_storage = AttributeCache().get_flt_storage_for_year(next_year)
            travel_data_set.write_dataset(attributes='*',
                                          out_storage=out_storage,
                                          out_table_name='travel_data')
        finally:
            logger.end_block()
Example #11
    def setUp(self):
        run_configuration = TestCacheConfiguration()
        SimulationState(new_instance=True)
        SessionConfiguration(run_configuration,
                             new_instance=True,
                             package_order=['urbansim', 'opus_core'],
                             in_storage=AttributeCache())

        self.base_year = run_configuration['base_year']
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')

        # Use the test cache.
        opus_core_path = package().get_opus_core_path()
        test_cache_path = os.path.join(opus_core_path, 'data', 'test_cache')
        new_cache_path = os.path.join(self.temp_dir, 'cache')
        copytree(test_cache_path, new_cache_path)

        # Make sure the copied files are writable.
        for (dirpath, dirnames, filenames) in os.walk(new_cache_path):
            for file_name in filenames:
                full_path = os.path.join(dirpath, file_name)
                os.chmod(full_path, S_IWRITE | S_IREAD)

        SimulationState().set_cache_directory(new_cache_path)
        SimulationState().set_current_time(self.base_year)
        self.config = Resources(run_configuration)

        cache_directory = SimulationState().get_cache_directory()
        self.assertEqual(self.temp_dir, os.path.split(cache_directory)[0])
Example #12
def prepare_for_running_macro(parser):
    from opus_core.file_utilities import get_resources_from_file
    parser.add_option("-r",
                      "--resources",
                      dest="resources_file_name",
                      action="store",
                      type="string",
                      help="Name of file containing resources")
    parser.add_option("-y",
                      "--year",
                      dest="year",
                      action="store",
                      type="int",
                      help="Year in which to 'run' the travel model")
    parser.add_option("-o",
                      "--output-file",
                      dest="output_file",
                      action="store",
                      type="string",
                      default=None,
                      help="Output log file. If not given, it is written "
                           "into the urbansim cache directory.")
    (options, args) = parser.parse_args()

    resources = Resources(get_resources_from_file(options.resources_file_name))

    SessionConfiguration(
        new_instance=True,
        package_order=resources['dataset_pool_configuration'].package_order,
        in_storage=AttributeCache())
    return (resources, options)
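
A hypothetical driver for the function above, assuming this module's imports are available and a resources file plus year are passed on the command line (e.g. "-r resources.pickle -y 2005"):

from optparse import OptionParser

parser = OptionParser()
resources, options = prepare_for_running_macro(parser)
print "running travel model for year %s" % options.year
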
Example #13
 def create_dataset_pool(self, dataset_pool, pool_packages=['opus_core']):
     if dataset_pool is None:
         try:
             return SessionConfiguration().get_dataset_pool()
         except:
             return DatasetPool(pool_packages)
     return dataset_pool
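
A hypothetical use of the fallback above: passing None returns the session-wide pool, or a fresh DatasetPool when no session is configured (self stands in for whatever model object carries the method):

pool = self.create_dataset_pool(None, pool_packages=['urbansim', 'opus_core'])
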
Example #14
def opusRun(progressCB,logCB,params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)
        
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    csv_data_path = params_dict['csv_data_path']
    table_name = params_dict['csv_table_name']
    
    input_storage = csv_storage(storage_location = csv_data_path)
    
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        lst = input_storage.get_table_names()
    else:
        lst = [table_name]
        
    for i in lst:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
                   (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name = i,
            in_storage = input_storage,
            out_storage = output_storage)

    logCB("Successfully exported all tables.")
Example #15
 def _set_cache_directory(self, cache_directory):
     if cache_directory != SimulationState().get_cache_directory():
         SimulationState().set_cache_directory(cache_directory)
         SessionConfiguration(
             new_instance=True,
             package_order=self.dataset_pool_configuration.package_order,
             in_storage=AttributeCache())
Example #16
    def _check_dataset_methods_on_dataset_view(self, ds, years_to_merge):
        self.assert_(ds is not None)
        ds.load_dataset(attributes='*',
                        in_table_name='tests',
                        in_storage=AttributeCache())
        id = ds.get_attribute('id')
        attr1 = ds.get_attribute('attr1')

        # Does compute_variables work?
        ds.compute_variables(['opus_core.test.attr1_times_2'])
        attr1_times_2 = ds.get_attribute('attr1_times_2')

        # Are values as expected?
        self.assert_(ma.allequal(attr1 * 2, attr1_times_2))

        # Does results have expected number of elements?
        self.assertEqual(len(years_to_merge) * 3, len(attr1_times_2))

        # Does _compute_if_needed work?
        ds._compute_if_needed(
            'opus_core.test.attr2_times_2',
            dataset_pool=SessionConfiguration().get_dataset_pool())
        attr2_times_2 = ds.get_attribute('attr2_times_2')
        attr2 = ds.get_attribute('attr2')
        self.assert_(ma.allequal(attr2 * 2, attr2_times_2))
Example #17
    def test_at_year_2000(self):
        cache_dir = os.path.join(self.temp_dir, 'cache')
        data = {
            self._id_name: array([1, 2, 3]),
            'population': array([10, 20, 30]),
        }
        self._write_data_to_year(data, cache_dir, 2000)

        attribute_cache = AttributeCache(cache_directory=cache_dir)
        SimulationState(new_instance=True, base_cache_dir=self.temp_dir)
        SimulationState().set_cache_directory(cache_dir)
        SessionConfiguration(new_instance=True, in_storage=attribute_cache)

        SimulationState().set_current_time(2000)
        dataset_pool_2000 = DatasetPool(package_order=['urbansim'],
                                        storage=attribute_cache)
        dataset = dataset_pool_2000.get_dataset(self._dataset_name)
        variable_name = '%s.%s.percent_population_difference_from_2000' % (
            self._package_name, self._dataset_name)
        dataset.compute_variables([variable_name],
                                  dataset_pool=dataset_pool_2000)
        pop_2000 = dataset.get_attribute(variable_name)
        self.assert_(ma.allequal(pop_2000, array([0, 0, 0])))
Example #18
    def __init__(self, name_of_dataset_to_merge, in_table_name,
                 attribute_cache, years_to_merge, *args, **kwargs):
        """Create a dataset that contains this many years of data from this dataset.
        
        Years are from current year backwards, inclusive.
        """
        self.name_of_dataset_to_merge = name_of_dataset_to_merge
        self.years_to_merge = years_to_merge

        self._validate_primary_attributes_same_for_all_years(
            name_of_dataset_to_merge, in_table_name, attribute_cache,
            years_to_merge)

        # Add 'year' to id_names.
        dataset_for_current_year = SessionConfiguration().get_dataset_from_pool(
            self.name_of_dataset_to_merge)
        id_names = dataset_for_current_year.get_id_name() + ['year']
        self.base_id_name = dataset_for_current_year.get_id_name()

        # Masquerade as a dataset of the right type (important for computing the right variables).
        dataset_name = dataset_for_current_year.get_dataset_name()

        AbstractDataset.__init__(self,
                                 id_name=id_names,
                                 in_table_name=in_table_name,
                                 dataset_name=dataset_name,
                                 *args,
                                 **kwargs)

        coord_system = dataset_for_current_year.get_coordinate_system()
        if coord_system is not None:
            self._coordinate_system = coord_system
Example #19
    def test_compute_a_variable(self):
        """Return merged dataset for this set of years."""
        test_data = {
            1000: {
                'tests': {
                    'id': array([1, 2, 3]),
                    'attr1': array([10, 20, 30]),
                },
            },
            1001: {
                'tests': {
                    'id': array([1, 2, 3]),
                    'attr1': array([40, 50, 60]),
                },
            },
        }
        cache_creator = CreateTestAttributeCache()
        cache_creator.create_attribute_cache_with_data(self.temp_dir,
                                                       test_data)

        attribute_cache = AttributeCache()
        SessionConfiguration(new_instance=True,
                             package_order=['opus_core'],
                             in_storage=attribute_cache)
        ds = MultipleYearDatasetView(
            name_of_dataset_to_merge='test',
            in_table_name='tests',
            years_to_merge=[1000, 1001],
            attribute_cache=attribute_cache,
        )

        ds.compute_variables(['opus_core.test.attr1_times_2'])
Example #20
    def prepare_for_simulation(self, config, cache_directory=None):
        self.config = Resources(config)
        base_cache_dir = self.config[
            'creating_baseyear_cache_configuration'].cache_directory_root

        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=base_cache_dir,
                                                start_time=self.config.get(
                                                    'base_year', 0))

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config['cache_directory'] = \
                self.simulation_state.get_cache_directory()

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].package_order,
            in_storage=AttributeCache())

        if config['creating_baseyear_cache_configuration'].cache_from_database:
            ForkProcess().fork_new_process(
                self.config['creating_baseyear_cache_configuration'].cache_scenario_database,
                self.config)
        else:
            CacheFltData().run(self.config)
Example #21
def _do_run_simple_test_run(caller, temp_dir, config, end_year=None):
    """Runs model system with a single model (for speed).
    Sets the .resources property of the caller before starting the run.
    """

    runs_manager = RunManager(config)

    run_configuration = _get_run_config(temp_dir=temp_dir)

    insert_auto_generated_cache_directory_if_needed(run_configuration)
    run_configuration[
        'creating_baseyear_cache_configuration'].cache_directory_root = temp_dir
    run_configuration['models'] = ['land_price_model']
    if end_year is not None:
        run_configuration['years'] = (run_configuration['years'][0], end_year)

    SessionConfiguration(
        new_instance=True,
        package_order=run_configuration['dataset_pool_configuration'].package_order,
        in_storage=AttributeCache())
    insert_auto_generated_cache_directory_if_needed(run_configuration)
    caller.resources = run_configuration
    runs_manager.setup_new_run(
        cache_directory=run_configuration['cache_directory'],
        configuration=run_configuration)
    runs_manager.run_run(run_configuration)
Example #22
    def prepare_for_simulation(self, run_configuration, cache_directory=None):
        self.config = Resources(run_configuration)
        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=cache_directory)

        ### TODO: Get rid of this! There is no good reason to be changing the
        ###       Configuration.
        if self.config['cache_directory'] is None:
            self.config['cache_directory'] = \
                self.simulation_state.get_cache_directory()

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].package_order,
            in_storage=AttributeCache())

        ForkProcess().fork_new_process(
            self.config['creating_baseyear_cache_configuration'].cache_scenario_database,
            self.config)

        # Create output database (normally done by run manager)
        if 'estimation_database_configuration' in self.config:
            db_server = DatabaseServer(
                self.config['estimation_database_configuration'])
            if not db_server.has_database(
                    self.config['estimation_database_configuration'].database_name):
                db_server.create_database(
                    self.config['estimation_database_configuration'].database_name)
Example #23
 def cleanup(self, remove_cache=True):
     """Remove all outputs of this simulation."""
     self.simulation_state.remove_singleton(delete_cache=remove_cache)
     SessionConfiguration().remove_singleton()
     if remove_cache:
         cache_dir = self.config['cache_directory']
         if os.path.exists(cache_dir):
             rmtree(cache_dir)
Example #24
 def determine_stored_attribute_names(self,
                                      resources=None,
                                      in_storage=None,
                                      in_table_name=None,
                                      attribute_type=AttributeType.PRIMARY):
     dataset_for_current_year = SessionConfiguration().get_dataset_from_pool(
         self.name_of_dataset_to_merge)
     return dataset_for_current_year.determine_stored_attribute_names()
Example #25
 def _compute_variables_for_dataset_if_needed(self, dataset,
                                              variable_names):
     known_attributes = dataset.get_known_attribute_names()
     dataset_pool = SessionConfiguration().get_dataset_pool()
     for variable in variable_names:
         alias = VariableName(variable).get_alias()
         if variable not in known_attributes and alias not in known_attributes:
             dataset.compute_one_variable_with_unknown_package(
                 variable, dataset_pool=dataset_pool)
Example #26
 def flush_datasets_after_model(self, resources):
     if resources.get('flush_variables', False):
         AttributeCache().delete_computed_tables()
         # this will also delete computed attributes
         datasets_to_cache = \
             SessionConfiguration().get_dataset_pool().datasets_in_pool().keys()
     else:
         datasets_to_cache = resources.get(
             "datasets_to_cache_after_each_model", [])
     self.flush_datasets(datasets_to_cache, after_model=True)
Example #27
 def __init__(self):
     self.dependencies_list = None
     self.dataset = None
     self.number_of_compute_runs = 0
     try:
         self.debug = SessionConfiguration().get('debuglevel', 0)
     except:
         self.debug = 0
     if isinstance(self.debug, int):
         self.debug = DebugPrinter(self.debug)
Example #28
 def prepare_for_run(self, dataset_name=None, table_name=None, storage=None):
     if (storage is None) or ((table_name is None) and (dataset_name is None)):
         dataset_pool = SessionConfiguration().get_dataset_pool()
         dataset = dataset_pool.get_dataset( 'target_vacancy' )
         return dataset
     
     if not dataset_name:
         dataset_name = DatasetFactory().dataset_name_for_table(table_name)
     
     dataset = DatasetFactory().search_for_dataset(dataset_name,
                                                   package_order=SessionConfiguration().package_order,
                                                   arguments={'in_storage':storage, 
                                                              'in_table_name':table_name,
                                                              'id_name':[]
                                                              }
                                                   )
     if self.target_vacancy_dataset is None:
         self.target_vacancy_dataset = dataset
         
     return dataset
Example #29
    def prepare_dataset_pool(self, recent_years):
        cache_dir = os.path.join(self.urbansim_tmp, 'urbansim_cache')
        SimulationState().set_cache_directory(cache_dir)

        storage = StorageFactory().get_storage('dict_storage')
        dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)

        storage.write_table(table_name='gridcells',
                            table_data={
                                'grid_id': array([1, 2, 3, 4]),
                                'industrial_sqft': array([4, 0, 1, 0]),
                            })
        gridcell = dataset_pool.get_dataset('gridcell')
        self._write_dataset_to_cache(gridcell, cache_dir, 1998)
        dataset_pool.remove_all_datasets()

        storage.write_table(table_name='gridcells',
                            table_data={
                                'grid_id': array([1, 2, 3, 4]),
                                'industrial_sqft': array([3, 0, 2, 1]),
                            })
        gridcell = dataset_pool.get_dataset('gridcell')
        self._write_dataset_to_cache(gridcell, cache_dir, 1999)
        dataset_pool.remove_all_datasets()

        storage.write_table(table_name='gridcells',
                            table_data={
                                'grid_id': array([1, 2, 3, 4]),
                                'industrial_sqft': array([3, 0, 3, 1]),
                            })
        gridcell = dataset_pool.get_dataset('gridcell')
        self._write_dataset_to_cache(gridcell, cache_dir, 2000)
        dataset_pool.remove_all_datasets()

        storage.write_table(table_name='urbansim_constants',
                            table_data={
                                'recent_years': array([recent_years]),
                            })

        SimulationState().set_current_time(2001)
        attribute_cache = AttributeCache()
        SessionConfiguration(new_instance=True,
                             package_order=['urbansim'],
                             in_storage=attribute_cache)
        dataset_pool = DatasetPool(package_order=['urbansim'],
                                   storage=attribute_cache)

        # Can't write urbansim_constant, so directly add it to the pool.
        temp_dataset_pool = DatasetPool(package_order=['urbansim'],
                                        storage=storage)
        dataset_pool._add_dataset(
            'urbansim_constant',
            temp_dataset_pool.get_dataset('urbansim_constant'))
        return dataset_pool
Example #30
    def __init__(self,
                 model,
                 year,
                 scenario_name=None,
                 model_group=None,
                 configuration=None,
                 xml_configuration=None,
                 cache_directory=None):
        self.model_group = model_group
        self.explored_model = model

        if configuration is None:
            if xml_configuration is None:
                raise StandardError, "Either dictionary based or XML based configuration must be given."
            config = xml_configuration.get_run_configuration(scenario_name)
        else:
            config = Configuration(configuration)

        if model is not None:
            dependent_models = config['models_configuration'][model][
                'controller'].get('dependencies', [])
            config['models'] = dependent_models
            if model_group is None:
                config['models'] = config['models'] + [{model: ["run"]}]
            else:
                config['models'] = config['models'] + [{
                    model: {
                        "group_members": [{
                            model_group: ["run"]
                        }]
                    }
                }]
        else:
            config['models'] = []

        config['years'] = [year, year]
        config["datasets_to_cache_after_each_model"] = []
        config['flush_variables'] = False

        self.config = Resources(config)
        self.xml_configuration = xml_configuration

        if cache_directory is None:
            cache_directory = config['creating_baseyear_cache_configuration'] \
                .baseyear_cache.existing_cache_to_copy
        self.simulation_state = SimulationState(new_instance=True,
                                                base_cache_dir=cache_directory)
        self.config['cache_directory'] = cache_directory

        SessionConfiguration(
            new_instance=True,
            package_order=self.config['dataset_pool_configuration'].package_order,
            in_storage=AttributeCache())