def prepare_for_estimate(self, specification_dict=None, specification_storage=None, specification_table=None, events_for_estimation_storage=None, events_for_estimation_table=None, urbansim_constant=None, base_year=0, categories=None):
    """Build the estimation specification and, optionally, estimation agents.

    When an event-history storage is supplied, development projects are
    created from events within the constant's 'recent_years' window.
    Returns a (specification, projects) tuple; projects is None when no
    event history is given.
    """
    from opus_core.model import get_specification_for_estimation
    specification = get_specification_for_estimation(specification_dict,
                                                    specification_storage,
                                                    specification_table)
    projects = None
    if events_for_estimation_storage is not None:
        # Load the development-event history and drop old events.
        history = DevelopmentEventDataset(in_storage=events_for_estimation_storage,
                                          in_table_name=events_for_estimation_table)
        history.remove_non_recent_data(base_year, urbansim_constant['recent_years'])
        creator = DevelopmentProjectCreator()
        projects = creator.create_projects_from_history(history,
                                                        self.project_type,
                                                        self.units,
                                                        categories)
    return (specification, projects)
def run(self, building_dataset, year, storage, in_table="development_events_exogenous", out_table="development_events_exogenous"):
    """Append buildings scheduled for `year` from an exogenous event table.

    Reads the event table (if present), keeps only events whose
    scheduled_year equals `year`, assigns fresh building ids and appends
    the events' attributes to building_dataset.  Returns the subsetted
    event dataset, or None when the input table does not exist.
    """
    if not storage.has_table(in_table):
        logger.log_status('No exogenous developments.')
        return
    events = DevelopmentEventDataset(in_storage=storage,
                                     in_table_name=in_table,
                                     out_table_name=out_table,
                                     id_name='event_id')
    # Restrict to this simulation year's events.
    this_year = where(events.get_attribute("scheduled_year") == year)[0]
    events.subset_by_index(this_year, flush_attributes_if_not_loaded=False)
    # New building ids continue after the current maximum id.
    new_buildings = {
        "building_id": building_dataset.get_id_attribute().max() + arange(1, this_year.size + 1)
    }
    for name in ('parcel_id', 'residential_units', 'building_sqft',
                 'non_residential_sqft', 'building_type_id', 'blklot'):
        new_buildings[name] = events.get_attribute(name)
    new_buildings['year_built'] = ones(this_year.size) * year
    building_dataset.add_elements(new_buildings, require_all_attributes=False)
    return events
def prepare_for_estimate_alt(self, agent_set, add_member_prefix=True,
                             specification_dict=None, specification_storage=None,
                             specification_table=None,
                             events_for_estimation_storage=None,
                             events_for_estimation_table=None,
                             urbansim_constant=None, base_year=0,
                             building_categories=None, location_id_variable=None,
                             dataset_pool=None, **kwargs):
    """Remove new buildings of this type and add new projects from event history instead."""
    # Member-specific building type name (e.g. 'residential').
    type = self.group_member.get_member_name()
    agent_set.resources["building_categories"] = building_categories
    # Compute building age and the indicator for this member's type.
    agent_set.compute_variables(["urbansim.%s.building_age" % agent_set.get_dataset_name(),
                                 "urbansim.%s.is_building_type_%s" % (agent_set.get_dataset_name(), type)],
                                dataset_pool = dataset_pool)
    # Indices of recently built buildings of this type.  Masked ages are
    # filled with recent_years + 1 so they fail the 'recent' test.
    idx_new_buildings = where(logical_and(
        where(ma.filled(agent_set.get_attribute("building_age"),
                        urbansim_constant['recent_years'] + 1) <= urbansim_constant['recent_years'],
              1, 0),
        agent_set.get_attribute("is_building_type_%s" % type)))[0]
    # create agents for estimation
    if events_for_estimation_storage is not None:
        # Replace the recent buildings with buildings generated from the
        # development-event history.
        agent_set.remove_elements(idx_new_buildings)
        event_set = DevelopmentEventDataset(urbansim_constant,
                                            in_storage = events_for_estimation_storage,
                                            in_table_name= events_for_estimation_table)
        event_set.remove_non_recent_data(base_year, urbansim_constant['recent_years'])
        BuildingCreator().add_events_from_history_to_existing_buildings(
            agent_set, event_set, type,
            self.group_member.get_member_code(),
            self.units, building_categories,
            dataset_pool=dataset_pool)
    if location_id_variable:
        agent_set.compute_variables(location_id_variable, dataset_pool=dataset_pool)
    if events_for_estimation_storage is None:
        # No event history: estimate on the buildings that are already new.
        agent_set.compute_variables(["urbansim.%s.size_category_%s" % (agent_set.get_dataset_name(),
                                                                       self.group_member.get_member_name()),
                                     "urbansim.%s.building_age" % agent_set.get_dataset_name()],
                                    dataset_pool = dataset_pool)
        # Recompute the 'new buildings' index after the variables above.
        idx_new_buildings = where(ma.filled(agent_set.get_attribute("building_age"),
                                            urbansim_constant['recent_years']+1) <= urbansim_constant['recent_years'])[0]
    if (specification_dict is not None) or (specification_storage is not None):
        # Delegate specification construction to the member base class.
        specification, dummy = AgentLocationChoiceModelMember.prepare_for_estimate(
            self, add_member_prefix,
            specification_dict, specification_storage,
            specification_table,
            location_id_variable=location_id_variable,
            data_objects=dataset_pool.datasets_in_pool(), **kwargs)
    else:
        specification = None
    return (specification, idx_new_buildings)
def run(self, developments, year=0, landuse_types=None, units=None, resources=None):
    """Aggregate a developments dataset into a DevelopmentEventDataset.

    Sums each unit attribute and the improvement value per grid cell and
    writes one event row per cell, scheduled for `year`.  Returns the
    event set, or None when no development has a positive grid_id.
    """
    # landuse_types = ['residential', 'commercial', 'industrial', 'governmental']
    # units=['residential_units', 'commercial_sqft','industrial_sqft','governmental_sqft']
    if not isinstance(resources, Resources):
        resources = Resources()
    grid_ids_for_project = array([], dtype=int32)
    # Fix: the Python-2-only '<>' operator was used here; 'is not None'
    # is the correct (and Python-3-compatible) test.
    if developments is not None:
        grid_ids_for_project = developments.get_attribute("grid_id")
    grid_ids_for_project = unique(grid_ids_for_project)
    grid_ids_for_project = grid_ids_for_project[where(grid_ids_for_project > 0)]
    if len(grid_ids_for_project) == 0:
        return
    sizes = grid_ids_for_project.size
    result_data = {"grid_id": grid_ids_for_project,
                   "scheduled_year": (year * ones((sizes,), dtype=int16)),
                   "development_type_id": zeros((sizes,), dtype=int16),
                   }
    for unit in units:
        result_data[unit] = zeros((sizes,), dtype=int32)
    for project_type in landuse_types:
        result_data["%s_improvement_value" % project_type] = zeros((sizes,), dtype=int32)
    grid_idx = 0
    for grid_id in grid_ids_for_project:
        w = where(developments.get_attribute('grid_id') == grid_id)[0]
        if w.size > 0:
            # Take the development type of the first matching project.
            result_data["development_type_id"][grid_idx] = \
                developments.get_attribute_by_index("development_type_id", w[0])
            for unit_variable in units:
                result_data[unit_variable][grid_idx] = \
                    developments.get_attribute_by_index(unit_variable, w).sum()
                # The improvement-value column name is derived from the
                # unit name's prefix (e.g. 'commercial_sqft' -> 'commercial').
                result_data["%s_improvement_value" % unit_variable.split('_')[0]][grid_idx] = \
                    developments.get_attribute_by_index("improvement_value", w).sum()
        grid_idx += 1
    storage = StorageFactory().get_storage('dict_storage')
    eventset_table_name = 'eventset'
    storage.write_table(
        table_name=eventset_table_name,
        table_data=result_data,
    )
    eventset = DevelopmentEventDataset(
        in_storage=storage,
        in_table_name=eventset_table_name,
        id_name=['grid_id', 'scheduled_year'],
    )
    self.debug.print_debug('Number of events: ' + str(grid_ids_for_project.size), 3)
    return eventset
def prepare_for_estimate(self, specification_dict = None, specification_storage=None, specification_table=None, events_for_estimation_storage=None, events_for_estimation_table=None, urbansim_constant=None, base_year=0, categories=None):
    """Return (specification, projects) for model estimation.

    The specification is resolved from the given dict/storage/table.
    Projects are created from the recent development-event history when
    an event storage is passed; otherwise projects is None.
    """
    from opus_core.model import get_specification_for_estimation
    spec = get_specification_for_estimation(specification_dict,
                                            specification_storage,
                                            specification_table)
    if events_for_estimation_storage is None:
        return (spec, None)
    # Build estimation agents from events within the recent-years window.
    events = DevelopmentEventDataset(in_storage=events_for_estimation_storage,
                                     in_table_name=events_for_estimation_table)
    events.remove_non_recent_data(base_year, urbansim_constant['recent_years'])
    projects = DevelopmentProjectCreator().create_projects_from_history(
        events, self.project_type, self.units, categories)
    return (spec, projects)
def run(self, storage, in_table="development_events", out_table="development_events"):
    """Load exogenous development events from storage.

    Returns a DevelopmentEventDataset, or None (after logging a status
    message) when the input table is absent.
    """
    if storage.has_table(in_table):
        return DevelopmentEventDataset(in_storage=storage,
                                       in_table_name=in_table,
                                       out_table_name=out_table)
    logger.log_status('No exogenous developments.')
    return None
def test_mix_of_type_of_changes(self):
    """One run applying REPLACE, ADD and DELETE residential-unit events."""
    storage = StorageFactory().get_storage('dict_storage')
    gridcell_set = self._create_simple_gridcell_set()
    change_codes = array([
        DevelopmentEventTypeOfChange.REPLACE,
        DevelopmentEventTypeOfChange.ADD,
        DevelopmentEventTypeOfChange.DELETE,
    ])
    event_data = {
        "residential_units": array([10, 20, 30]),
        "commercial_sqft": array([0, 11, 0]),
        "industrial_sqft": array([0, 11, 0]),
        "commercial_improvement_value": array([1, 2, 3]),
        "industrial_improvement_value": array([1, 2, 3]),
        "residential_improvement_value": array([1, 2, 3]),
        "scheduled_year": array([2000, 2000, 2000]),
        "grid_id": array([1, 2, 3]),
        "residential_units_change_type_code": change_codes,
    }
    storage.write_table(table_name='dev_events', table_data=event_data)
    events = DevelopmentEventDataset(in_storage=storage, in_table_name='dev_events')
    dev_types = self._create_simple_development_types_set()
    EventsCoordinator().run(gridcell_set, events, dev_types, 2000,
                            model_configuration=self.model_configuration)
    # Expected: cell 1 replaced to 10, cell 2 adds 20 to its existing 1,
    # cell 3 deleted to 0, cell 4 untouched.
    self.assert_(ma.allclose(gridcell_set.get_attribute("residential_units"),
                             array([10, 1 + 20, 0, 20])))
def prepare_for_estimate(self, specification_dict=None, specification_storage=None, specification_table=None, events_for_estimation_storage=None, events_for_estimation_table=None):
    """Return (specification, development) for estimation.

    development is created from the event history when an estimation
    storage is supplied; otherwise it is None.
    """
    from opus_core.model import get_specification_for_estimation
    spec = get_specification_for_estimation(specification_dict,
                                            specification_storage,
                                            specification_table)
    if events_for_estimation_storage is None:
        return (spec, None)
    # create agents for estimation
    history = DevelopmentEventDataset(in_storage=events_for_estimation_storage,
                                      in_table_name=events_for_estimation_table)
    return (spec, create_landuse_developments_from_history(history))
def _create_simple_development_event_set(self):
    """Build a four-event history spanning years 2000-2001.

    The last event uses grid_id -1; type_of_change columns are omitted,
    so every event uses the default (ADD).
    """
    event_data = {
        "residential_units": array([2, 0, 11, 10]),
        "commercial_sqft": array([0, 11, 0, 0]),
        "industrial_sqft": array([0, 11, 0, 0]),
        "commercial_improvement_value": array([1, 2, 3, 0]),
        "industrial_improvement_value": array([1, 2, 3, 0]),
        "residential_improvement_value": array([1, 2, 3, 10]),
        "scheduled_year": array([2000, 2000, 2001, 2001]),
        "grid_id": array([1, 2, 3, -1]),
    }
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(table_name='dev_events', table_data=event_data)
    return DevelopmentEventDataset(in_storage=storage, in_table_name='dev_events')
def do_type_of_changes_test(self, type_of_change):
    """Apply two events carrying the given change-type code to cells 1 and 3,
    run the coordinator for year 2000, and return the resulting
    residential_units attribute of the gridcell set.
    """
    storage = StorageFactory().get_storage('dict_storage')
    gridcells = self._create_simple_gridcell_set()
    storage.write_table(table_name='dev_events', table_data={
        'residential_units': array([10, 30]),
        'commercial_sqft': array([0, 0]),
        'industrial_sqft': array([0, 0]),
        'commercial_improvement_value': array([1, 3]),
        'industrial_improvement_value': array([1, 3]),
        'residential_improvement_value': array([1, 3]),
        'scheduled_year': array([2000, 2000]),
        'grid_id': array([1, 3]),
        # Same change code on every affected attribute.
        'residential_units_change_type_code': array([type_of_change, type_of_change]),
        'commercial_sqft_change_type_code': array([type_of_change, type_of_change]),
        'industrial_sqft_change_type_code': array([type_of_change, type_of_change]),
    })
    events = DevelopmentEventDataset(in_storage=storage, in_table_name='dev_events')
    dev_types = self._create_simple_development_types_set()
    EventsCoordinator().run(gridcells, events, dev_types, 2000,
                            model_configuration=self.model_configuration)
    return gridcells.get_attribute("residential_units")
def run(self, building_dataset, year, storage,
        in_table="development_events_exogenous",
        out_table="development_events_exogenous"):
    """Add exogenous developments scheduled for `year` to building_dataset.

    Reads the event table (if present), keeps only events whose
    scheduled_year equals `year`, assigns fresh building ids and appends
    the events' attributes as new buildings.  Returns the subsetted
    event dataset, or None when the input table is missing.
    """
    if not storage.has_table(in_table):
        logger.log_status('No exogenous developments.')
        return
    scheduled_development_events = DevelopmentEventDataset(
        in_storage=storage,
        in_table_name=in_table,
        out_table_name=out_table,
        id_name='event_id')
    # Only events scheduled for the current simulation year are applied.
    scheduled_index = where(
        scheduled_development_events.get_attribute("scheduled_year") == year)[0]
    scheduled_development_events.subset_by_index(
        scheduled_index, flush_attributes_if_not_loaded=False)
    # New building ids continue after the largest existing id.
    max_building_id = building_dataset.get_id_attribute().max()
    new_buildings = {}
    new_buildings["building_id"] = max_building_id + arange(
        1, scheduled_index.size + 1)
    attained_attributes = [
        'parcel_id', 'residential_units', 'building_sqft',
        'non_residential_sqft', 'building_type_id', 'blklot'
    ]
    # Copy these attributes directly from the events to the new buildings.
    for attribute in attained_attributes:
        new_buildings[
            attribute] = scheduled_development_events.get_attribute(
                attribute)
    new_buildings['year_built'] = ones(scheduled_index.size) * year
    building_dataset.add_elements(new_buildings,
                                  require_all_attributes=False)
    return scheduled_development_events
def prepare_for_estimate_alt(self, agent_set, add_member_prefix=True,
                             specification_dict=None, specification_storage=None,
                             specification_table=None,
                             events_for_estimation_storage=None,
                             events_for_estimation_table=None,
                             urbansim_constant=None, base_year=0,
                             building_categories=None,
                             location_id_variable=None, dataset_pool=None,
                             **kwargs):
    """Remove new buildings of this type and add new projects from event history instead."""
    # Building type name for this group member (e.g. 'residential').
    type = self.group_member.get_member_name()
    agent_set.resources["building_categories"] = building_categories
    # Variables needed to identify recent buildings of this type.
    agent_set.compute_variables([
        "urbansim.%s.building_age" % agent_set.get_dataset_name(),
        "urbansim.%s.is_building_type_%s" % (agent_set.get_dataset_name(), type)
    ], dataset_pool=dataset_pool)
    # Recently built buildings of this type; masked ages are filled with
    # recent_years + 1 so they do not count as recent.
    idx_new_buildings = where(
        logical_and(
            where(
                ma.filled(agent_set.get_attribute("building_age"),
                          urbansim_constant['recent_years'] + 1) <=
                urbansim_constant['recent_years'], 1, 0),
            agent_set.get_attribute("is_building_type_%s" % type)))[0]
    # create agents for estimation
    if events_for_estimation_storage is not None:
        # Drop the recent buildings; re-create them from the event history.
        agent_set.remove_elements(idx_new_buildings)
        event_set = DevelopmentEventDataset(
            urbansim_constant,
            in_storage=events_for_estimation_storage,
            in_table_name=events_for_estimation_table)
        event_set.remove_non_recent_data(base_year,
                                         urbansim_constant['recent_years'])
        BuildingCreator().add_events_from_history_to_existing_buildings(
            agent_set,
            event_set, type,
            self.group_member.get_member_code(),
            self.units,
            building_categories,
            dataset_pool=dataset_pool)
    if location_id_variable:
        agent_set.compute_variables(location_id_variable, dataset_pool=dataset_pool)
    if events_for_estimation_storage is None:
        # No history: estimate on the buildings that are already new and
        # recompute the index after the variables below.
        agent_set.compute_variables([
            "urbansim.%s.size_category_%s" % (agent_set.get_dataset_name(),
                                              self.group_member.get_member_name()),
            "urbansim.%s.building_age" % agent_set.get_dataset_name()
        ], dataset_pool=dataset_pool)
        idx_new_buildings = where(
            ma.filled(agent_set.get_attribute(
                "building_age"), urbansim_constant['recent_years'] + 1) <=
            urbansim_constant['recent_years'])[0]
    if (specification_dict is not None) or (specification_storage is not None):
        # Specification construction is delegated to the member base class.
        specification, dummy = AgentLocationChoiceModelMember.prepare_for_estimate(
            self, add_member_prefix,
            specification_dict, specification_storage,
            specification_table,
            location_id_variable=location_id_variable,
            data_objects=dataset_pool.datasets_in_pool(), **kwargs)
    else:
        specification = None
    return (specification, idx_new_buildings)
def run(self, projects, types, units, year=0, location_id_name="grid_id", debuglevel=0):
    """Combine per-type project datasets into one DevelopmentEventDataset.

    projects: dict mapping each entry of `types` to a project dataset or None.
    types/units: parallel lists; units[i] is the unit attribute of types[i].
    Returns the event set (one row per location, scheduled for `year`),
    or None when no project has a positive location id.
    """
    debug = DebugPrinter(debuglevel)
    # Union of location ids over all project types.
    grid_ids_for_any_project = array([], dtype=int32)
    grid_ids_by_project_type = {}
    for project_type in types:
        grid_ids_by_project_type[project_type] = array([], dtype=int32)
        # Fix: replaced the Python-2-only '<>' operator with 'is not None'.
        if projects[project_type] is not None:
            grid_ids_by_project_type[project_type] = projects[
                project_type].get_attribute(location_id_name)
        grid_ids_for_any_project = unique(
            concatenate((grid_ids_for_any_project,
                         grid_ids_by_project_type[project_type])))
    grid_ids_for_any_project = grid_ids_for_any_project[where(
        grid_ids_for_any_project > 0)]
    if not len(grid_ids_for_any_project):
        return
    # Zero-initialize one event row per location.
    result_data = {
        location_id_name: grid_ids_for_any_project,
        "scheduled_year": (year * ones(
            (grid_ids_for_any_project.size, ))).astype(int32)
    }
    for unit in units:
        result_data[unit] = zeros((grid_ids_for_any_project.size, ),
                                  dtype=int32)
    for project_type in types:
        result_data["%s_improvement_value" % project_type] = zeros(
            (grid_ids_for_any_project.size, ), dtype=int32)
    grid_idx = 0
    for grid_id in grid_ids_for_any_project:
        for i in range(0, len(types)):
            project_type = types[i]
            my_projects = projects[project_type]
            if my_projects is None:
                # Fix: the original dereferenced a None dataset here when
                # one project type had no projects but another did.
                continue
            w = where(
                my_projects.get_attribute(location_id_name) == grid_id)[0]
            if w.size > 0:
                unit_variable = units[i]
                # Sum the project's own attribute and improvement value
                # over all of its projects in this location.
                result_data[unit_variable][grid_idx] = \
                    my_projects.get_attribute_by_index(
                        my_projects.get_attribute_name(), w).sum()
                result_data["%s_improvement_value" % project_type][grid_idx] = \
                    my_projects.get_attribute_by_index(
                        "improvement_value", w).sum()
        grid_idx += 1
    storage = StorageFactory().get_storage('dict_storage')
    eventset_table_name = 'development_events_generated'
    storage.write_table(table_name=eventset_table_name,
                        table_data=result_data)
    eventset = DevelopmentEventDataset(
        in_storage=storage,
        in_table_name=eventset_table_name,
        id_name=[location_id_name, "scheduled_year"],
    )
    debug.print_debug(
        "Number of events: " + str(grid_ids_for_any_project.size), 3)
    return eventset
def test_unrolling(self):
    """Roll gridcell attributes back through a development-event history,
    one year at a time, checking sqft and development type after each step.
    """
    from urbansim.datasets.gridcell_dataset import GridcellDataset
    from urbansim.datasets.development_event_dataset import DevelopmentEventDataset
    storage = StorageFactory().get_storage('dict_storage')
    gridcells_table_name = 'gridcells'
    storage.write_table(
        table_name=gridcells_table_name,
        table_data={
            'grid_id': array([1, 2, 3]),
            'development_type_id': array([3, 3, 3]),
            'commercial_sqft': array([50, 50, 50]),
            'industrial_sqft': array([100, 100, 100]),
            # Rest of this data is not used by unit tests, but is required for unrolling
            'governmental_sqft': array([0, 0, 0]),
            'residential_units': array([0, 0, 0]),
            'commercial_improvement_value': array([0, 0, 0]),
            'industrial_improvement_value': array([0, 0, 0]),
            'governmental_improvement_value': array([0, 0, 0]),
            'residential_improvement_value': array([0, 0, 0]),
        },
    )
    dev_event_history_table_name = 'dev_event_history'
    storage.write_table(
        table_name=dev_event_history_table_name,
        table_data={
            'scheduled_year': array([1999, 1999, 1998, 1998]),
            'grid_id': array([1, 3, 2, 3]),
            'starting_development_type_id': array([3, 3, 2, 1]),
            'commercial_sqft': array([10, 20, 30, 40]),
            # Change-type codes: per the expected values below, 'A' events
            # are subtracted and 'D' events added back when unrolling;
            # 'R' presumably means replace — confirm against RollbackGridcells.
            'commercial_sqft_change_type': array(['A', 'A', 'A', 'A']),
            'industrial_sqft': array([20, 200, 99, 50]),
            'industrial_sqft_change_type': array(['A', 'D', 'R', 'A']),
            # Rest of this data is not used by unit tests, but is required for unrolling
            'governmental_sqft': array([0, 0, 0, 0]),
            'residential_units': array([0, 0, 0, 0]),
            'commercial_improvement_value': array([0, 0, 0, 0]),
            'industrial_improvement_value': array([0, 0, 0, 0]),
            'governmental_improvement_value': array([0, 0, 0, 0]),
            'residential_improvement_value': array([0, 0, 0, 0]),
        },
    )
    gridcells = GridcellDataset(in_storage=storage, in_table_name=gridcells_table_name)
    dev_event_history = DevelopmentEventDataset(
        in_storage=storage, in_table_name=dev_event_history_table_name)
    roller = RollbackGridcells()
    # No events scheduled in 2000: everything must stay unchanged.
    roller.unroll_gridcells_for_one_year(gridcells, dev_event_history, 2000)
    self.assert_(
        ma.allequal(gridcells.get_attribute('commercial_sqft'),
                    array([50, 50, 50])))
    self.assert_(
        ma.allequal(gridcells.get_attribute('industrial_sqft'),
                    array([100, 100, 100])))
    self.assert_(
        ma.allequal(gridcells.get_attribute('development_type_id'),
                    array([3, 3, 3])))
    # Unroll the two 1999 events (cells 1 and 3).
    roller.unroll_gridcells_for_one_year(gridcells, dev_event_history, 1999)
    self.assert_(
        ma.allequal(gridcells.get_attribute('commercial_sqft'),
                    array([40, 50, 30])),
        'Unexpected results for 1999: expected %s; received %s' %
        (array([40, 50, 30]), gridcells.get_attribute('commercial_sqft')))
    self.assert_(
        ma.allequal(gridcells.get_attribute('industrial_sqft'),
                    array([80, 100, 300])))
    self.assert_(
        ma.allequal(gridcells.get_attribute('development_type_id'),
                    array([3, 3, 3])))
    # Unroll the two 1998 events (cells 2 and 3); the starting development
    # types from the events now take effect.
    roller.unroll_gridcells_for_one_year(gridcells, dev_event_history, 1998)
    self.assert_(
        ma.allequal(gridcells.get_attribute('commercial_sqft'),
                    array([40, 20, 0])))
    self.assert_(
        ma.allequal(gridcells.get_attribute('industrial_sqft'),
                    array([80, 99, 250])))
    self.assert_(
        ma.allequal(gridcells.get_attribute('development_type_id'),
                    array([3, 2, 1])))
def get_resources(self, data_dictionary, dataset):
    """Create resources for computing a variable.

    data_dictionary maps known dataset names (self.datasets) to table
    data; each recognized key is wrapped in its dataset class and merged
    into the Resources object.  `dataset` selects which entry (or which
    interaction set, for keys in self.interactions) is exposed as
    resources["dataset"].
    """
    resources = Resources()
    for key in data_dictionary.keys():
        if key in self.datasets:
            data = data_dictionary[key]
            storage = StorageFactory().get_storage('dict_storage')
            # Auto-generate a 1..n id column when the table lacks one
            # (only for scalar id names, not composite list ids).
            if self.id_names[key] not in data_dictionary[key].keys(
            ) and not isinstance(self.id_names[key], list):
                data[self.id_names[key]] = arange(
                    1,
                    len(data_dictionary[key][data_dictionary[key].keys()
                                             [0]]) + 1)  # add id array
            id_name = self.id_names[key]
            storage.write_table(table_name='data', table_data=data)
            # Dispatch on the dataset name to the matching dataset class.
            if key == "gridcell":
                gc = GridcellDataset(in_storage=storage, in_table_name='data')
                # add relative_x and relative_y
                gc.get_id_attribute()
                # Lay the cells out on the smallest square grid that fits.
                n = int(ceil(sqrt(gc.size())))
                if "relative_x" not in data.keys():
                    x = (indices((n, n)) + 1)[1].ravel()
                    gc.add_attribute(x[0:gc.size()], "relative_x", metadata=1)
                if "relative_y" not in data.keys():
                    y = (indices((n, n)) + 1)[0].ravel()
                    gc.add_attribute(y[0:gc.size()], "relative_y", metadata=1)
                resources.merge({key: gc})
            elif key == "household":
                resources.merge({
                    key:
                    HouseholdDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "development_project":
                resources.merge({
                    key:
                    DevelopmentProjectDataset(in_storage=storage,
                                              in_table_name='data')
                })
            elif key == "development_event":
                resources.merge({
                    key:
                    DevelopmentEventDataset(in_storage=storage,
                                            in_table_name='data')
                })
            elif key == "neighborhood":
                resources.merge({
                    key:
                    NeighborhoodDataset(in_storage=storage,
                                        in_table_name='data')
                })
            elif key == "job":
                resources.merge({
                    key:
                    JobDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "zone":
                resources.merge({
                    key:
                    ZoneDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "travel_data":
                resources.merge({
                    key:
                    TravelDataDataset(in_storage=storage,
                                      in_table_name='data')
                })
            elif key == "faz":
                resources.merge({
                    key:
                    FazDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "fazdistrict":
                resources.merge({
                    key:
                    FazdistrictDataset(in_storage=storage,
                                       in_table_name='data')
                })
            elif key == "race":
                resources.merge({
                    key:
                    RaceDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "county":
                resources.merge({
                    key:
                    CountyDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "large_area":
                resources.merge({
                    key:
                    LargeAreaDataset(in_storage=storage, in_table_name='data')
                })
            elif key == "development_group":
                resources.merge({
                    key:
                    DevelopmentGroupDataset(in_storage=storage,
                                            in_table_name='data')
                })
            elif key == "employment_sector_group":
                resources.merge({
                    key:
                    EmploymentSectorGroupDataset(in_storage=storage,
                                                 in_table_name='data')
                })
            elif key == "plan_type_group":
                resources.merge({
                    key:
                    PlanTypeGroupDataset(in_storage=storage,
                                         in_table_name='data')
                })
            elif key == "building":
                resources.merge({
                    key:
                    BuildingDataset(in_storage=storage, in_table_name='data')
                })
        else:
            # Keys outside self.datasets are passed through unchanged.
            resources.merge({key: data_dictionary[key]})
    if dataset in self.interactions:
        # Wrap the two member datasets in the matching interaction class.
        if dataset == "household_x_gridcell":
            resources.merge({
                "dataset":
                HouseholdXGridcellDataset(dataset1=resources["household"],
                                          dataset2=resources["gridcell"])
            })
        if dataset == "job_x_gridcell":
            resources.merge({
                "dataset":
                JobXGridcellDataset(dataset1=resources["job"],
                                    dataset2=resources["gridcell"])
            })
        if dataset == "household_x_zone":
            resources.merge({
                "dataset":
                HouseholdXZoneDataset(dataset1=resources["household"],
                                      dataset2=resources["zone"])
            })
        if dataset == "household_x_neighborhood":
            resources.merge({
                "dataset":
                HouseholdXNeighborhoodDataset(
                    dataset1=resources["household"],
                    dataset2=resources["neighborhood"])
            })
        if dataset == "development_project_x_gridcell":
            resources.merge({
                "dataset":
                DevelopmentProjectXGridcellDataset(
                    dataset1=resources["development_project"],
                    dataset2=resources["gridcell"])
            })
    else:
        # Non-interaction dataset: expose the already-built dataset.
        resources.merge({"dataset": resources[dataset]})
    resources.merge({"check_variables": '*', "debug": 4})
    return resources