def run(self, specification, coefficients, dataset, index=None,
        chunk_specification=None, data_objects=None, run_config=None,
        debuglevel=0):
    """Run the regression over a proposal dataset and store the outcome.

    For argument semantics see RegressionModel.  ``dataset`` is expected to
    be a DevelopmentProjectProposalDataset; ``index`` and the filter
    attribute passed to __init__ are relative to it.  Returns ``dataset``
    with the outcome attribute filled in.
    """
    if data_objects is not None:
        self.dataset_pool.add_datasets_if_not_included(data_objects)
    # Derive the component set for these proposals and register it so that
    # component-level variables can be computed during the regression.
    components = create_from_proposals_and_template_components(
        dataset, self.dataset_pool.get_dataset('development_template_component'))
    self.dataset_pool.replace_dataset(components.get_dataset_name(), components)
    outcome = RegressionModel.run(
        self, specification, coefficients, dataset, index=index,
        chunk_specification=chunk_specification, data_objects=data_objects,
        run_config=run_config, debuglevel=debuglevel)
    if self.outcome_attribute_name.startswith("ln_"):
        # The model was fit on the log scale: drop the "ln_" prefix and
        # exponentiate the predictions back to natural units.
        self.outcome_attribute_name = self.outcome_attribute_name[3:]
        outcome = exp(outcome)
    if self.outcome_attribute_name not in dataset.get_known_attribute_names():
        # NOTE(review): 'defalult_value' matches the attribute's spelling on
        # the class; do not "correct" it here alone.
        dataset.add_primary_attribute(
            self.defalult_value + zeros(dataset.size()),
            self.outcome_attribute_name)
    dataset.set_values_of_one_attribute(self.outcome_attribute_name, outcome,
                                        index=index)
    self.correct_infinite_values(dataset, self.outcome_attribute_name)
    return dataset
def run(self, specification=None, coefficients=None, dataset=None, **kwargs):
    """At the moment it's a mock-up model.

    Runs the parent RegressionModel over the proposal ``dataset`` (after
    registering its component set and a land_use_type_id variable in the
    pool), exponentiates log-scale outcomes, and writes the result into the
    outcome attribute of ``dataset``.  Returns the raw outcome array
    (possibly None or empty when the regression produced nothing).
    """
    proposal_component_set = create_from_proposals_and_template_components(
        dataset, self.dataset_pool.get_dataset('development_template_component'))
    self.dataset_pool.replace_dataset(proposal_component_set.get_dataset_name(),
                                      proposal_component_set)
    dataset.compute_variables(
        'land_use_type_id = development_project_proposal.disaggregate(development_template.land_use_type_id)',
        dataset_pool=self.dataset_pool)
    outcome = RegressionModel.run(self, specification, coefficients, dataset,
                                  **kwargs)
    # BUG FIX: was 'outcome == None', which compares elementwise on numpy
    # arrays (ambiguous truth value); an identity check is the correct test.
    if outcome is None or outcome.size <= 0:
        return outcome
    if re.search("^ln_", self.outcome_attribute_name):
        # outcome attr. name starting with 'ln_' means the model was fit on
        # logs; exponentiate and store under the un-prefixed name.
        self.outcome_attribute_name = self.outcome_attribute_name[3:]
        outcome = exp(outcome)
    if self.outcome_attribute_name not in dataset.get_known_attribute_names():
        dataset.add_primary_attribute(name=self.outcome_attribute_name,
                                      data=zeros(dataset.size(), dtype='f'))
    dataset.set_values_of_one_attribute(self.outcome_attribute_name, outcome)
    self.correct_infinite_values(dataset, self.outcome_attribute_name,
                                 clip_all_larger_values=True)
    return outcome
def __init__(self, proposal_set,
             sampler="opus_core.samplers.weighted_sampler",
             weight_string="exp_roi = exp(urbansim_parcel.development_project_proposal.expected_rate_of_return_on_investment)",
             filter_attribute=None, run_config=None, estimate_config=None,
             debuglevel=0, dataset_pool=None):
    """Sample project proposals from ``proposal_set``, weighted by the
    exponentiated rate of return on investment (``weight_string``)."""
    self.dataset_pool = self.create_dataset_pool(
        dataset_pool, pool_packages=['urbansim_parcel', 'urbansim', 'opus_core'])
    self.dataset_pool.add_datasets_if_not_included(
        {proposal_set.get_dataset_name(): proposal_set})
    self.proposal_set = proposal_set
    # Guard added by Jesse Ayers, MAG, 7/27/2009: with an empty proposal set
    # there is nothing to sample, so flag it and bail out early.
    self.positive_proposals = True
    if self.proposal_set.n <= 0:
        logger.log_status("Proposal Set size <= 0, no proposals to consider, skipping DPPSM.")
        self.positive_proposals = None
        return
    # Reuse a pooled component set when one exists; otherwise derive it from
    # the proposals and their template components.
    if self.dataset_pool.has_dataset("development_project_proposal_component"):
        self.proposal_component_set = self.dataset_pool.get_dataset(
            "development_project_proposal_component")
    else:
        self.proposal_component_set = create_from_proposals_and_template_components(
            proposal_set,
            self.dataset_pool.get_dataset('development_template_component'))
        self.dataset_pool.replace_dataset(
            self.proposal_component_set.get_dataset_name(),
            self.proposal_component_set)
    if weight_string is None:
        self.weight = ones(self.proposal_set.size(), dtype="float64")  # equal weight
    else:
        if weight_string not in proposal_set.get_known_attribute_names():
            proposal_set.compute_variables(weight_string,
                                           dataset_pool=self.dataset_pool)
        self.weight = self.proposal_set.get_attribute(weight_string)
def prepare_for_run(self, *args, **kwargs):
    """Delegate to the parent model, then register both the proposal set and
    its derived component set in the dataset pool so that later variable
    computations can see them."""
    proposals, spec, coefs = DevelopmentProjectProposalRegressionModel.prepare_for_run(
        self, *args, **kwargs)
    components = create_from_proposals_and_template_components(
        proposals, self.dataset_pool.get_dataset("development_template_component"))
    for ds in (components, proposals):
        self.dataset_pool.replace_dataset(ds.get_dataset_name(), ds)
    return (proposals, components, spec, coefs)
def __init__(self, proposal_set,
             weight_string="exp_roi=exp(urbansim_parcel.development_project_proposal.expected_rate_of_return_on_investment)",
             filter_attribute=None, run_config=None, estimate_config=None,
             debuglevel=0, dataset_pool=None):
    """Sample project proposals from ``proposal_set`` weighted by
    ``weight_string``; an optional ``filter_attribute`` expression is
    multiplied into the weights."""
    self.proposal_set = proposal_set
    # Nothing to sample from -- skip the rest of the setup.
    if self.proposal_set.n <= 0:
        return
    self.dataset_pool = self.create_dataset_pool(
        dataset_pool, pool_packages=['urbansim_parcel', 'urbansim', 'opus_core'])
    self.dataset_pool.add_datasets_if_not_included(
        {proposal_set.get_dataset_name(): proposal_set})
    # Reuse a pooled component set when available, otherwise build one.
    if self.dataset_pool.has_dataset("development_project_proposal_component"):
        self.proposal_component_set = self.dataset_pool.get_dataset(
            "development_project_proposal_component")
    else:
        self.proposal_component_set = create_from_proposals_and_template_components(
            proposal_set,
            self.dataset_pool.get_dataset('development_template_component'))
        self.dataset_pool.replace_dataset(
            self.proposal_component_set.get_dataset_name(),
            self.proposal_component_set)
    if weight_string is None:
        self.weight = ones(self.proposal_set.size(), dtype="float32")  # equal weight
    else:
        known = self.proposal_set.get_known_attribute_names()
        if VariableName(weight_string).get_alias() not in known:
            self.proposal_set.compute_variables(weight_string,
                                                dataset_pool=self.dataset_pool)
        self.weight = self.proposal_set.get_attribute(weight_string).astype("float64")
    ## handling of filter_attribute
    if filter_attribute is not None:
        if VariableName(filter_attribute).get_alias() not in self.proposal_set.get_known_attribute_names():
            self.proposal_set.compute_variables(filter_attribute)
        self.weight = self.weight * self.proposal_set.get_attribute(filter_attribute)
def run(self, specification, coefficients, dataset, index=None,
        chunk_specification=None, data_objects=None, run_config=None,
        debuglevel=0):
    """Run the regression over a proposal dataset and store the outcome.

    For info on the arguments see RegressionModel.  ``dataset`` should be an
    instance of DevelopmentProjectProposalDataset (if it isn't, one is
    created on the fly with parcel and development template); ``index`` and
    self.filter_attribute (passed in __init__) are relative to ``dataset``.
    Returns ``dataset`` with the outcome attribute set.
    """
    if data_objects is not None:
        self.dataset_pool.add_datasets_if_not_included(data_objects)
    # Build the component set for these proposals and register it so that
    # component-level variables can be computed during the regression.
    proposal_component_set = create_from_proposals_and_template_components(
        dataset,
        self.dataset_pool.get_dataset('development_template_component'))
    self.dataset_pool.replace_dataset(
        proposal_component_set.get_dataset_name(), proposal_component_set)
    #proposal_component_set.flush_dataset_if_low_memory_mode()
    #dataset.flush_dataset_if_low_memory_mode()
    result = RegressionModel.run(self, specification, coefficients, dataset,
                                 index=index,
                                 chunk_specification=chunk_specification,
                                 data_objects=data_objects,
                                 run_config=run_config,
                                 debuglevel=debuglevel)
    if re.search("^ln_", self.outcome_attribute_name):
        # if the outcome attr. name starts with 'ln_', the results will be
        # exponentiated and stored under the name without the prefix.
        self.outcome_attribute_name = self.outcome_attribute_name[3:len(self.outcome_attribute_name)]
        result = exp(result)
    if self.outcome_attribute_name not in dataset.get_known_attribute_names():
        # NOTE(review): 'defalult_value' matches the attribute's spelling on
        # the class; do not "correct" it here alone.
        dataset.add_primary_attribute(
            self.defalult_value + zeros(dataset.size()),
            self.outcome_attribute_name)
    dataset.set_values_of_one_attribute(self.outcome_attribute_name, result,
                                        index=index)
    self.correct_infinite_values(dataset, self.outcome_attribute_name)
    return dataset
def __init__(self, proposal_set,
             sampler="opus_core.samplers.weighted_sampler",
             weight_string="exp_roi = exp(urbansim_parcel.development_project_proposal.expected_rate_of_return_on_investment)",
             filter_attribute=None, run_config=None, estimate_config=None,
             debuglevel=0, dataset_pool=None):
    """Sample project proposals from ``proposal_set``, weighted by the
    exponentiated rate of return on investment (``weight_string``)."""
    self.dataset_pool = self.create_dataset_pool(
        dataset_pool, pool_packages=['urbansim_parcel', 'urbansim', 'opus_core'])
    self.dataset_pool.add_datasets_if_not_included(
        {proposal_set.get_dataset_name(): proposal_set})
    self.proposal_set = proposal_set
    # Code added by Jesse Ayers, MAG, 7/27/2009:
    # if there are no proposals, skip running the model and log a message.
    self.positive_proposals = True
    if self.proposal_set.n <= 0:
        logger.log_status(
            "Proposal Set size <= 0, no proposals to consider, skipping DPPSM."
        )
        self.positive_proposals = None
        return
    # Reuse a pooled component set when one exists; otherwise derive it from
    # the proposals and their template components.
    if not self.dataset_pool.has_dataset(
            "development_project_proposal_component"):
        self.proposal_component_set = create_from_proposals_and_template_components(
            proposal_set,
            self.dataset_pool.get_dataset('development_template_component'))
        self.dataset_pool.replace_dataset(
            self.proposal_component_set.get_dataset_name(),
            self.proposal_component_set)
    else:
        self.proposal_component_set = self.dataset_pool.get_dataset(
            "development_project_proposal_component")
    if weight_string is not None:
        # NOTE(review): this membership test uses the full expression string,
        # not its alias ('exp_roi'); a sibling __init__ in this file uses
        # VariableName(...).get_alias() instead -- confirm which is intended.
        if weight_string not in proposal_set.get_known_attribute_names():
            proposal_set.compute_variables(weight_string,
                                           dataset_pool=self.dataset_pool)
        self.weight = self.proposal_set.get_attribute(weight_string)
    else:
        self.weight = ones(self.proposal_set.size(), dtype="float64")  # equal weight
def __init__(
    self,
    proposal_set,
    weight_string="exp_roi=exp(urbansim_parcel.development_project_proposal.expected_rate_of_return_on_investment)",
    filter_attribute=None,
    run_config=None,
    estimate_config=None,
    debuglevel=0,
    dataset_pool=None,
):
    """This model samples project proposals from proposal set weighted by
    weight_string.  An optional ``filter_attribute`` expression is
    multiplied into the weights (e.g. to zero out filtered proposals)."""
    self.proposal_set = proposal_set
    if self.proposal_set.n <= 0:  ## to be skipped if proposal_set has no data
        return
    self.dataset_pool = self.create_dataset_pool(
        dataset_pool, pool_packages=["urbansim_parcel", "urbansim", "opus_core"]
    )
    self.dataset_pool.add_datasets_if_not_included({proposal_set.get_dataset_name(): proposal_set})
    # Reuse a pooled component set when available, otherwise build it from
    # the proposals and their template components.
    if not self.dataset_pool.has_dataset("development_project_proposal_component"):
        self.proposal_component_set = create_from_proposals_and_template_components(
            proposal_set, self.dataset_pool.get_dataset("development_template_component")
        )
        self.dataset_pool.replace_dataset(
            self.proposal_component_set.get_dataset_name(), self.proposal_component_set
        )
    else:
        self.proposal_component_set = self.dataset_pool.get_dataset("development_project_proposal_component")
    if weight_string is not None:
        # Compute the weight expression only when its alias is not already a
        # known attribute of the proposal set.
        if VariableName(weight_string).get_alias() not in self.proposal_set.get_known_attribute_names():
            self.proposal_set.compute_variables(weight_string, dataset_pool=self.dataset_pool)
        self.weight = self.proposal_set.get_attribute(weight_string).astype("float64")
    else:
        # NOTE(review): the explicit-weight branch above casts to float64
        # while this default uses float32 -- confirm the asymmetry is
        # intentional.
        self.weight = ones(self.proposal_set.size(), dtype="float32")  # equal weight
    ## handling of filter_attribute
    if filter_attribute is not None:
        if VariableName(filter_attribute).get_alias() not in self.proposal_set.get_known_attribute_names():
            self.proposal_set.compute_variables(filter_attribute)
        self.weight = self.weight * self.proposal_set.get_attribute(filter_attribute)
SimulationState().set_cache_directory(cache_directory) # SimulationState().set_current_time(year) SessionConfiguration(new_instance=True, package_order=package_order, in_storage=AttributeCache()) for year in range(base_year+1, end_year+1, 1): SimulationState().set_current_time(year) # SessionConfiguration(new_instance=True, # package_order=package_order, # in_storage=AttributeCache()) dataset_pool=SessionConfiguration().get_dataset_pool() dataset_pool.remove_all_datasets() # dataset_pool = DatasetPool( # package_order=['psrc','urbansim','opus_core'], # storage=AttributeCache()) proposal_set = dataset_pool.get_dataset("development_project_proposal") template_component = dataset_pool.get_dataset("development_template_component") from urbansim_parcel.datasets.development_project_proposal_component_dataset import create_from_proposals_and_template_components proposal_component = create_from_proposals_and_template_components(proposal_set, template_component, dataset_pool=dataset_pool) proposal_component.write_dataset(out_storage=AttributeCache().get_flt_storage_for_year(year), out_table_name="development_project_proposal_components")
def run(self, development_proposal_set, building_dataset, dataset_pool,
        buildings_to_be_demolished=[], consider_amount_built_in_parcels=False,
        current_year=None, development_proposal_component_set=None):
    """Construct buildings from active development proposals.

    Demolishes ``buildings_to_be_demolished``, then turns every active (or
    velocity-delayed) proposal into new building records appended to
    ``building_dataset``.  Parcels receiving buildings may have their
    land_use_type_id updated.  Unfinished proposals keep the 'with velocity'
    status for next year; finished ones are marked 'not available'.
    Returns ``development_proposal_set`` with updated status ids.

    NOTE(review): ``buildings_to_be_demolished=[]`` is a shared mutable
    default -- safe only if demolish_buildings never mutates it; a None
    sentinel would be the conventional fix.
    """
    self.demolish_buildings(buildings_to_be_demolished, building_dataset,
                            dataset_pool)
    if development_proposal_set.size() <= 0:
        logger.log_status("Proposal set is empty. Nothing to be constructed.")
        return development_proposal_set

    # load velocity function dataset; it is optional -- without it all
    # components are built at 100% this year
    try:
        velocity_function_set = dataset_pool.get_dataset("velocity_function")
    except:
        velocity_function_set = None

    # choose active projects (either newly active or delayed by velocity)
    is_active = development_proposal_set.get_attribute("status_id") == development_proposal_set.id_active
    is_delayed_or_active = logical_or(is_active,
        development_proposal_set.get_attribute("status_id") == development_proposal_set.id_with_velocity)
    active_idx = where(is_delayed_or_active)[0]
    if active_idx.size <= 0:
        logger.log_status("No new buildings built.")
        return development_proposal_set

    if current_year is None:
        current_year = SimulationState().get_current_time()

    # It is important that during this method no variable flushing happens, since
    # we create datasets of the same name but different sizes than existing
    # (possibly already flushed) datasets.  Save the flag and restore it at the end.
    flush_variables_current = SimulationState().get_flush_datasets()
    SimulationState().set_flush_datasets(False)

    active_proposal_set = DatasetSubset(development_proposal_set, active_idx)
    # create proposal_component_set from the active proposals, unless a
    # prebuilt one was passed in
    if development_proposal_component_set is None:
        proposal_component_set = create_from_proposals_and_template_components(active_proposal_set,
                                       dataset_pool.get_dataset('development_template_component'))
    else:
        proposal_component_set = development_proposal_component_set
    dataset_pool.replace_dataset(proposal_component_set.get_dataset_name(), proposal_component_set)

    # determine building types and corresponding unit names of the involved building_types
    building_type_id = proposal_component_set.get_attribute("building_type_id")
    building_type_set = dataset_pool.get_dataset("building_type")
    # unit_names = building_type_set.compute_variables([
    #     'building_type.disaggregate(generic_building_type.unit_name)'], dataset_pool=dataset_pool)
    unit_names = building_type_set.get_attribute("unit_name")
    # get unique values of the involved generic building types and unique unit names
    unique_building_types = unique(building_type_id)
    index_in_building_types = building_type_set.get_id_index(unique_building_types)
    unit_names = unit_names[index_in_building_types]
    is_residential = building_type_set.get_attribute("is_residential")[index_in_building_types]==1
    unique_unit_names = unique(unit_names)

    # determine existing units on parcels
    parcels = dataset_pool.get_dataset("parcel")
    parcels.compute_variables(["urbansim_parcel.parcel.vacant_land_area"] +
                              ["urbansim_parcel.parcel.residential_units"] +
                              map(lambda x: "urbansim_parcel.parcel.%s" % x, unique_unit_names),
                              dataset_pool=dataset_pool)
    parcel_is_lut_vacant = parcels.compute_variables(["urbansim_parcel.parcel.is_land_use_type_vacant"],
                                                     dataset_pool=dataset_pool)
    # parcel_lut is modified in place below; keep a copy to count changes
    parcel_lut = parcels.get_attribute("land_use_type_id")
    parcel_lut_before = parcel_lut.copy()
    if 'land_use_type_id' not in proposal_component_set.get_known_attribute_names():
        component_land_use_types = proposal_component_set.compute_variables([
            'development_project_proposal_component.disaggregate(development_template.land_use_type_id, [development_project_proposal])'],
            dataset_pool=dataset_pool)
    else:
        component_land_use_types = proposal_component_set['land_use_type_id']
    component_is_redevelopment = proposal_component_set.compute_variables([
        'development_project_proposal_component.disaggregate(development_project_proposal.is_redevelopment)'],
        dataset_pool=dataset_pool)

    # from the velocity function determine the amount to be built for each component (in %)
    if velocity_function_set is not None:
        cummulative_amount_of_development = proposal_component_set.compute_variables(
            ["urbansim_parcel.development_project_proposal_component.cummulative_amount_of_development"],
            dataset_pool=dataset_pool)
        percent_of_development_this_year = proposal_component_set.compute_variables(
            ["urbansim_parcel.development_project_proposal_component.percent_of_development_this_year"],
            dataset_pool=dataset_pool)
    else:
        # if there is no velocity function, all components have velocity of 100%
        percent_of_development_this_year = resize(array([100], dtype="int32"),
                                                  int(proposal_component_set.size()))

    # amount to be built this year, per component
    to_be_built = proposal_component_set.compute_variables([
        'urbansim_parcel.development_project_proposal_component.units_proposed'],
        dataset_pool=dataset_pool)/100.0 * percent_of_development_this_year

    # initializing for new buildings
    max_building_id = building_dataset.get_id_attribute().max()
    new_buildings = {}
    new_buildings["parcel_id"] = array([], dtype="int32")
    new_buildings["residential_units"] = array([], dtype="int32")
    new_buildings["non_residential_sqft"] = array([], dtype="int32")
    new_buildings["building_type_id"] = array([], dtype="int32")
    new_buildings["sqft_per_unit"] = array([], dtype=building_dataset.get_attribute("sqft_per_unit").dtype)
    new_buildings["land_area"] = array([], dtype=building_dataset.get_attribute("land_area").dtype)
    new_buildings["improvement_value"] = array([], dtype=building_dataset.get_attribute("improvement_value").dtype)
    new_buildings["template_id"] = array([], dtype="int32")
    sqft_per_unit = proposal_component_set.get_attribute("building_sqft_per_unit").astype(new_buildings["sqft_per_unit"].dtype)

    # Compute land_area_taken properly if velocity function is present
    if velocity_function_set is not None:
        larea_taken = proposal_component_set.compute_variables(
            ['urbansim_parcel.development_project_proposal_component.land_area_taken'],
            dataset_pool=dataset_pool)
        pct_dev_this_yr_conv = (percent_of_development_this_year / 100.0)
        land_area_taken = larea_taken * pct_dev_this_yr_conv
    else:
        land_area_taken = proposal_component_set.compute_variables(
            ['urbansim_parcel.development_project_proposal_component.land_area_taken'],
            dataset_pool=dataset_pool).astype(new_buildings["land_area"].dtype)
    construction_cost = proposal_component_set.compute_variables(
        ['urbansim_parcel.development_project_proposal_component.construction_cost'],
        dataset_pool=dataset_pool).astype(new_buildings["improvement_value"].dtype)
    template_ids = proposal_component_set.get_attribute("template_id")

    number_of_new_buildings = {}
    number_of_new_buildings_by_template_id = {}
    # iterate over building types that are unique over the involved proposals
    for itype in range(unique_building_types.size):
        this_building_type = unique_building_types[itype]
        number_of_new_buildings[this_building_type] = 0
        unit_name = unit_names[itype]
        if is_residential[itype]:
            unit_name = 'residential_units'
        component_index = where(building_type_id == this_building_type)[0]
        parcel_ids_in_components = proposal_component_set.get_attribute_by_index("parcel_id", component_index)
        unique_parcels = unique(parcel_ids_in_components)
        # iterate over involved parcels
        for parcel_id in unique_parcels:
            pidx = component_index[parcel_ids_in_components==parcel_id]
            parcel_index = parcels.get_id_index(parcel_id)
            # what is already built on this parcel
            if consider_amount_built_in_parcels:
                amount_built = parcels.get_attribute_by_index(unit_name, parcel_index)
            else:
                amount_built = 0
            # what is proposed on this parcel
            amount_proposed = to_be_built[pidx].sum()
            # build if needed
            if rint(amount_proposed) > amount_built:
                # bunit carries the built quantity, bnunit is zero-filled
                if unit_name == "residential_units":
                    bunit = "residential_units"
                    bnunit = "non_residential_sqft"
                else:
                    bnunit = "residential_units"
                    bunit = "non_residential_sqft"
                to_be_built_cumsum = rint(cumsum(to_be_built[pidx])).astype("int32")
                idx_to_be_built = where(to_be_built_cumsum > amount_built)[0]
                new_buildings["parcel_id"] = concatenate((new_buildings["parcel_id"],
                    array(idx_to_be_built.size * [parcel_id], dtype="int32")))
                new_buildings[bunit] = concatenate((new_buildings[bunit],
                    rint(to_be_built[pidx][idx_to_be_built]).astype(new_buildings[bunit].dtype)))
                new_buildings[bnunit] = concatenate((new_buildings[bnunit],
                    array(idx_to_be_built.size * [0], dtype="int32")))
                new_buildings["building_type_id"] = concatenate((new_buildings["building_type_id"],
                    array(idx_to_be_built.size * [this_building_type], dtype="int32")))
                new_buildings["sqft_per_unit"] = concatenate((new_buildings["sqft_per_unit"],
                    sqft_per_unit[pidx][idx_to_be_built]))
                new_buildings["land_area"] = concatenate((new_buildings["land_area"],
                    land_area_taken[pidx][idx_to_be_built]))
                new_buildings["improvement_value"] = concatenate((new_buildings["improvement_value"],
                    construction_cost[pidx][idx_to_be_built]))
                new_buildings["template_id"] = concatenate((new_buildings["template_id"],
                    template_ids[pidx][idx_to_be_built]))
                number_of_new_buildings[this_building_type] += idx_to_be_built.size
                # vacant or redeveloped parcels take on the component's land use type
                if parcel_is_lut_vacant[parcel_index] or component_is_redevelopment[pidx][idx_to_be_built][0]:
                    parcel_lut[parcel_index] = component_land_use_types[pidx][idx_to_be_built][0]
                # count number of buildings by template ids
                for icomp in range(idx_to_be_built.size):
                    if template_ids[pidx[idx_to_be_built[icomp]]] not in number_of_new_buildings_by_template_id.keys():
                        number_of_new_buildings_by_template_id[template_ids[pidx[idx_to_be_built[icomp]]]] = 0
                    number_of_new_buildings_by_template_id[template_ids[pidx[idx_to_be_built[icomp]]]] += 1

    # add created buildings to the existing building dataset
    buildings_id_name = building_dataset.get_id_name()[0]
    new_buildings[buildings_id_name] = max_building_id + arange(1, new_buildings["parcel_id"].size+1)
    new_buildings['year_built'] = resize(array([current_year], dtype="int32"), new_buildings["parcel_id"].size)
    new_buildings['job_capacity'] = resize(array([-1], dtype="int32"), new_buildings["parcel_id"].size)
    building_dataset.add_elements(new_buildings, require_all_attributes=False)
    if "zone_id" in building_dataset.get_known_attribute_names():
        zone_ids = building_dataset.compute_variables(['building.disaggregate(parcel.zone_id)'],
                                                      dataset_pool=dataset_pool)
        building_dataset.modify_attribute(name="zone_id", data=zone_ids)
    if "county" in building_dataset.get_known_attribute_names():
        county_ids = building_dataset.compute_variables(['building.disaggregate(parcel.county)'],
                                                        dataset_pool=dataset_pool)
        building_dataset.modify_attribute(name="county", data=county_ids)
    logger.log_status("%s new buildings built." % new_buildings["parcel_id"].size)
    for type_id in number_of_new_buildings.keys():
        logger.log_status("building type %s: %s" % (type_id, number_of_new_buildings[type_id]))
    logger.log_status("Number of new buildings by template ids:")
    logger.log_status(number_of_new_buildings_by_template_id)
    parcels["land_use_type_id"] = parcel_lut
    # NOTE(review): '<>' is Python-2-only syntax (same as '!=').
    logger.log_status("%s parcels have modified land_use_type_id."
                      % (parcel_lut_before <> parcel_lut).sum())

    # recompute the cummulative development amount
    if velocity_function_set is not None:
        # determine, if everything has been built or if it should be considered next year
        cummulative_amount_of_development = development_proposal_set.compute_variables([
            "development_project_proposal.aggregate(urbansim_parcel.development_project_proposal_component.cummulative_amount_of_development)/urbansim_parcel.development_project_proposal.number_of_components"],
            dataset_pool=dataset_pool)
    else:
        # if there is no velocity function, all components have velocity of 100%
        ## TODO: need to be reviewed, probably by Hana
        ## changed from proposal_component_set to development_proposal_set
        ## so it will have the same shape as is_delayed_or_active
        cummulative_amount_of_development = resize(array([100], dtype="int32"),
                                                   int(development_proposal_set.size()))
    will_be_delayed = cummulative_amount_of_development < 100
    velocity_idx = where(logical_and(is_delayed_or_active, will_be_delayed))[0]
    if velocity_idx.size > 0:
        # for the unfinished projects set the status_id to id_with_velocity
        development_proposal_set.set_values_of_one_attribute("status_id",
                                                             development_proposal_set.id_with_velocity,
                                                             index=velocity_idx)
    not_velocity_idx = where(logical_and(is_delayed_or_active, logical_not(will_be_delayed)))[0]
    if not_velocity_idx.size > 0:
        # for the remaining projects set the status_id to id_not_available
        development_proposal_set.set_values_of_one_attribute("status_id",
                                                             development_proposal_set.id_not_available,
                                                             index=not_velocity_idx)
    dataset_pool._remove_dataset(proposal_component_set.get_dataset_name())
    # switch flush_variables to the original value
    SimulationState().set_flush_datasets(flush_variables_current)
    return development_proposal_set
def run(self, development_proposal_set, building_dataset, dataset_pool,
        buildings_to_be_demolished=None, consider_amount_built_in_parcels=False,
        current_year=None):
    """Construct buildings from active development proposals.

    Demolishes ``buildings_to_be_demolished``, then turns every active (or
    velocity-delayed) proposal into new building records appended to
    ``building_dataset``.  Vacant parcels receiving buildings take on the
    component's land use type.  Unfinished proposals keep the
    'with velocity' status for next year; finished ones are marked
    'not available'.  Returns ``development_proposal_set`` with updated
    status ids.
    """
    # BUG FIX: the default was a shared mutable list ([]); use a None
    # sentinel so the default cannot leak state between calls.
    if buildings_to_be_demolished is None:
        buildings_to_be_demolished = []
    self.demolish_buildings(buildings_to_be_demolished, building_dataset,
                            dataset_pool)
    if development_proposal_set.size() <= 0:
        logger.log_status("Proposal set is empty. Nothing to be constructed.")
        return development_proposal_set

    # load velocity function dataset; it is optional -- without it all
    # components are built at 100% this year
    try:
        velocity_function_set = dataset_pool.get_dataset("velocity_function")
    except:
        velocity_function_set = None

    # choose active projects (either newly active or delayed by velocity)
    is_active = development_proposal_set.get_attribute("status_id") == development_proposal_set.id_active
    is_delayed_or_active = logical_or(
        is_active,
        development_proposal_set.get_attribute("status_id") == development_proposal_set.id_with_velocity)
    active_idx = where(is_delayed_or_active)[0]
    if active_idx.size <= 0:
        logger.log_status("No new buildings built.")
        return development_proposal_set

    if current_year is None:
        current_year = SimulationState().get_current_time()

    active_proposal_set = DatasetSubset(development_proposal_set, active_idx)
    # create proposal_component_set from the active proposals
    proposal_component_set = create_from_proposals_and_template_components(
        active_proposal_set,
        dataset_pool.get_dataset('development_template_component'))
    dataset_pool.replace_dataset(proposal_component_set.get_dataset_name(),
                                 proposal_component_set)

    # determine building types and corresponding unit names of the involved building_types
    building_type_id = proposal_component_set.get_attribute("building_type_id")
    building_type_set = dataset_pool.get_dataset("building_type")
    unit_names = building_type_set.get_attribute("unit_name")
    # get unique values of the involved generic building types and unique unit names
    unique_building_types = unique(building_type_id)
    index_in_building_types = building_type_set.get_id_index(unique_building_types)
    unit_names = unit_names[index_in_building_types]
    is_residential = building_type_set.get_attribute("is_residential")[index_in_building_types] == 1
    unique_unit_names = unique(unit_names)

    # determine existing units on parcels
    parcels = dataset_pool.get_dataset("parcel")
    # IDIOM FIX: list comprehension instead of 'list + map(...)', which
    # breaks on Python 3 (map returns an iterator there).
    parcels.compute_variables(
        ["urbansim_parcel.parcel.vacant_land_area"] +
        ["urbansim_parcel.parcel.residential_units"] +
        ["urbansim_parcel.parcel.%s" % x for x in unique_unit_names],
        dataset_pool=dataset_pool)
    parcel_is_lut_vacant = parcels.compute_variables(
        ["urbansim_parcel.parcel.is_land_use_type_vacant"],
        dataset_pool=dataset_pool)
    parcel_lut = parcels.get_attribute("land_use_type_id")
    component_land_use_types = proposal_component_set.compute_variables(
        ['development_project_proposal_component.disaggregate(development_template.land_use_type_id, [development_project_proposal])'],
        dataset_pool=dataset_pool)

    # from the velocity function determine the amount to be built for each component (in %)
    if velocity_function_set is not None:
        cummulative_amount_of_development = proposal_component_set.compute_variables(
            ["urbansim_parcel.development_project_proposal_component.cummulative_amount_of_development"],
            dataset_pool=dataset_pool)
        percent_of_development_this_year = proposal_component_set.compute_variables(
            ["urbansim_parcel.development_project_proposal_component.percent_of_development_this_year"],
            dataset_pool=dataset_pool)
    else:
        # if there is no velocity function, all components have velocity of 100%
        percent_of_development_this_year = resize(array([100], dtype="int32"),
                                                  proposal_component_set.size())

    # amount to be built this year, per component
    to_be_built = proposal_component_set.compute_variables(
        ['urbansim_parcel.development_project_proposal_component.units_proposed'],
        dataset_pool=dataset_pool) / 100.0 * percent_of_development_this_year

    # initializing for new buildings
    max_building_id = building_dataset.get_id_attribute().max()
    new_buildings = {}
    new_buildings["parcel_id"] = array([], dtype="int32")
    new_buildings["residential_units"] = array([], dtype="int32")
    new_buildings["non_residential_sqft"] = array([], dtype="int32")
    new_buildings["building_type_id"] = array([], dtype="int32")
    new_buildings["sqft_per_unit"] = array(
        [], dtype=building_dataset.get_attribute("sqft_per_unit").dtype)
    new_buildings["land_area"] = array(
        [], dtype=building_dataset.get_attribute("land_area").dtype)
    new_buildings["improvement_value"] = array(
        [], dtype=building_dataset.get_attribute("improvement_value").dtype)
    new_buildings["template_id"] = array([], dtype="int32")
    sqft_per_unit = proposal_component_set.get_attribute(
        "building_sqft_per_unit").astype(new_buildings["sqft_per_unit"].dtype)

    # Compute land_area_taken properly if velocity function is present
    if velocity_function_set is not None:
        larea_taken = proposal_component_set.compute_variables(
            ['urbansim_parcel.development_project_proposal_component.land_area_taken'],
            dataset_pool=dataset_pool)
        pct_dev_this_yr_conv = (percent_of_development_this_year / 100.0)
        land_area_taken = larea_taken * pct_dev_this_yr_conv
    else:
        land_area_taken = proposal_component_set.compute_variables(
            ['urbansim_parcel.development_project_proposal_component.land_area_taken'],
            dataset_pool=dataset_pool).astype(new_buildings["land_area"].dtype)
    construction_cost = proposal_component_set.compute_variables(
        ['urbansim_parcel.development_project_proposal_component.construction_cost'],
        dataset_pool=dataset_pool).astype(new_buildings["improvement_value"].dtype)
    template_ids = proposal_component_set.get_attribute("template_id")

    number_of_new_buildings = {}
    number_of_new_buildings_by_template_id = {}
    # iterate over building types that are unique over the involved proposals
    for itype in range(unique_building_types.size):
        this_building_type = unique_building_types[itype]
        number_of_new_buildings[this_building_type] = 0
        unit_name = unit_names[itype]
        if is_residential[itype]:
            unit_name = 'residential_units'
        component_index = where(building_type_id == this_building_type)[0]
        parcel_ids_in_components = proposal_component_set.get_attribute_by_index(
            "parcel_id", component_index)
        unique_parcels = unique(parcel_ids_in_components)
        # iterate over involved parcels
        for parcel_id in unique_parcels:
            pidx = component_index[parcel_ids_in_components == parcel_id]
            parcel_index = parcels.get_id_index(parcel_id)
            # what is already built on this parcel
            if consider_amount_built_in_parcels:
                amount_built = parcels.get_attribute_by_index(unit_name, parcel_index)
            else:
                amount_built = 0
            # what is proposed on this parcel
            amount_proposed = to_be_built[pidx].sum()
            # build if needed
            if rint(amount_proposed) > amount_built:
                # bunit carries the built quantity, bnunit is zero-filled
                if unit_name == "residential_units":
                    bunit = "residential_units"
                    bnunit = "non_residential_sqft"
                else:
                    bnunit = "residential_units"
                    bunit = "non_residential_sqft"
                to_be_built_cumsum = rint(cumsum(to_be_built[pidx])).astype("int32")
                idx_to_be_built = where(to_be_built_cumsum > amount_built)[0]
                new_buildings["parcel_id"] = concatenate(
                    (new_buildings["parcel_id"],
                     array(idx_to_be_built.size * [parcel_id], dtype="int32")))
                new_buildings[bunit] = concatenate(
                    (new_buildings[bunit],
                     rint(to_be_built[pidx][idx_to_be_built]).astype(new_buildings[bunit].dtype)))
                new_buildings[bnunit] = concatenate(
                    (new_buildings[bnunit],
                     array(idx_to_be_built.size * [0], dtype="int32")))
                new_buildings["building_type_id"] = concatenate(
                    (new_buildings["building_type_id"],
                     array(idx_to_be_built.size * [this_building_type], dtype="int32")))
                new_buildings["sqft_per_unit"] = concatenate(
                    (new_buildings["sqft_per_unit"],
                     sqft_per_unit[pidx][idx_to_be_built]))
                new_buildings["land_area"] = concatenate(
                    (new_buildings["land_area"],
                     land_area_taken[pidx][idx_to_be_built]))
                new_buildings["improvement_value"] = concatenate(
                    (new_buildings["improvement_value"],
                     construction_cost[pidx][idx_to_be_built]))
                new_buildings["template_id"] = concatenate(
                    (new_buildings["template_id"],
                     template_ids[pidx][idx_to_be_built]))
                number_of_new_buildings[this_building_type] += idx_to_be_built.size
                # vacant parcels take on the component's land use type
                if parcel_is_lut_vacant[parcel_index]:
                    parcel_lut[parcel_index] = component_land_use_types[pidx][idx_to_be_built][0]
                # count number of buildings by template ids
                for icomp in range(idx_to_be_built.size):
                    if template_ids[pidx[idx_to_be_built[icomp]]] not in number_of_new_buildings_by_template_id.keys():
                        number_of_new_buildings_by_template_id[template_ids[pidx[idx_to_be_built[icomp]]]] = 0
                    number_of_new_buildings_by_template_id[template_ids[pidx[idx_to_be_built[icomp]]]] += 1

    # add created buildings to the existing building dataset
    buildings_id_name = building_dataset.get_id_name()[0]
    new_buildings[buildings_id_name] = max_building_id + arange(
        1, new_buildings["parcel_id"].size + 1)
    new_buildings['year_built'] = resize(array([current_year], dtype="int32"),
                                         new_buildings["parcel_id"].size)
    building_dataset.add_elements(new_buildings, require_all_attributes=False)
    if "zone_id" in building_dataset.get_known_attribute_names():
        zone_ids = building_dataset.compute_variables(
            ['building.disaggregate(parcel.zone_id)'], dataset_pool=dataset_pool)
        building_dataset.modify_attribute(name="zone_id", data=zone_ids)
    if "county" in building_dataset.get_known_attribute_names():
        county_ids = building_dataset.compute_variables(
            ['building.disaggregate(parcel.county)'], dataset_pool=dataset_pool)
        building_dataset.modify_attribute(name="county", data=county_ids)
    logger.log_status("%s new buildings built." % new_buildings["parcel_id"].size)
    for type_id in number_of_new_buildings.keys():
        logger.log_status("building type %s: %s" % (type_id, number_of_new_buildings[type_id]))
    logger.log_status("Number of new buildings by template ids:")
    logger.log_status(number_of_new_buildings_by_template_id)

    # recompute the cummulative development amount
    if velocity_function_set is not None:
        # determine, if everything has been built or if it should be considered next year
        cummulative_amount_of_development = development_proposal_set.compute_variables(
            ["development_project_proposal.aggregate(urbansim_parcel.development_project_proposal_component.cummulative_amount_of_development)/urbansim_parcel.development_project_proposal.number_of_components"],
            dataset_pool=dataset_pool)
    else:
        # if there is no velocity function, all components have velocity of 100%
        ## TODO: need to be reviewed, probably by Hana
        ## changed from proposal_component_set to development_proposal_set
        ## so it will have the same shape as is_delayed_or_active
        cummulative_amount_of_development = resize(array([100], dtype="int32"),
                                                   development_proposal_set.size())
    will_be_delayed = cummulative_amount_of_development < 100
    velocity_idx = where(logical_and(is_delayed_or_active, will_be_delayed))[0]
    if velocity_idx.size > 0:
        # for the unfinished projects set the status_id to id_with_velocity
        development_proposal_set.set_values_of_one_attribute(
            "status_id", development_proposal_set.id_with_velocity,
            index=velocity_idx)
    not_velocity_idx = where(logical_and(is_delayed_or_active,
                                         logical_not(will_be_delayed)))[0]
    if not_velocity_idx.size > 0:
        # for the remaining projects set the status_id to id_not_available
        development_proposal_set.set_values_of_one_attribute(
            "status_id", development_proposal_set.id_not_available,
            index=not_velocity_idx)
    dataset_pool._remove_dataset(proposal_component_set.get_dataset_name())
    return development_proposal_set
# NOTE(review): fragment of a larger method (its `def` lies outside this chunk,
# and the final statement continues past it); formatting reconstructed from a
# whitespace-mangled source — code tokens unchanged, only comments added.
#
# Creates redevelopment proposals for parcels flagged in `is_redevelopment`:
# temporarily zeroes the land_area of buildings on those parcels, generates
# proposals from templates, optionally drops proposals smaller than the
# existing building, and merges the result into `proposal_set`.

buildings_parcel_ids = buildings.get_attribute("parcel_id")
index_in_parcels = parcels.get_id_index(buildings_parcel_ids)
# Buildings sitting on redevelopment parcels are treated as demolished below.
demolished_buildings_index = where(is_redevelopment[index_in_parcels])[0]
# Zero their land_area so proposal generation sees the land as available;
# rolled back at the end of this fragment.
buildings.set_values_of_one_attribute("land_area",
                                      zeros(demolished_buildings_index.size),
                                      index=demolished_buildings_index)

redev_proposal_set = create_from_parcel_and_development_template(parcels, templates,
                                                                 filter_attribute=self.filter,
                                                                 parcel_index=where(is_redevelopment)[0],
                                                                 template_index=index2,
                                                                 proposed_units_variable=proposed_units_variable,
                                                                 dataset_pool=dataset_pool,
                                                                 resources=kwargs.get("resources", None))

if(kwargs.get('accept_only_larger_proposals_for_redevelopment', False)):
    # remove proposals that are smaller than the current building in the parcel
    proposal_component_set = create_from_proposals_and_template_components(redev_proposal_set,
                                                                           dataset_pool.get_dataset('development_template_component'))
    # Register both temporary datasets so the variable below can resolve.
    dataset_pool.replace_dataset(proposal_component_set.get_dataset_name(), proposal_component_set)
    dataset_pool.replace_dataset(redev_proposal_set.get_dataset_name(), redev_proposal_set)
    remove_proposals = where(redev_proposal_set.compute_variables(['urbansim_parcel.development_project_proposal.building_sqft <= development_project_proposal.disaggregate(urbansim_parcel.parcel.building_sqft)'],
                                                                  dataset_pool=dataset_pool))[0]
    if remove_proposals.size > 0:
        redev_proposal_set.remove_elements(remove_proposals)
        logger.log_status('%s proposals smaller than existing buildings, therefore removed.' % remove_proposals.size)
    # Clean the temporary datasets back out of the pool.
    dataset_pool._remove_dataset(redev_proposal_set.get_dataset_name())
    dataset_pool._remove_dataset(proposal_component_set.get_dataset_name())

# Mark the new proposals as redevelopment and merge them into the main set.
redev_proposal_set.add_attribute(ones(redev_proposal_set.size(), dtype=int16),
                                 "is_redevelopment", AttributeType.PRIMARY)
proposal_set.join_by_rows(redev_proposal_set, require_all_attributes=False,
                          change_ids_if_not_unique=True)

###roll back land_area of buildings
# NOTE(review): `land_area` is presumably a copy of the original attribute
# captured before the zeroing above — its assignment is not visible in this
# chunk; confirm. The call below continues beyond this chunk.
buildings.set_values_of_one_attribute("land_area", land_area[demolished_buildings_index],
SimulationState().set_cache_directory(cache_directory) # SimulationState().set_current_time(year) SessionConfiguration(new_instance=True, package_order=package_order, in_storage=AttributeCache()) for year in range(base_year + 1, end_year + 1, 1): SimulationState().set_current_time(year) # SessionConfiguration(new_instance=True, # package_order=package_order, # in_storage=AttributeCache()) dataset_pool = SessionConfiguration().get_dataset_pool() dataset_pool.remove_all_datasets() # dataset_pool = DatasetPool( # package_order=['psrc','urbansim','opus_core'], # storage=AttributeCache()) proposal_set = dataset_pool.get_dataset("development_project_proposal") template_component = dataset_pool.get_dataset( "development_template_component") from urbansim_parcel.datasets.development_project_proposal_component_dataset import create_from_proposals_and_template_components proposal_component = create_from_proposals_and_template_components( proposal_set, template_component, dataset_pool=dataset_pool) proposal_component.write_dataset( out_storage=AttributeCache().get_flt_storage_for_year(year), out_table_name="development_project_proposal_components")