def test_agent_times_choice(self):
    """agent_times_choice should select, per agent row, the agent attribute
    matched to each choice column; recomputation after new attributes are
    added must pick up the new match (attr_3 -> attr_tran here)."""
    expression = 'agent_x_choice.agent_times_choice(attr)'
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='agents',
        table_data={
            'id': array([1, 2, 3, 4, 5]),
            'attr_2': array([3, 2, 4, 10, 20]),
            'attr_3': array([10, 100, 1000, 500, 0]),
            'attr_4': array([100, 500, 0, 20, -30]),
        })
    storage.write_table(table_name='choices',
                        table_data={'id': array([1, 2, 3, 4])})
    agents = Dataset(in_storage=storage, in_table_name='agents',
                     dataset_name='agent', id_name='id')
    choices = Dataset(in_storage=storage, in_table_name='choices',
                      dataset_name='choice', id_name='id')
    interactions = InteractionDataset(dataset1=agents, dataset2=choices,
                                      index1=array([0, 1, 3, 4]),
                                      index2=array([1, 2, 3]))
    result = interactions.compute_variables(expression)
    expected = array([[3, 10, 100],
                      [2, 100, 500],
                      [10, 500, 20],
                      [20, 0, -30]])
    self.assertEqual(ma.allequal(result, expected), True)
    # Touch an attribute so the cached expression is recomputed below.
    agents.touch_attribute('attr_2')
    choices.add_primary_attribute(name='name',
                                  data=array(['bus', 'car', 'tran', 'walk']))
    agents.add_primary_attribute(name='attr_tran',
                                 data=array([100, 1000, 10000, 5000, 10]))
    result = interactions.compute_variables(expression)
    expected = array([[3, 100, 100],
                      [2, 1000, 500],
                      [10, 5000, 20],
                      [20, 10, -30]])
    self.assertEqual(ma.allequal(result, expected), True)
def test_agent_times_choice(self):
    """Verify agent_times_choice both on the initial attributes and after
    new primary attributes ('name' on choices, 'attr_tran' on agents) are
    added and the expression is recomputed."""
    expression = "agent_x_choice.agent_times_choice(attr)"
    storage = StorageFactory().get_storage("dict_storage")
    agent_data = {
        "id": array([1, 2, 3, 4, 5]),
        "attr_2": array([3, 2, 4, 10, 20]),
        "attr_3": array([10, 100, 1000, 500, 0]),
        "attr_4": array([100, 500, 0, 20, -30]),
    }
    storage.write_table(table_name="agents", table_data=agent_data)
    storage.write_table(table_name="choices", table_data={"id": array([1, 2, 3, 4])})

    agents = Dataset(in_storage=storage, in_table_name="agents",
                     dataset_name="agent", id_name="id")
    choices = Dataset(in_storage=storage, in_table_name="choices",
                      dataset_name="choice", id_name="id")
    ids = InteractionDataset(dataset1=agents, dataset2=choices,
                             index1=array([0, 1, 3, 4]),
                             index2=array([1, 2, 3]))

    first_expected = array([[3, 10, 100],
                            [2, 100, 500],
                            [10, 500, 20],
                            [20, 0, -30]])
    self.assertTrue(ma.allequal(ids.compute_variables(expression), first_expected))

    # Invalidate the cache so the expression is evaluated again below.
    agents.touch_attribute("attr_2")
    choices.add_primary_attribute(name="name",
                                  data=array(["bus", "car", "tran", "walk"]))
    agents.add_primary_attribute(name="attr_tran",
                                 data=array([100, 1000, 10000, 5000, 10]))

    second_expected = array([[3, 100, 100],
                             [2, 1000, 500],
                             [10, 5000, 20],
                             [20, 10, -30]])
    self.assertTrue(ma.allequal(ids.compute_variables(expression), second_expected))
def test_match_agent_attribute_to_choice(self):
    """match_agent_attribute_to_choice should return the matched attribute
    matrix plus its dependency list, and should re-match after new primary
    attributes are added (attr_3 column replaced by attr_tran)."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='agents',
        table_data={
            'id': array([1, 2, 3, 4, 5]),
            'attr_2': array([3, 2, 4, 10, 20]),
            'attr_3': array([10, 100, 1000, 500, 0]),
            'attr_4': array([100, 500, 0, 20, -30]),
        })
    storage.write_table(table_name='choices',
                        table_data={'id': array([1, 2, 3, 4])})
    agents = Dataset(in_storage=storage, in_table_name='agents',
                     dataset_name='agent', id_name='id')
    choices = Dataset(in_storage=storage, in_table_name='choices',
                      dataset_name='choice', id_name='id')
    interactions = InteractionDataset(dataset1=agents, dataset2=choices,
                                      index1=array([0, 1, 3, 4]),
                                      index2=array([1, 2, 3]))

    result, dependencies = interactions.match_agent_attribute_to_choice('attr')
    expected = array([[3, 10, 100],
                      [2, 100, 500],
                      [10, 500, 20],
                      [20, 0, -30]])
    self.assertEqual(ma.allequal(result, expected), True)
    expected_deps = array(['agent.attr_2', 'agent.attr_3', 'agent.attr_4'])
    self.assertEqual((array(dependencies) == expected_deps).sum() == 3, True)

    choices.add_primary_attribute(name='name',
                                  data=array(['bus', 'car', 'tran', 'walk']))
    agents.add_primary_attribute(name='attr_tran',
                                 data=array([100, 1000, 10000, 5000, 10]))

    result, dependencies = interactions.match_agent_attribute_to_choice('attr')
    expected = array([[3, 100, 100],
                      [2, 1000, 500],
                      [10, 5000, 20],
                      [20, 10, -30]])
    self.assertEqual(ma.allequal(result, expected), True)
    expected_deps = array(['agent.attr_2', 'agent.attr_tran', 'agent.attr_4'])
    self.assertEqual((array(dependencies) == expected_deps).sum() == 3, True)
def test_match_agent_attribute_to_choice(self):
    """Exercise match_agent_attribute_to_choice('attr') twice: once with the
    original agent attributes and once after adding 'name'/'attr_tran'
    primary attributes, checking both values and reported dependencies."""
    storage = StorageFactory().get_storage('dict_storage')
    agent_table = {
        'id': array([1, 2, 3, 4, 5]),
        'attr_2': array([3, 2, 4, 10, 20]),
        'attr_3': array([10, 100, 1000, 500, 0]),
        'attr_4': array([100, 500, 0, 20, -30]),
    }
    storage.write_table(table_name='agents', table_data=agent_table)
    storage.write_table(table_name='choices',
                        table_data={'id': array([1, 2, 3, 4])})

    agents = Dataset(in_storage=storage, in_table_name='agents',
                     dataset_name='agent', id_name='id')
    choices = Dataset(in_storage=storage, in_table_name='choices',
                      dataset_name='choice', id_name='id')
    ids = InteractionDataset(dataset1=agents, dataset2=choices,
                             index1=array([0, 1, 3, 4]),
                             index2=array([1, 2, 3]))

    values, deps = ids.match_agent_attribute_to_choice('attr')
    self.assertTrue(ma.allequal(values, array([[3, 10, 100],
                                               [2, 100, 500],
                                               [10, 500, 20],
                                               [20, 0, -30]])))
    wanted_deps = array(['agent.attr_2', 'agent.attr_3', 'agent.attr_4'])
    self.assertTrue((array(deps) == wanted_deps).sum() == 3)

    choices.add_primary_attribute(name='name',
                                  data=array(['bus', 'car', 'tran', 'walk']))
    agents.add_primary_attribute(name='attr_tran',
                                 data=array([100, 1000, 10000, 5000, 10]))

    values, deps = ids.match_agent_attribute_to_choice('attr')
    self.assertTrue(ma.allequal(values, array([[3, 100, 100],
                                               [2, 1000, 500],
                                               [10, 5000, 20],
                                               [20, 10, -30]])))
    wanted_deps = array(['agent.attr_2', 'agent.attr_tran', 'agent.attr_4'])
    self.assertTrue((array(deps) == wanted_deps).sum() == 3)
id_name='household_id', dataset_name='household') households.get_attribute_names() households.get_id_attribute() households.size() households.get_attribute("income") households.get_attribute_names() households.load_dataset() households.get_attribute_names() #households.plot_histogram("income", bins = 10) #households.r_histogram("income") #households.r_scatter("persons", "income") households.correlation_coefficient("persons", "income") households.correlation_matrix(["persons", "income"]) households.summary() households.add_primary_attribute(data=[4,6,9,2,4,8,2,1,3,2], name="location") households.get_attribute_names() households.modify_attribute(name="location", data=[0,0], index=[0,1]) households.get_attribute("location") households.get_data_element_by_id(5).location #households.write_dataset(out_storage=storage, out_table_name="households_output") households.get_dataset_name() # Working with models from opus_core.choice_model import ChoiceModel choicemodel = ChoiceModel(choice_set=[1,2,3], utilities = "opus_core.linear_utilities", probabilities = "opus_core.mnl_probabilities",
def prepare_for_estimate(self, add_member_prefix=True, specification_dict=None,
                         specification_storage=None, specification_table=None,
                         building_set=None, buildings_for_estimation_storage=None,
                         buildings_for_estimation_table=None, constants=None,
                         base_year=0, building_categories=None,
                         location_id_variable=None, join_datasets=False,
                         data_objects=None, **kwargs):
    """Prepare a model specification and the building index for estimation.

    Returns a tuple ``(specification, index)``: ``specification`` from
    ``get_specification_for_estimation`` and ``index`` — the indices into
    ``building_set`` of the rows to estimate on (``None`` when no building
    set is given).  When ``buildings_for_estimation_storage`` is provided,
    a separate estimation set is loaded, restricted to buildings whose
    ``scheduled_year`` falls within ``constants['recent_years']`` of
    ``base_year``, and either joined onto ``building_set`` or matched by id.

    ``building_categories`` and ``**kwargs`` are accepted for interface
    compatibility but unused here.
    """
    if building_set is not None:
        if location_id_variable is not None:
            building_set.compute_variables(location_id_variable,
                                           resources=Resources(data_objects))
    # create agents for estimation
    if buildings_for_estimation_storage is not None:
        estimation_set = Dataset(in_storage=buildings_for_estimation_storage,
                                 in_table_name=buildings_for_estimation_table,
                                 id_name=building_set.get_id_name(),
                                 dataset_name=building_set.get_dataset_name())
        if location_id_variable:
            estimation_set.compute_variables(location_id_variable,
                                             resources=Resources(data_objects))
            # needs to be a primary attribute because of the join method below
            # BUGFIX: use get_alias() as the sibling prepare_for_estimate
            # implementations do; VariableName has no plain alias() accessor.
            estimation_set.add_primary_attribute(
                estimation_set.get_attribute(location_id_variable),
                VariableName(location_id_variable).get_alias())
        # Keep only buildings scheduled within the recent-years window.
        years = estimation_set.get_attribute("scheduled_year")
        recent_years = constants['recent_years']
        indicator = zeros(estimation_set.size())
        for year in range(base_year - recent_years, base_year + 1):
            indicator = logical_or(indicator, years == year)
        idx = where(logical_not(indicator))[0]
        estimation_set.remove_elements(idx)
        if join_datasets:
            building_set.join_by_rows(estimation_set,
                                      require_all_attributes=False,
                                      change_ids_if_not_unique=True)
            # BUGFIX: upper bound was agent_set.size(), but agent_set is not
            # defined in this method (NameError); the appended estimation rows
            # end at building_set.size().
            index = arange(building_set.size() - estimation_set.size(),
                           building_set.size())
        else:
            index = building_set.get_id_index(
                estimation_set.get_id_attribute())
    else:
        if building_set is not None:
            index = arange(building_set.size())
        else:
            index = None
    if add_member_prefix:
        specification_table = self.group_member.add_member_prefix_to_table_names(
            [specification_table])
    from opus_core.model import get_specification_for_estimation
    specification = get_specification_for_estimation(specification_dict,
                                                     specification_storage,
                                                     specification_table)
    return (specification, index)
def prepare_for_estimate(self, specification_dict=None, specification_storage=None,
                         specification_table=None, agent_set=None,
                         agents_for_estimation_storage=None,
                         agents_for_estimation_table=None, join_datasets=False,
                         index_to_unplace=None, portion_to_unplace=1.0,
                         compute_lambda=False, grouping_location_set=None,
                         movers_variable=None, movers_index=None, filter=None,
                         location_id_variable=None, data_objects=None):
    """Put 'location_id_variable' always in, if the location id is to be
    computed on the estimation set, i.e. if it is not a primary attribute of
    the estimation set. Set 'index_to_unplace' to None, if 'compute_lambda'
    is True. In such a case, the annual supply is estimated without unplacing
    agents. 'grouping_location_set', 'movers_variable' and 'movers_index'
    must be given, if 'compute_lambda' is True.

    Returns a tuple (specification, index) where ``index`` selects the rows
    of ``agent_set`` used for estimation (None if no agent set is given).
    """
    from opus_core.model import get_specification_for_estimation
    from urbansim.functions import compute_supply_and_add_to_location_set
    # BUGFIX: the default was a shared mutable dict ({}); use a None sentinel
    # so callers omitting data_objects cannot leak state between calls.
    if data_objects is None:
        data_objects = {}
    specification = get_specification_for_estimation(specification_dict,
                                                     specification_storage,
                                                     specification_table)
    if (agent_set is not None) and (index_to_unplace is not None):
        if self.location_id_string is not None:
            agent_set.compute_variables(self.location_id_string,
                                        resources=Resources(data_objects))
        if portion_to_unplace < 1:
            unplace_size = int(portion_to_unplace * index_to_unplace.size)
            end_index_to_unplace = sample_noreplace(index_to_unplace, unplace_size)
        else:
            end_index_to_unplace = index_to_unplace
        logger.log_status("Unplace " + str(end_index_to_unplace.size) + " agents.")
        # Mark the unplaced agents with location id -1.
        agent_set.modify_attribute(self.choice_set.get_id_name()[0],
                                   resize(array([-1]), end_index_to_unplace.size),
                                   end_index_to_unplace)
    if compute_lambda:
        movers = zeros(agent_set.size(), dtype="bool8")
        if movers_index is not None:
            movers[movers_index] = 1
        agent_set.add_primary_attribute(movers, "potential_movers")
        self.estimate_config["weights_for_estimation_string"] = \
            self.estimate_config["weights_for_estimation_string"] + "_from_lambda"
        compute_supply_and_add_to_location_set(
            self.choice_set, grouping_location_set,
            self.run_config["number_of_units_string"],
            self.run_config["capacity_string"],
            movers_variable,
            self.estimate_config["weights_for_estimation_string"],
            resources=Resources(data_objects))
    # create agents for estimation
    if (agents_for_estimation_storage is not None) and (agents_for_estimation_table is not None):
        estimation_set = Dataset(in_storage=agents_for_estimation_storage,
                                 in_table_name=agents_for_estimation_table,
                                 id_name=agent_set.get_id_name(),
                                 dataset_name=agent_set.get_dataset_name())
        if location_id_variable is not None:
            estimation_set.compute_variables(location_id_variable,
                                             resources=Resources(data_objects))
            # needs to be a primary attribute because of the join method below
            estimation_set.add_primary_attribute(
                estimation_set.get_attribute(location_id_variable),
                VariableName(location_id_variable).get_alias())
        if filter:
            values = estimation_set.compute_variables(filter,
                                                      resources=Resources(data_objects))
            index = where(values > 0)[0]
            estimation_set.subset_by_index(index,
                                           flush_attributes_if_not_loaded=False)
        if join_datasets:
            agent_set.join_by_rows(estimation_set,
                                   require_all_attributes=False,
                                   change_ids_if_not_unique=True)
            index = arange(agent_set.size() - estimation_set.size(),
                           agent_set.size())
        else:
            index = agent_set.get_id_index(estimation_set.get_id_attribute())
    else:
        if agent_set is not None:
            if filter is not None:
                values = agent_set.compute_variables(filter,
                                                     resources=Resources(data_objects))
                index = where(values > 0)[0]
            else:
                index = arange(agent_set.size())
        else:
            index = None
    return (specification, index)
def prepare_for_estimate(
    self,
    add_member_prefix=True,
    specification_dict=None,
    specification_storage=None,
    specification_table=None,
    building_set=None,
    buildings_for_estimation_storage=None,
    buildings_for_estimation_table=None,
    constants=None,
    base_year=0,
    building_categories=None,
    location_id_variable=None,
    join_datasets=False,
    data_objects=None,
    **kwargs
):
    """Prepare a model specification and the building index for estimation.

    Returns ``(specification, index)``: ``specification`` from
    ``get_specification_for_estimation``; ``index`` — indices into
    ``building_set`` of the rows to estimate on (``None`` when no building
    set is given).  If an estimation storage/table is supplied, the
    estimation set is restricted to buildings whose ``scheduled_year`` lies
    within ``constants["recent_years"]`` of ``base_year``.

    ``building_categories`` and ``**kwargs`` are accepted for interface
    compatibility but unused here.
    """
    if building_set is not None:
        if location_id_variable is not None:
            building_set.compute_variables(location_id_variable, resources=Resources(data_objects))
    # create agents for estimation
    if buildings_for_estimation_storage is not None:
        estimation_set = Dataset(
            in_storage=buildings_for_estimation_storage,
            in_table_name=buildings_for_estimation_table,
            id_name=building_set.get_id_name(),
            dataset_name=building_set.get_dataset_name(),
        )
        if location_id_variable:
            estimation_set.compute_variables(location_id_variable, resources=Resources(data_objects))
            # needs to be a primary attribute because of the join method below
            # BUGFIX: use get_alias() as the sibling prepare_for_estimate
            # implementations do; VariableName has no plain alias() accessor.
            estimation_set.add_primary_attribute(
                estimation_set.get_attribute(location_id_variable),
                VariableName(location_id_variable).get_alias(),
            )
        # Keep only buildings scheduled within the recent-years window.
        years = estimation_set.get_attribute("scheduled_year")
        recent_years = constants["recent_years"]
        indicator = zeros(estimation_set.size(), dtype="int32")
        for year in range(base_year - recent_years, base_year + 1):
            indicator = logical_or(indicator, years == year)
        idx = where(logical_not(indicator))[0]
        estimation_set.remove_elements(idx)
        if join_datasets:
            building_set.join_by_rows(estimation_set, require_all_attributes=False, change_ids_if_not_unique=True)
            index = arange(building_set.size() - estimation_set.size(), building_set.size())
        else:
            index = building_set.get_id_index(estimation_set.get_id_attribute())
    else:
        if building_set is not None:
            index = arange(building_set.size())
        else:
            index = None
    if add_member_prefix:
        specification_table = self.group_member.add_member_prefix_to_table_names([specification_table])
    from opus_core.model import get_specification_for_estimation

    specification = get_specification_for_estimation(specification_dict, specification_storage, specification_table)
    return (specification, index)
def prepare_for_estimate(self, specification_dict=None,
                         specification_storage=None,
                         specification_table=None, agent_set=None,
                         agents_for_estimation_storage=None,
                         agents_for_estimation_table=None,
                         join_datasets=False, index_to_unplace=None,
                         portion_to_unplace=1.0, compute_lambda=False,
                         grouping_location_set=None, movers_variable=None,
                         movers_index=None, filter=None,
                         location_id_variable=None, data_objects=None):
    """Put 'location_id_variable' always in, if the location id is to be
    computed on the estimation set, i.e. if it is not a primary attribute of
    the estimation set. Set 'index_to_unplace' to None, if 'compute_lambda'
    is True. In such a case, the annual supply is estimated without unplacing
    agents. 'grouping_location_set', 'movers_variable' and 'movers_index'
    must be given, if 'compute_lambda' is True.

    Returns a tuple (specification, index) where ``index`` selects the rows
    of ``agent_set`` used for estimation (None if no agent set is given).
    """
    from opus_core.model import get_specification_for_estimation
    from urbansim.functions import compute_supply_and_add_to_location_set

    # BUGFIX: the default was a shared mutable dict ({}); use a None sentinel
    # so callers omitting data_objects cannot leak state between calls.
    if data_objects is None:
        data_objects = {}
    specification = get_specification_for_estimation(
        specification_dict, specification_storage, specification_table)
    if (agent_set is not None) and (index_to_unplace is not None):
        if self.location_id_string is not None:
            agent_set.compute_variables(self.location_id_string,
                                        resources=Resources(data_objects))
        if portion_to_unplace < 1:
            unplace_size = int(portion_to_unplace * index_to_unplace.size)
            end_index_to_unplace = sample_noreplace(
                index_to_unplace, unplace_size)
        else:
            end_index_to_unplace = index_to_unplace
        logger.log_status("Unplace " + str(end_index_to_unplace.size) + " agents.")
        # Mark the unplaced agents with location id -1.
        agent_set.modify_attribute(
            self.choice_set.get_id_name()[0],
            resize(array([-1]), end_index_to_unplace.size),
            end_index_to_unplace)
    if compute_lambda:
        movers = zeros(agent_set.size(), dtype="bool8")
        if movers_index is not None:
            movers[movers_index] = 1
        agent_set.add_primary_attribute(movers, "potential_movers")
        self.estimate_config[
            "weights_for_estimation_string"] = self.estimate_config[
                "weights_for_estimation_string"] + "_from_lambda"
        compute_supply_and_add_to_location_set(
            self.choice_set, grouping_location_set,
            self.run_config["number_of_units_string"],
            self.run_config["capacity_string"], movers_variable,
            self.estimate_config["weights_for_estimation_string"],
            resources=Resources(data_objects))
    # create agents for estimation
    if (agents_for_estimation_storage is not None) and (agents_for_estimation_table is not None):
        estimation_set = Dataset(in_storage=agents_for_estimation_storage,
                                 in_table_name=agents_for_estimation_table,
                                 id_name=agent_set.get_id_name(),
                                 dataset_name=agent_set.get_dataset_name())
        if location_id_variable is not None:
            estimation_set.compute_variables(
                location_id_variable, resources=Resources(data_objects))
            # needs to be a primary attribute because of the join method below
            estimation_set.add_primary_attribute(
                estimation_set.get_attribute(location_id_variable),
                VariableName(location_id_variable).get_alias())
        if filter:
            values = estimation_set.compute_variables(
                filter, resources=Resources(data_objects))
            index = where(values > 0)[0]
            estimation_set.subset_by_index(
                index, flush_attributes_if_not_loaded=False)
        if join_datasets:
            agent_set.join_by_rows(estimation_set,
                                   require_all_attributes=False,
                                   change_ids_if_not_unique=True)
            index = arange(agent_set.size() - estimation_set.size(),
                           agent_set.size())
        else:
            index = agent_set.get_id_index(
                estimation_set.get_id_attribute())
    else:
        if agent_set is not None:
            if filter is not None:
                values = agent_set.compute_variables(
                    filter, resources=Resources(data_objects))
                index = where(values > 0)[0]
            else:
                index = arange(agent_set.size())
        else:
            index = None
    return (specification, index)
id_name='household_id', dataset_name='household') households.get_attribute_names() households.get_id_attribute() households.size() households.get_attribute("income") households.get_attribute_names() households.load_dataset() households.get_attribute_names() #households.plot_histogram("income", bins = 10) #households.r_histogram("income") #households.r_scatter("persons", "income") households.correlation_coefficient("persons", "income") households.correlation_matrix(["persons", "income"]) households.summary() households.add_primary_attribute(data=[4,6,9,2,4,8,2,1,3,2], name="location") households.get_attribute_names() households.modify_attribute(name="location", data=[0,0], index=[0,1]) households.get_attribute("location") households.get_data_element_by_id(5).location #households.write_dataset(out_storage=storage, out_table_name="households_output") households.get_dataset_name() # Working with models from opus_core.choice_model import ChoiceModel choicemodel = ChoiceModel(choice_set=[1,2,3], utilities = "opus_core.linear_utilities", probabilities = "opus_core.mnl_probabilities",