def test_my_inputs_for_hmps(self):
    """Verify biocomplexity.land_cover.hmps against hand-computed values."""
    variable_name = "biocomplexity.land_cover.hmps"
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='land_covers',
        table_data={
            'relative_x': array([1, 2, 1, 2]),
            'relative_y': array([1, 1, 2, 2]),
            "lct": array([1, 2, 1, 4]),
        },
    )
    dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
    # Inject the constants the variable depends on (footprint kernel + HU code).
    dataset_pool._add_dataset(
        'constant',
        {
            "FOOTPRINT": array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]),
            'HU': 1,
        },
    )
    land_cover = dataset_pool.get_dataset('land_cover')
    land_cover.compute_variables(variable_name, dataset_pool=dataset_pool)
    computed = land_cover.get_attribute(variable_name)
    # Expected: ln(mps + 1) scaled by the MPS standardization constant.
    expected = ln(array([2, 2, 2, 2], dtype=float32) + 1) / SSSmps.standardization_constant_MPS
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + variable_name)
def test_addition_of_jobs_with_one_characteristics(self):
    """Run AgentEventModel for 2000 and 2001 and check per-sector job counts."""
    dataset_pool = DatasetPool(storage=self.storage,
                               package_order=["washtenaw", "urbansim", "opus_core"])
    gridcell_set = dataset_pool.get_dataset('gridcell')
    event_set = self._create_job_addition_event_set_with_characteristics()
    jobs = dataset_pool.get_dataset("job")

    AgentEventModel().run(gridcell_set, event_set, jobs, 2000, dataset_pool)
    sector_1 = gridcell_set.compute_variables(
        "urbansim.gridcell.number_of_jobs_of_sector_1", dataset_pool=dataset_pool)
    sector_2 = gridcell_set.compute_variables(
        "urbansim.gridcell.number_of_jobs_of_sector_2", dataset_pool=dataset_pool)
    sector_4 = gridcell_set.compute_variables(
        "urbansim.gridcell.number_of_jobs_of_sector_4", dataset_pool=dataset_pool)
    # Year 2000: the model adds 2 sector-1 jobs to gridcell 1
    # and 5 sector-1 jobs to gridcell 5.
    self.assert_(ma.allclose(sector_1, array([6, 4, 4, 4, 9, 4, 4, 4, 4, 4])))
    # Other sectors are untouched.
    self.assert_(ma.allclose(sector_2, array(10 * [3])))
    self.assert_(ma.allclose(sector_4, array(10 * [3])))

    AgentEventModel().run(gridcell_set, event_set, jobs, 2001, dataset_pool)
    sector_1 = gridcell_set.compute_variables(
        "urbansim.gridcell.number_of_jobs_of_sector_1", dataset_pool=dataset_pool)
    sector_2 = gridcell_set.compute_variables(
        "urbansim.gridcell.number_of_jobs_of_sector_2", dataset_pool=dataset_pool)
    sector_4 = gridcell_set.compute_variables(
        "urbansim.gridcell.number_of_jobs_of_sector_4", dataset_pool=dataset_pool)
    # Year 2001: the model adds 2 sector-2 jobs to gridcell 5,
    # 1 sector-1 job to gridcell 1, 4 sector-2 jobs to gridcell 2,
    # and 70% sector-2 jobs to gridcell 3.
    self.assert_(ma.allclose(sector_1, array([7, 4, 4, 4, 9, 4, 4, 4, 4, 4])))
    self.assert_(ma.allclose(sector_2, array([3, 6, 5, 3, 5, 3, 3, 3, 3, 3])))
    # Sector 4 does not change.
    self.assert_(ma.allclose(sector_4, array(10 * [3])))
def test_full_tree(self):
    """Check the interaction variable: household income times parcel city flag."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='parcels',
        table_data={
            'parcel_id': array([1, 2, 3, 4]),
            'is_in_city_seattle': array([1, 1, 0, 0]),
        },
    )
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4, 5]),
            'income': array([1000, 300000, 50000, 0, 10550]),
        },
    )
    dataset_pool = DatasetPool(package_order=['psrc', 'urbansim'], storage=storage)
    household_x_parcel = dataset_pool.get_dataset('household_x_parcel')
    household_x_parcel.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household_x_parcel.get_attribute(self.variable_name)
    # Each row is one household's income multiplied by each parcel's flag.
    expected = array([
        [1000 * 1, 1000 * 1, 1000 * 0, 1000 * 0],
        [300000 * 1, 300000 * 1, 300000 * 0, 300000 * 0],
        [50000 * 1, 50000 * 1, 50000 * 0, 50000 * 0],
        [0, 0, 0, 0],
        [10550 * 1, 10550 * 1, 10550 * 0, 10550 * 0],
    ])
    self.assert_(ma.allclose(computed, expected, rtol=1e-3),
                 msg="Error in " + self.variable_name)
def test_with_zero_denominator(self):
    """Trip-weighted average must reuse the prior good value when trips sum to 0."""
    storage = StorageFactory().get_storage("dict_storage")
    storage.write_table(table_name="zones",
                        table_data={"zone_id": array([1, 2, 3, 4])})
    storage.write_table(
        table_name="travel_data",
        table_data={
            "from_zone_id": array([1, 2, 2, 3, 4]),
            "to_zone_id": array([1, 2, 1, 2, 2]),
            "am_single_vehicle_to_work_travel_time": array([1.1, 2.2, 3.3, 4.4, 5.5]),
            "am_pk_period_drive_alone_vehicle_trips": array([10.1, 20.0, 30.0, 0.0, 0.0]),
        },
    )
    dataset_pool = DatasetPool(package_order=["urbansim"], storage=storage)
    zone = dataset_pool.get_dataset("zone")
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    # Zones 3 and 4 have zero trips, so they fall back to zone 2's value.
    expected = array([
        (1.1 * 10.1) / (10.1),
        (2.2 * 20.0 + 3.3 * 30) / (20.0 + 30.0),
        (2.2 * 20.0 + 3.3 * 30) / (20.0 + 30.0),
        (2.2 * 20.0 + 3.3 * 30) / (20.0 + 30.0),
    ])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_my_input(self):
    """Sum of drive-alone vehicle trips arriving at each zone."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 3]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([1, 3, 3, 1]),
            "to_zone_id": array([1, 1, 3, 3]),
            "am_pk_period_drive_alone_vehicle_trips": array([1, 7, 3, 4]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    # Zone 1 receives 1 + 7 trips; zone 3 receives 3 + 4 trips.
    expected = array([8, 7])
    self.assert_(ma.allequal(computed, expected),
                 msg="Error in " + self.variable_name)
def test_my_input(self):
    """Transit-walk-time-weighted job accessibility for two zones."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            "zone_id": array([1, 3]),
            "number_of_jobs": array([10, 1]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([3, 3, 1, 1]),
            "to_zone_id": array([1, 3, 1, 3]),
            "am_total_transit_time_walk": array([1, 2, 3, 4]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    expected = array([1.17361, 10.25])
    self.assert_(ma.allclose(computed, expected, rtol=1e-3),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Flag households whose head is at or below the young-age threshold."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4]),
            'age_of_head': array([12, 20, 25, 30]),
        },
    )
    storage.write_table(
        table_name='urbansim_constants',
        table_data={
            'young_age': array([25]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    household = dataset_pool.get_dataset('household')
    household.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household.get_attribute(self.variable_name)
    # Heads aged 12, 20 and 25 qualify; the 30-year-old does not.
    expected = array([1, 1, 1, 0])
    self.assert_(ma.allequal(computed, expected),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Gridcells inherit the generalized-cost value of their zone."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='gridcells',
        table_data={
            'grid_id': array([1, 2, 3]),
            'zone_id': array([1, 1, 3]),
        },
    )
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 2, 3]),
            "trip_weighted_average_generalized_cost_hbw_to_work_am_drive_alone":
                array([4.1, 5.3, 6.2]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    gridcell = dataset_pool.get_dataset('gridcell')
    gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = gridcell.get_attribute(self.variable_name)
    # Gridcells 1 and 2 are in zone 1, gridcell 3 in zone 3.
    expected = array([4.1, 4.1, 6.2])
    self.assert_(ma.allclose(computed, expected, rtol=1e-3),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Footprint-based count over land-cover cells with a masked (-9999) entry."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='land_covers',
        table_data={
            'relative_x': array([1, 2, 1, 2]),
            'relative_y': array([1, 1, 2, 2]),
            "lct": array([-9999, 5, 3, 1]),
        },
    )
    dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
    # Plus-shaped convolution kernel plus the AG land-cover code.
    kernel = array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
    dataset_pool._add_dataset(
        'constant',
        {
            "FOOTPRINT": kernel,
            'AG': 10,
        },
    )
    land_cover = dataset_pool.get_dataset('land_cover')
    land_cover.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = land_cover.get_attribute(self.variable_name)
    expected = array([2, 4, 4, 5])
    self.assert_(ma.allequal(computed, expected),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Total daily biking person trips originating in each zone, all purposes."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 2]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([1, 1, 2, 2]),
            "to_zone_id": array([1, 2, 1, 2]),
            "hbw_daily_biking_person_trip_table": array([1.1, 2.2, 3.3, 4.4]),
            "college_daily_biking_person_trip_table": array([1.0, 2.0, 3.0, 4.0]),
            "hbnw_daily_biking_person_trip_table": array([2.0, 3.0, 1.0, 0.0]),
            "nhb_daily_biking_person_trip_table": array([12.8, 4.5, 1.2, 8.0]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    expected = array([28.6, 24.9])
    self.assert_(ma.allclose(computed, expected, rtol=1e-4),
                 msg="Error in " + self.variable_name)
def test_safely_divide_two_attributes(self):
    """safely_divide_two_attributes: default fill and explicit divide-by-zero fill."""
    from opus_core.datasets.dataset_pool import DatasetPool
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='tests',
        table_data={
            'id': array([1, 2, 3, 4]),
            'numerator': array([1, 2, 3, 0]),
            'denominator': array([2., 0., 2., 0.]),
        },
    )
    dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
    test = dataset_pool.get_dataset('test')
    variable = Variable()
    variable.set_dataset(test)
    # Default: divide-by-zero entries become 0.
    result = variable.safely_divide_two_attributes('opus_core.test.numerator',
                                                   'opus_core.test.denominator')
    self.assert_(ma.allclose(array([.5, 0, 1.5, 0]), result))
    # Explicit fill value for the divide-by-zero entries.
    result = variable.safely_divide_two_attributes('opus_core.test.numerator',
                                                   'opus_core.test.denominator',
                                                   value_for_divide_by_zero=-1.0)
    self.assert_(ma.allclose(array([.5, -1., 1.5, -1.]), result))
def test_my_inputs(self):
    """Residential-development count within walking distance on a 2x2 grid."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='gridcells',
        table_data={
            'grid_id': array([1, 2, 3, 4]),
            'relative_x': array([1, 2, 1, 2]),
            'relative_y': array([1, 1, 2, 2]),
            'is_development_type_residential': array([1, 1, 1, 0]),
        },
    )
    storage.write_table(
        table_name='urbansim_constants',
        table_data={
            "walking_distance_circle_radius": array([150]),
            'cell_size': array([150]),
            "acres": array([105.0]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    gridcell = dataset_pool.get_dataset('gridcell')
    gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = gridcell.get_attribute(self.variable_name)
    expected = array([5, 4, 4, 2])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Trip-weighted average travel time with nonzero trips everywhere."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 2]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([1, 1, 2, 2]),
            'to_zone_id': array([1, 2, 1, 2]),
            "am_single_vehicle_to_work_travel_time": array([1.1, 2.2, 3.3, 4.4]),
            "am_pk_period_drive_alone_vehicle_trips": array([1.0, 2.0, 3.0, 4.0]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    # Weighted mean of times, weights = vehicle trips, grouped by origin zone.
    expected = array([(1.1 * 1.0 + 2.2 * 2.0) / (3.0),
                      (3.3 * 3.0 + 4.4 * 4.0) / (7.0)])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_with_all_zero_denominator(self):
    """With zero trips everywhere, the weighted average falls back to 0 for all zones."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            "zone_id": array([1, 2, 3, 4]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([1, 2, 2, 3, 4]),
            "to_zone_id": array([1, 2, 1, 2, 2]),
            "am_single_vehicle_to_work_travel_time": array([1.1, 2.2, 3.3, 4.4, 5.5]),
            "am_pk_period_drive_alone_vehicle_trips": array([0, 0.0, 0.0, 0.0, 0.0]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    expected = array([0.0, 0.0, 0.0, 0.0])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Mode share: (bike + walk) trips over all trips, per zone."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            "zone_id": array([1, 2]),
            "trip_mode_bike": array([3, 1]),
            "trip_mode_walk": array([5, 6]),
            "trip_mode_park_ride": array([3, 2]),
            "trip_mode_share_ride2": array([1, 8]),
            "trip_mode_drive_alone": array([2, 9]),
            "trip_mode_share_ride3": array([8, 4]),
            "trip_mode_transit": array([5, 5]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    # Zone 1: (3+5)/27 ; zone 2: (1+6)/35.
    expected = array([8.0 / 27.0, 7.0 / 35.0])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Sampling-bias correction term for a sampled job x building interaction set."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='buildings',
        table_data={
            'building_id': arange(50),
            'residential_units': array(5 * [0] + 10 * [20] + 5 * [15] +
                                       10 * [50] + 15 * [3] + 5 * [45]),
        },
    )
    storage.write_table(
        table_name='jobs',
        table_data={
            'job_id': array([1, 2, 3]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim_parcel', 'urbansim'],
                               storage=storage)
    sampled_index = array([[13, 15, 23, 49],
                           [5, 9, 17, 43],
                           [17, 18, 40, 47]], dtype="int32")
    job_x_building = dataset_pool.get_dataset(
        'job_x_building', dataset_arguments={"index2": sampled_index})
    job_x_building.compute_variables(self.variable_name)
    computed = job_x_building.get_attribute(self.variable_name)
    # Values follow the sampling-bias correction formula from the Ben-Akiva
    # book (chapter on correcting for sampling bias).
    expected = array([[-11.3207881, -11.03310603, -12.23707884, -12.13171832],
                      [-15.01597613, -15.01597613, -14.72829406, -13.11885615],
                      [-14.18521949, -14.18521949, -12.57578158, -15.28383178]]) + 11.03310603
    self.assert_(ma.allclose(computed, expected, rtol=1e-4),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Sampling-bias correction for a sampled household x gridcell interaction set."""
    storage = StorageFactory().get_storage("dict_storage")
    storage.write_table(
        table_name="gridcells",
        table_data={
            "grid_id": arange(50),
            "residential_units": array(5 * [0] + 10 * [20] + 5 * [15] +
                                       10 * [50] + 15 * [3] + 5 * [45]),
        },
    )
    storage.write_table(table_name="households",
                        table_data={"household_id": array([1, 2, 3])})
    dataset_pool = DatasetPool(package_order=["urbansim"], storage=storage)
    sampled_index = array([[13, 15, 23, 49],
                           [5, 9, 17, 43],
                           [17, 18, 40, 47]], dtype="int32")
    household_x_gridcell = dataset_pool.get_dataset(
        "household_x_gridcell", dataset_arguments={"index2": sampled_index})
    household_x_gridcell.compute_variables(self.variable_name)
    computed = household_x_gridcell.get_attribute(self.variable_name)
    # Values follow the sampling-bias correction formula from the Ben-Akiva
    # book (chapter on correcting for sampling bias).
    expected = array([[-11.3207881, -11.03310603, -12.23707884, -12.13171832],
                      [-15.01597613, -15.01597613, -14.72829406, -13.11885615],
                      [-14.18521949, -14.18521949, -12.57578158, -15.28383178]]) + 11.03310603
    self.assert_(ma.allclose(computed, expected, rtol=1e-4),
                 msg="Error in " + self.variable_name)
def get_values(self, sector, threshold):
    """Compute the sector-employment-within-travel-time variable on a fixture.

    The variable name is parameterized by `sector` and `threshold`;
    returns the computed attribute values.
    """
    self.variable_name = (
        "urbansim_parcel.zone.sector_%s_employment_within_%s_minutes_travel_time_hbw_am_transit_walk"
        % (sector, threshold))
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            "zone_id": array([1, 3]),
            "number_of_jobs_of_sector_2": array([10, 1]),
            "number_of_jobs_of_sector_3": array([7, 2]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([3, 3, 1, 1]),
            "to_zone_id": array([1, 3, 1, 3]),
            "am_total_transit_time_walk": array([1.1, 2.2, 3.3, 4.4]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    return zone.get_attribute(self.variable_name)
def test_my_inputs(self):
    """Total daily drive-to-park-and-ride trips originating in each zone."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 2]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([1, 1, 2, 2]),
            "to_zone_id": array([1, 2, 1, 2]),
            'hbw_daily_drive_to_park_ride_person_trip_table': array([1.1, 2.2, 3.3, 4.4]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = zone.get_attribute(self.variable_name)
    # Zone 1: 1.1 + 2.2 ; zone 2: 3.3 + 4.4.
    expected = array([3.3, 7.7])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_no_translation(self):
    """An unknown land-cover variable (xmps) must raise RuntimeError."""
    variable_name = "biocomplexity.land_cover.xmps"
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='land_covers',
        table_data={
            'relative_x': array([1, 2, 3, 1, 2, 3, 1, 2, 3]),
            'relative_y': array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
            "lct": array([1, 2, 3, 4, 5, 4, 3, 5, 1]),
        },
    )
    dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
    dataset_pool._add_dataset(
        'constant',
        {
            "FOOTPRINT": array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]),
            "ALL_URBAN": ['HU', 'MU', 'LU'],
            'HU': 1,
            'MU': 2,
            'LU': 3,
        },
    )
    land_cover = dataset_pool.get_dataset('land_cover')
    self.assertRaises(RuntimeError,
                      land_cover.compute_variables,
                      variable_name,
                      dataset_pool=dataset_pool)
def test_my_inputs(self):
    """Gridcell value is exposed only to low-income households (others get 0)."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='gridcells',
        table_data={
            'grid_id': array([1, 2, 3]),
            'residential_avg_val_per_unit_within_walking_distance': array([50, 10, 20]),
        },
    )
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4]),
            'is_low_income': array([1, 0, 1, 0]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell')
    household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household_x_gridcell.get_attribute(self.variable_name)
    expected = array([[50, 10, 20],
                      [0, 0, 0],
                      [50, 10, 20],
                      [0, 0, 0]])
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Only buildings with a plausible year and sufficient value qualify."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='buildings',
        table_data={
            'building_id': array([1, 2, 3, 4, 5, 6]),
            'year_built': array([1995, 2000, 2005, 0, 1800, 1799]),
            'land_value': array([0, 10, 500, 20, 0, 9]),
            'improvement_value': array([2, 0, 10, 10, 0, 70]),
        },
    )
    storage.write_table(
        table_name='urbansim_constants',
        table_data={
            "absolute_min_year": array([1800]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    buildings = dataset_pool.get_dataset('building')
    buildings.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = buildings.get_attribute(self.variable_name)
    expected = array([False, False, True, False, False, False])
    self.assert_(ma.allequal(computed, expected),
                 msg="Error in " + self.variable_name)
def get_values(self, number):
    """Compute psrc.zone.employment_within_<number>_minutes_travel_time_hbw_am_walk.

    Returns the computed attribute values for a two-zone fixture.
    """
    variable_name = (
        "psrc.zone.employment_within_%s_minutes_travel_time_hbw_am_walk" % number)
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            "zone_id": array([1, 3]),
            "number_of_jobs": array([10, 1]),
        },
    )
    storage.write_table(
        table_name='travel_data',
        table_data={
            "from_zone_id": array([3, 3, 1, 1]),
            "to_zone_id": array([1, 3, 1, 3]),
            "am_walk_time_in_minutes": array([1.1, 2.2, 3.3, 4.4]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    zone = dataset_pool.get_dataset('zone')
    zone.compute_variables(variable_name, dataset_pool=dataset_pool)
    return zone.get_attribute(variable_name)
def test_my_inputs(self):
    """Neighborhood-sum of comm_add4 under a CELLSIZE-derived 3x3 window, log-scaled."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='land_covers',
        table_data={
            'relative_x': array([1, 2, 1, 2]),
            'relative_y': array([1, 1, 2, 2]),
            "comm_add4": array([1, 2, 5, 15]),
        },
    )
    dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
    dataset_pool._add_dataset(
        'constant',
        {
            # 250m cells give a 3x3 window: (750/250) x (750/250).
            "CELLSIZE": 250,
        },
    )
    land_cover = dataset_pool.get_dataset('land_cover')
    land_cover.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = land_cover.get_attribute(self.variable_name)
    expected = array([1 * 4 + 2 * 2 + 5 * 2 + 15,
                      1 * 2 + 2 * 4 + 5 + 15 * 2,
                      1 * 2 + 2 + 5 * 4 + 15 * 2,
                      1 + 2 * 2 + 5 * 2 + 15 * 4])
    expected = ln(expected + 1) / 10.0
    self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Interaction variable: household income minus gridcell housing cost."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='gridcells',
        table_data={
            'grid_id': array([1, 2, 3]),
            'housing_cost': array([1000, 10000, 100000]),
        },
    )
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3]),
            'income': array([1, 20, 500]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell')
    household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household_x_gridcell.get_attribute(self.variable_name)
    expected = array([[-999, -9999, -99999],
                      [-980, -9980, -99980],
                      [-500, -9500, -99500]])
    self.assert_(ma.allequal(computed, expected),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Share of a building's jobs belonging to the variable's sector."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='buildings',
        table_data={
            'building_id': array([1, 2, 3]),
        },
    )
    storage.write_table(
        table_name='jobs',
        table_data={
            'job_id': array([1, 2, 3, 4, 5, 6]),
            'sector_id': array([1, 1, 3, 2, 3, 3]),
            'building_id': array([1, 1, 1, 2, 3, 3]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim_parcel', 'urbansim'],
                               storage=storage)
    buildings = dataset_pool.get_dataset('building')
    computed = buildings.compute_variables(self.variable_name,
                                           dataset_pool=dataset_pool)
    # Building 1: 1 of 3 jobs; building 2: 0 of 1; building 3: 2 of 2.
    expected = array([1 / 3., 0, 1])
    self.assert_(ma.allequal(computed, expected),
                 'Error in ' + self.variable_name)
def get_values(self, number):
    """Compute eugene.gridcell.travel_time_hbw_am_drive_alone_to_<number>.

    Returns the per-gridcell values, inherited from the gridcell's zone.
    """
    self.variable_name = (
        "eugene.gridcell.travel_time_hbw_am_drive_alone_to_%s" % number)
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 2, 3]),
            "travel_time_hbw_am_drive_alone_to_1": array([1.1, 2.2, 3.3]),
            "travel_time_hbw_am_drive_alone_to_3": array([0.1, 0.2, 0.3]),
        },
    )
    storage.write_table(
        table_name='gridcells',
        table_data={
            "grid_id": array([1, 2, 3, 4]),
            "zone_id": array([1, 1, 3, 1]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    gridcell = dataset_pool.get_dataset('gridcell')
    gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    return gridcell.get_attribute(self.variable_name)
def test_my_inputs(self):
    """Only the top income fraction of households is flagged as high income."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4]),
            'income': array([50, 100, 200, 300]),
        },
    )
    storage.write_table(
        table_name='urbansim_constants',
        table_data={
            "low_income_fraction": array([.25]),
            'mid_income_fraction': array([.25]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    household = dataset_pool.get_dataset('household')
    household.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household.get_attribute(self.variable_name)
    expected = array([0, 0, 0, 1])
    self.assert_(ma.allequal(computed, expected),
                 msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Buildings are flagged by the generic type their building type maps to."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='building_types',
        table_data={
            'building_type_id': array([1, 2, 3, 4]),
            'generic_building_type_id': array([2, 3, 1, 1]),
        },
    )
    storage.write_table(
        table_name='buildings',
        table_data={
            'building_id': array([1, 2, 3, 4, 5, 6]),
            'building_type_id': array([2, 1, 2, 4, 3, 3]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim_parcel', 'urbansim'],
                               storage=storage)
    buildings = dataset_pool.get_dataset('building')
    computed = buildings.compute_variables(self.variable_name,
                                           dataset_pool=dataset_pool)
    expected = array([True, False, True, False, False, False])
    self.assert_(ma.allequal(computed, expected),
                 'Error in ' + self.variable_name)
def test_3(self):
    """Gridcells pick up their zone's travel-time-to-zone-3 value."""
    variable_name = "psrc.gridcell.travel_time_hbw_am_drive_alone_to_3"
    storage = StorageFactory().get_storage("dict_storage")
    storage.write_table(
        table_name="gridcells",
        table_data={"grid_id": array([1, 2, 3]), "zone_id": array([1, 1, 3])},
    )
    storage.write_table(
        table_name="zones",
        table_data={
            "zone_id": array([1, 2, 3]),
            "travel_time_hbw_am_drive_alone_to_1": array([1.1, 2.2, 3.3]),
            "travel_time_hbw_am_drive_alone_to_3": array([0.1, 0.2, 0.3]),
        },
    )
    dataset_pool = DatasetPool(package_order=["urbansim"], storage=storage)
    gridcell = dataset_pool.get_dataset("gridcell")
    gridcell.compute_variables(variable_name, dataset_pool=dataset_pool)
    computed = gridcell.get_attribute(variable_name)
    # Gridcells 1 and 2 are in zone 1, gridcell 3 in zone 3.
    expected = array([0.1, 0.1, 0.3])
    self.assert_(ma.allclose(computed, expected, rtol=1e-3),
                 msg="Error in " + variable_name)
def test_get_dataset_named(self):
    """get_dataset_named returns a component by name and raises on unknown names."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='tests',
        table_data={
            'id': array([1, 2]),
            'attr1': array([1, 2]),
            'attr2': array([10, 100]),
        },
    )
    dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
    test_x_test = dataset_pool.get_dataset('test_x_test')
    component = test_x_test.get_dataset_named('test')
    self.assertEqual(component.get_dataset_name(), 'test',
                     msg="error in get_dataset_named")
    # Unknown component names must raise ValueError.
    self.assertRaises(ValueError, test_x_test.get_dataset_named, 'squid')
def test_interaction_set_component_expression(self):
    """An expression on one component of an interaction set broadcasts over the other."""
    # Fully-qualified variable applied to the agent component of the pair.
    expr = "3+opus_core.test_agent.income_times_2"
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='test_agents',
        table_data={'id': array([1, 2, 3]), 'income': array([1, 20, 500])},
    )
    storage.write_table(
        table_name='test_locations',
        table_data={'id': array([1, 2]), 'cost': array([1000, 2000])},
    )
    dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
    test_agent_x_test_location = dataset_pool.get_dataset('test_agent_x_test_location')
    result = test_agent_x_test_location.compute_variables(expr, dataset_pool=dataset_pool)
    # Each agent's 3 + income*2, repeated across both locations.
    expected = array([[5, 5], [43, 43], [1003, 1003]])
    self.assert_(ma.allclose(result, expected, rtol=1e-6),
                 msg="Error in " + expr)
class Tests(object):
    """Smoke test: compute this module's variable against a cached data set."""

    def setUp(self):
        """Load every table from the flt cache into a shared dataset pool."""
        cache_path = "/workspace/urbansim_cache/asu"
        self.variable_name = opus_path_for_variable_from_module_path(__file__)
        storage = file_flt_storage(cache_path)
        self.dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
        # Register each cached table as an id-less dataset in the pool.
        for table_name in storage.get_table_names():
            self.dataset_pool.get_dataset(table_name,
                                          dataset_arguments={'id_name': []})

    def test_compute(self):
        """Computing the variable on its dataset must not raise."""
        # The dataset name is the next-to-last component of the variable path.
        dataset_name = self.variable_name.split('.')[-2]
        dataset = self.dataset_pool.get_dataset(dataset_name)
        dataset.compute_variables(self.variable_name,
                                  dataset_pool=self.dataset_pool)
def test_full_tree(self):
    """Percent low-income households within walking distance, per household x gridcell."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='gridcells',
        table_data={
            'grid_id': array([1, 2, 3, 4]),
            'relative_x': array([1, 2, 1, 2]),
            'relative_y': array([1, 1, 2, 2]),
        },
    )
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4, 5, 6]),
            'grid_id': array([1, 2, 3, 4, 2, 2]),
            # Low income threshold ends up at <= 1500.
            'income': array([1500, 5000, 3000, 10000, 1000, 8000]),
        },
    )
    storage.write_table(
        table_name='urbansim_constants',
        table_data={
            "walking_distance_circle_radius": array([150]),
            'cell_size': array([150]),
            'low_income_fraction': array([.25]),
            'mid_income_fraction': array([.3]),
        },
    )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell')
    household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household_x_gridcell.get_attribute(self.variable_name)
    # Nonzero rows correspond to the low-income households (1 and 5).
    expected = array([[57.14286, 36.36364, 20, 14.28571],
                      [0, 0, 0, 0],
                      [0, 0, 0, 0],
                      [0, 0, 0, 0],
                      [57.14286, 36.36364, 20, 14.28571],
                      [0, 0, 0, 0]])
    self.assert_(ma.allclose(computed, expected, rtol=1e-5),
                 msg="Error in " + self.variable_name)
def get_dataset_pool(self):
    """Return the DatasetPool, creating it lazily on first access."""
    pool = self.dataset_pool
    if pool is None:
        pool = DatasetPool(self.package_order, storage=self.in_storage)
        self.dataset_pool = pool
    return pool
def send_to_urbancanvas(self):
    """Export the current variable's parcel values for UrbanCanvas visualization.

    Only parcel-level variables are supported; other datasets just get an
    informational message box.
    """
    self._update_variable_from_fields()
    func = batch_check_data
    dummy, result, msgs = func([self.variable, ], self.validator)[0]
    expression = dummy['definition']
    if dummy['dataset'] == 'parcel':
        from opus_core.storage_factory import StorageFactory
        from opus_core.datasets.dataset_pool import DatasetPool
        import os, sys
        # Locate the project's base-year cache on disk.
        base_year = self.validator.project.xml_config.get_estimation_configuration()['base_year']
        project_name = self.validator.project.name
        opus_data_path = self.validator.project.xml_config.get_opus_data_path()
        logger.log_note(base_year)
        logger.log_note(project_name)
        logger.log_note(opus_data_path)
        cache = os.path.join(opus_data_path, project_name, 'base_year_data', str(base_year))
        logger.log_note(cache)
        storage = StorageFactory().get_storage('flt_storage', storage_location=cache)
        dataset_pool = DatasetPool(
            storage=storage,
            package_order=[project_name, 'urbansim_parcel', 'urbansim', 'opus_core'])
        # Evaluate the expression over all parcels.
        parcels = dataset_pool.get_dataset('parcel')
        parcel_ids = pd.Series(parcels.get_attribute('parcel_id'))
        values = pd.Series(
            parcels.compute_variables([expression], dataset_pool=dataset_pool).astype('float'))
        parcels = pd.DataFrame({"parcel_id": parcel_ids, "vl_values": values})
        #parcels.set_index(keys='parcel_id',inplace=True)
        #parcels["vl_values"][parcels["vl_values"]==0] = np.nan
        # Keep only parcels with a positive value.
        parcels = parcels[parcels["vl_values"] > 0]
        # Write the indicator CSV into the project data directory.
        os.chdir(os.path.join(opus_data_path, project_name))
        parcels.to_csv('variable_library_indicator.csv', index=False)
        #np.savez('variable_library_indicator',parcel_id=parcels.vl_values.index.values.astype('int32'),values=parcels.vl_values.values.astype('int32'))
        ##############UNCOMMENT IF WEBSERVICE IS DESIRED
        # parcels.save('variable_library.pkl')
        # NOTE: 'save' was deprecated in pandas — now to_pickle or similar; change this later.
        # web_service_path = os.path.join(os.getenv("OPUS_HOME"),'src',project_name,'scripts','web_service.py')
        # logger.log_note(web_service_path)
        # p = subprocess.Popen([sys.executable,web_service_path])
        # MessageBox.information(mainwindow = self, text = 'Click OK when done viewing in UrbanCanvas')
        # p.kill()
        MessageBox.information(
            mainwindow=self,
            text='Variable exported to the project data directory for viewing in UrbanCanvas')
    else:
        MessageBox.information(
            mainwindow=self,
            text='Not a parcel variable. Only parcel variables can be sent to UrbanCanvas')
def test_full_tree(self):
    """Interaction variable: household income times log of parcel lot size."""
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='parcels',
        table_data={
            'parcel_id': array([1, 2, 3, 4]),
            'lot_sf_unit': array([1000, 1000, 3000, 2000]),
        },
    )
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4, 5]),
            'income': array([1000, 300000, 50000, 0, 10550]),
        },
    )
    dataset_pool = DatasetPool(package_order=['psrc', 'urbansim'], storage=storage)
    household_x_parcel = dataset_pool.get_dataset('household_x_parcel')
    household_x_parcel.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    computed = household_x_parcel.get_attribute(self.variable_name)
    # Row per household: income * log(lot_sf_unit) for each parcel.
    expected = array([
        [1000 * log(1000), 1000 * log(1000), 1000 * log(3000), 1000 * log(2000)],
        [300000 * log(1000), 300000 * log(1000), 300000 * log(3000), 300000 * log(2000)],
        [50000 * log(1000), 50000 * log(1000), 50000 * log(3000), 50000 * log(2000)],
        [0, 0, 0, 0],
        [10550 * log(1000), 10550 * log(1000), 10550 * log(3000), 10550 * log(2000)],
    ])
    self.assert_(ma.allclose(computed, expected, rtol=1e-3),
                 msg="Error in " + self.variable_name)
def test_aggregate_sum(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'my_variable':array([4,8,0.5,1]), 'id':array([1,2,3,4]), 'id2':array([1,2,1,2]), } ) storage.write_table(table_name='faz', table_data={"id2":array([1,2])}) ds = Dataset(in_storage=storage, in_table_name='zones', id_name="id", dataset_name="myzone") ds2 = Dataset(in_storage=storage, in_table_name='faz', id_name="id2", dataset_name="myfaz") dataset_pool = DatasetPool() dataset_pool._add_dataset('myzone', ds) dataset_pool._add_dataset('myfaz', ds2) values = ds2.compute_variables(['myfaz.aggregate(myzone.my_variable, function=sum)'], dataset_pool=dataset_pool) should_be = array([4.5, 9]) self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in aggregate_sum")
def test_divide(self): expr = 'test_location.cost/test_agent.income' storage = StorageFactory().get_storage('dict_storage') storage.write_table( table_name='test_agents', table_data={'id': array([1, 2, 3]), 'income': array([1, 20, 500])} ) storage.write_table( table_name='test_locations', table_data={'id': array([1,2]), 'cost': array([1000, 2000])} ) dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage) test_agent_x_test_location = dataset_pool.get_dataset('test_agent_x_test_location') result = test_agent_x_test_location.compute_variables(expr, dataset_pool=dataset_pool) should_be = array([[1000, 2000], [50, 100], [2, 4]]) self.assert_(ma.allclose(result, should_be, rtol=1e-6), msg = "Error in " + expr)
def test_my_inputs(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table( table_name='parcels', table_data={ 'parcel_id':array([1,2,3,4]), 'zone_id':array([1, 1, 3, 2]) }, ) storage.write_table( table_name='households', table_data={ 'household_id':array([1,2,3,4,5]), 'parcel_id':array([3, 1, 1, 2, 4]), 'zone_id':array([3, 1, 1, 1, 2]), 'work_place_zone_id':array([1, 3, 3, 2, 3]) }, ) storage.write_table( table_name='travel_data', table_data={ 'from_zone_id':array([3,3,1,1,1,2,2,3,2]), 'to_zone_id':array([1,3,1,3,2,1,3,2,2]), 'am_single_vehicle_to_work_travel_time':array([1.1, 2.2, 3.3, 4.4, 0.5, 0.7, 8.7, 7.8, 1.0]) } ) dataset_pool = DatasetPool(package_order=['psrc', 'urbansim'], storage=storage) household_x_parcel = dataset_pool.get_dataset('household_x_parcel') household_x_parcel.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = household_x_parcel.get_attribute(self.variable_name) should_be = array([[3.3, 3.3, 1.1, 0.7], [4.4, 4.4, 2.2, 8.7], [4.4, 4.4, 2.2, 8.7], [0.5, 0.5, 7.8, 1.0], [4.4, 4.4, 2.2, 8.7]]) self.assert_(ma.allclose(values, should_be, rtol=1e-3), msg="Error in " + self.variable_name)
def test_my_inputs(self): storage = StorageFactory().get_storage('dict_storage') #declare an array of four gridcells, each with the specified sector ID below storage.write_table(table_name='development_events', table_data={ 'grid_id': array([100, 100, 101, 102]), 'scheduled_year': array([1999, 1998, 1999, 1999]), 'starting_development_type_id': array([1, 3, 2, 3]), 'ending_development_type_id': array([1, 1, 2, 3]), }) storage.write_table(table_name='development_type_groups', table_data={ 'name': array(["vacant_developable", "developed"]), 'group_id': array([1, 2]), }) dataset_pool = DatasetPool(package_order=['urbansim', 'opus_core'], storage=storage) dataset_pool._add_dataset('development_type', mock_developmenttype()) development_event = dataset_pool.get_dataset('development_event') # Test variable 1 development_event.compute_variables(self.variable_name1, dataset_pool=dataset_pool) values = development_event.get_attribute(self.variable_name1) should_be = array([True, True, False, False]) self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name1) # Test variable 2 development_event.compute_variables(self.variable_name2, dataset_pool=dataset_pool) values = development_event.get_attribute(self.variable_name2) should_be = array([True, True, True, False]) self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name2)
def test_my_input(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ "zone_id": array([1, 3]), "vehicle_miles_traveled": array([2, 4]) }) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) zone = dataset_pool.get_dataset('zone') zone.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = zone.get_attribute(self.variable_name) should_be = array([1.832, 3.664]) self.assert_(ma.allclose(values, should_be, rtol=1e-3), msg="Error in " + self.variable_name)
def test_full_tree(self): """Percent of households within walking distance that are minority, given that the head of the decision-making household is not minority. (If the head of the decision-making household is minority the corresponding value is 0.) """ storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='gridcells', table_data={ 'grid_id': array([1, 2, 3, 4]), 'relative_x': array([1, 2, 1, 2]), 'relative_y': array([1, 1, 2, 2]), }) storage.write_table(table_name='households', table_data={ 'household_id': array([1, 2, 3, 4, 5, 6]), 'grid_id': array([1, 2, 3, 4, 2, 2]), 'is_minority': array([1, 0, 1, 0, 0, 1]), }) storage.write_table(table_name='urbansim_constants', table_data={ "walking_distance_circle_radius": array([150]), 'cell_size': array([150]), }) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell') household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = household_x_gridcell.get_attribute(self.variable_name) should_be = array( [[0, 0, 0, 0], [100 * (5.0 / 7.0), 100 * (4.0 / 11.0), 80.0, 100 * (2.0 / 7.0)], [0, 0, 0, 0], [100 * (5.0 / 7.0), 100 * (4.0 / 11.0), 80.0, 100 * (2.0 / 7.0)], [100 * (5.0 / 7.0), 100 * (4.0 / 11.0), 80.0, 100 * (2.0 / 7.0)], [0, 0, 0, 0]]) self.assert_(ma.allclose(values, should_be, rtol=1e-7), msg="Error in " + self.variable_name)
def test_my_inputs(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table( table_name='gridcells', table_data={ 'grid_id': array([1,2,3]), 'percent_low_income_households_within_walking_distance': array([50, 0, 15]), } ) storage.write_table( table_name='households', table_data={ 'household_id': array([1, 2, 3, 4]), 'is_low_income': array([1, 0, 1, 1]), } ) storage.write_table( table_name='urbansim_constants', table_data={ "walking_distance_circle_radius": array([150]), 'cell_size': array([150]), 'low_income_fraction': array([.25]), 'mid_income_fraction': array([.3]), } ) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell') household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = household_x_gridcell.get_attribute(self.variable_name) should_be = array([[50, 0, 15], [0, 0, 0], [50, 0, 15], [50, 0, 15]]) self.assert_(ma.allclose(values, should_be, rtol=1e-7), msg="Error in " + self.variable_name)
def test(self): '''Test dataset with following situations: [normal, normal, normal, divide-by-zero, cap-very-large-values]''' cache_dir = os.path.join(self.temp_dir, 'cache') data = { self._id_name: array([1, 2, 3, 4, 5]), 'population': array([10, 20, 30, 0, 1]), } self._write_data_to_year(data, cache_dir, 2000) data = { self._id_name: array([1, 2, 3, 4, 5]), 'population': array([11, 21, 31, 0, 1]), } self._write_data_to_year(data, cache_dir, 2001) data = { self._id_name: array([1, 2, 3, 4, 5]), 'population': array([20, 30, 30, 40, 10000]), } self._write_data_to_year(data, cache_dir, 2002) attribute_cache = AttributeCache(cache_directory=cache_dir) SimulationState(new_instance=True, base_cache_dir=self.temp_dir) SimulationState().set_cache_directory(cache_dir) SessionConfiguration(new_instance=True, in_storage=attribute_cache) SimulationState().set_current_time(2002) dataset_pool_2002 = DatasetPool(package_order=['urbansim'], storage=attribute_cache) dataset = dataset_pool_2002.get_dataset(self._dataset_name) variable_name = '%s.%s.percent_population_difference_from_2000' % ( self._package_name, self._dataset_name) dataset.compute_variables([variable_name], dataset_pool=dataset_pool_2002) pop_2002 = dataset.get_attribute(variable_name) self.assert_( ma.allclose(pop_2002[array([0, 1, 2, 4])], array([100, 50, 0, 999900]))) self.assert_(isinf(pop_2002[3]))
def test_my_inputs(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table( table_name='gridcells', table_data={ 'grid_id': array([1,2,3]), 'is_in_development_type_group_mixed_use': array([0,1,1]), } ) storage.write_table( table_name='households', table_data={ 'household_id': array([1,2,3,4,5,6]), 'is_young': array([1,0,1,0,0,1]) } ) storage.write_table( table_name='urbansim_constants', table_data={ "young_age": array([30]), } ) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell') household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = household_x_gridcell.get_attribute(self.variable_name) should_be = array([[0,1,1], [0,0,0], [0,1,1], [0,0,0], [0,0,0], [0,1,1]]) self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_full_tree(self): """Percent of households within walking distance that are mid-income, given that the decision-making household is mid-income. (If the household is not mid-income the corresponding value is 0.) """ storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='gridcells', table_data={ 'grid_id': array([1, 2, 3, 4]), 'relative_x': array([1, 2, 1, 2]), 'relative_y': array([1, 1, 2, 2]), }) storage.write_table( table_name='households', table_data={ 'household_id': array([1, 2, 3, 4, 5, 6]), 'grid_id': array([1, 2, 3, 4, 2, 2]), 'income': array([1000, 5000, 3000, 10000, 1000, 8000]) # 3000 <= mid income <= 5000 }) storage.write_table(table_name='urbansim_constants', table_data={ "walking_distance_circle_radius": array([150]), 'cell_size': array([150]), 'low_income_fraction': array([.25]), 'mid_income_fraction': array([.3]), }) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell') household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = household_x_gridcell.get_attribute(self.variable_name) should_be = array([[0, 0, 0, 0], [28.57143, 27.27273, 60, 28.57143], [28.57143, 27.27273, 60, 28.57143], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]) self.assert_(ma.allclose(values, should_be, rtol=1e-7), msg="Error in " + self.variable_name)
def test_my_inputs(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ "zone_id": array([1, 2]), "trip_mode_share_ride2": array([1, 8]), "trip_mode_drive_alone": array([2, 9]), "trip_mode_share_ride3": array([8, 4]), }) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) zone = dataset_pool.get_dataset('zone') zone.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = zone.get_attribute(self.variable_name) should_be = array([9.0 / 11.0, 12.0 / 21.0]) self.assert_(ma.allclose(values, should_be, rtol=1e-7), msg="Error in " + self.variable_name)
def test(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='tests', table_data={ 'id': array([1, 2]), 'attr1': array([1, 2]), 'attr2': array([10, 100]), }) dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage) test_x_test = dataset_pool.get_dataset('test_x_test') test_x_test.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = test_x_test.get_attribute(self.variable_name) should_be = array([[1. / 10, 2. / 10], [1. / 100, 2. / 100]]) self.assert_(ma.allclose(values, should_be, rtol=1e-20), msg="Error in " + self.variable_name)
def test_aggregate_fully_qualified_variable(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'zone_id': array([1, 2]), }) # it would be nicer to call this table 'gridcells' but we want to use the existing test variable storage.write_table(table_name='tests', table_data={ 'a_dependent_variable': array([4, 8, 0.5, 1]), 'id': array([1, 2, 3, 4]), 'zone_id': array([1, 2, 1, 2]), }) zone_dataset = Dataset(in_storage=storage, in_table_name='zones', id_name="zone_id", dataset_name='zone') test_dataset = Dataset(in_storage=storage, in_table_name='tests', id_name="id", dataset_name='tests') dataset_pool = DatasetPool() dataset_pool._add_dataset('zone', zone_dataset) dataset_pool._add_dataset('tests', test_dataset) values = zone_dataset.compute_variables( ['zone.aggregate(opus_core.tests.a_test_variable)'], dataset_pool=dataset_pool) should_be = array([45, 90]) self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in test_aggregate_fully_qualified_variable")
def test_versioning_with_aggregate(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='households', table_data={ 'my_variable': array([4, 8, 2, 1, 40, 23, 78]), 'id0': arange(7) + 1, 'id1': array([1, 3, 1, 2, 3, 2, 1]) }) storage.write_table(table_name='fazes', table_data={ 'id1': array([1, 2, 3]), 'id2': array([1, 2, 1]) }) storage.write_table(table_name='fazdistr', table_data={'id2': array([1, 2])}) ds0 = Dataset(in_storage=storage, in_table_name='households', id_name="id0", dataset_name="myhousehold") ds1 = Dataset(in_storage=storage, in_table_name='fazes', id_name="id1", dataset_name="myfaz") ds2 = Dataset(in_storage=storage, in_table_name='fazdistr', id_name="id2", dataset_name="myfazdistr") dataset_pool = DatasetPool() dataset_pool._add_dataset('myhousehold', ds0) dataset_pool._add_dataset('myfaz', ds1) dataset_pool._add_dataset('myfazdistr', ds2) ds0.modify_attribute("id1", array([1, 3, 1, 2, 3, 2, 1])) # has version 1 variable = 'my_var = myfazdistr.aggregate(10.0*myhousehold.my_variable, intermediates=[myfaz])' ds2.compute_variables([variable], dataset_pool=dataset_pool) self.assert_(ds2.get_version("my_var") == 0) ds2.compute_variables([variable], dataset_pool=dataset_pool) self.assert_( ds2.get_version("my_var") == 0) # version should stay the same, i.e. it should not recompute ds0.touch_attribute("id1") # has version 2 ds2.compute_variables([variable], dataset_pool=dataset_pool) self.assert_( ds2.get_version("my_var") == 1) # version should be 1, i.e. it should recompute when id changes ds1.touch_attribute("id2") # has version 1 ds2.compute_variables([variable], dataset_pool=dataset_pool) self.assert_( ds2.get_version("my_var") == 2) # version should be 2, i.e. it should recompute when id changes
def test_aggregate_bad_function(self): # the 'function' argument must be a single name -- test this expr = "zone.aggregate(2*gridcell.my_variable, function=3+4)" storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'zone_id': array([1, 2]), }) storage.write_table(table_name='gridcells', table_data={ 'my_variable': array([4, 8, 0.5, 1]), 'grid_id': array([1, 2, 3, 4]), 'zone_id': array([1, 2, 1, 2]), }) zone_dataset = Dataset(in_storage=storage, in_table_name='zones', id_name="zone_id", dataset_name='zone') gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells', id_name="grid_id", dataset_name='gridcell') dataset_pool = DatasetPool() dataset_pool._add_dataset('gridcell', gridcell_dataset) dataset_pool._add_dataset('zone', zone_dataset) self.assertRaises(ValueError, zone_dataset.compute_variables, [expr], dataset_pool=dataset_pool)
def test_aggregate_unqualified_name(self): # test aggregate without the dataset provided for the variable being aggregated expr = 'zone.aggregate(my_variable)' # to be correct, should be 'zone.aggregate(gridcell.my_variable)' storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'zone_id': array([1, 2]), }) storage.write_table(table_name='gridcells', table_data={ 'my_variable': array([4, 8, 0.5, 1]), 'grid_id': array([1, 2, 3, 4]), 'zone_id': array([1, 2, 1, 2]), }) zone_dataset = Dataset(in_storage=storage, in_table_name='zones', id_name="zone_id", dataset_name='zone') gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells', id_name="grid_id", dataset_name='gridcell') dataset_pool = DatasetPool() dataset_pool._add_dataset('gridcell', gridcell_dataset) dataset_pool._add_dataset('zone', zone_dataset) self.assertRaises(ValueError, zone_dataset.compute_variables, [expr], dataset_pool=dataset_pool)
def test_aggregate(self): # test aggregate with no function specified (so defaults to 'sum') storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'zone_id': array([1, 2]), }) storage.write_table(table_name='gridcells', table_data={ 'my_variable': array([4, 8, 0.5, 1]), 'grid_id': array([1, 2, 3, 4]), 'zone_id': array([1, 2, 1, 2]), }) zone_dataset = Dataset(in_storage=storage, in_table_name='zones', id_name="zone_id", dataset_name='zone') gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells', id_name="grid_id", dataset_name='gridcell') dataset_pool = DatasetPool() dataset_pool._add_dataset('gridcell', gridcell_dataset) dataset_pool._add_dataset('zone', zone_dataset) values = zone_dataset.compute_variables( ['zone.aggregate(gridcell.my_variable)'], dataset_pool=dataset_pool) should_be = array([4.5, 9]) self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in aggregate")
def test_run_model(self): dataset_pool = DatasetPool( storage=self.storage, package_order=['urbansim_parcel', 'urbansim']) model = EmploymentEventsModel(dataset_pool=dataset_pool) job_set = dataset_pool.get_dataset('job') # run 2006 model.run(dataset_pool.get_dataset('employment_event'), job_set, current_year=2006) results = self.get_count_all_sectors_and_areas(job_set) expected_results = array( [4100, 100, 1000, 5500, 1000, 1000, 1000, 1000, 1000]) self.assertEqual(ma.allequal(results, expected_results), True) # check locations buildings = dataset_pool.get_dataset('building') jobs_in_sec_1 = buildings.compute_variables( ['urbansim_parcel.building.number_of_jobs_of_sector_1'], dataset_pool=dataset_pool) self.assertEqual(ma.allequal(jobs_in_sec_1, array([4100, 5500, 1000])), True) # run 2008 model.run(dataset_pool.get_dataset('employment_event'), job_set, current_year=2008) results = self.get_count_all_sectors_and_areas(job_set) expected_results = array( [4100, 100, 1000, 5500, 1500, 1000, 1000, 1000, 900]) self.assertEqual(ma.allequal(results, expected_results), True)
def test_disaggregate_fully_qualified_variable(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'zone_id': array([1, 2, 3, 4]), 'id': array([1, 2, 1, 2]) }) # it would be nicer to call this table 'fazzes' but we want to use the existing test variable storage.write_table(table_name='test_locations', table_data={ 'cost': array([4, 8]), 'id': array([1, 2]) }) zone_dataset = Dataset(in_storage=storage, in_table_name='zones', id_name="zone_id", dataset_name="zone") test_dataset = Dataset(in_storage=storage, in_table_name='test_locations', id_name="id", dataset_name='test_location') dataset_pool = DatasetPool() dataset_pool._add_dataset('zone', zone_dataset) dataset_pool._add_dataset('test_location', test_dataset) values = zone_dataset.compute_variables( ['zone.disaggregate(opus_core.test_location.cost_times_3)'], dataset_pool=dataset_pool) should_be = array([12, 24, 12, 24]) self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in test_disaggregate_fully_qualified_variable")
def test_aggregate_squared_with_cast(self): # more exercising the SUBPATTERN_NUMBER_OF_AGENTS_WITH_CAST tree pattern storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'zone_id': array([1, 2]), }) storage.write_table(table_name='gridcells', table_data={ 'my_variable': array([4, 8, 0.5, 1]), 'grid_id': array([1, 2, 3, 4]), 'zone_id': array([1, 2, 1, 2]), }) zone_dataset = Dataset(in_storage=storage, in_table_name='zones', id_name="zone_id", dataset_name='zone') gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells', id_name="grid_id", dataset_name='gridcell') dataset_pool = DatasetPool() dataset_pool._add_dataset('gridcell', gridcell_dataset) dataset_pool._add_dataset('zone', zone_dataset) values = zone_dataset.compute_variables( ['(zone.aggregate(gridcell.my_variable)**2).astype(float32)'], dataset_pool=dataset_pool) should_be = array([4.5 * 4.5, 9.0 * 9.0]) self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in aggregate")
def test_aggregate_all_mean(self): storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='zones', table_data={ 'my_variable': array([4, 8, 10, 1]), 'id': array([1, 2, 3, 4]), }) storage.write_table(table_name='regions', table_data={ "id": array([1]), }) ds = Dataset(in_storage=storage, in_table_name='zones', id_name="id", dataset_name="myzone") ds2 = Dataset(in_storage=storage, in_table_name='regions', id_name="id", dataset_name="myregion") dataset_pool = DatasetPool() dataset_pool._add_dataset('myzone', ds) dataset_pool._add_dataset('myregion', ds2) ds2.compute_variables([ "myvar = myregion.aggregate_all(myzone.my_variable, function=mean)" ], dataset_pool=dataset_pool) values = ds2.get_attribute("myvar") should_be = array([5.75]) self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in aggregate_all_mean")
def _compute_variable_for_prior_year(self, dataset, full_name, time, resources=None): """Create a new dataset for this variable, compute the variable, and then return the values for this variable.""" calling_dataset_pool = SessionConfiguration().get_dataset_pool() calling_time = SimulationState().get_current_time() SimulationState().set_current_time(time) # Do not flush any variables when computing dependencies for a lag variable. prior_flush_state = SimulationState().get_flush_datasets() SimulationState().set_flush_datasets(False) try: # Get an empty dataset pool with same search paths. my_dataset_pool = DatasetPool( package_order=calling_dataset_pool.get_package_order(), storage=AttributeCache()) try: ds = dataset.empty_dataset_like_me(in_storage=AttributeCache()) except FileNotFoundError: ## necessary when a dataset is not cached, but created on-the-fly, e.g submarket ds = my_dataset_pool.get_dataset(dataset.dataset_name) # Don't pass any datasets via resources, since they may be from a different time. my_resources = Resources(resources) for key in my_resources: if isinstance(key, Dataset): del my_resources[key] ds.compute_variables(full_name, my_dataset_pool, resources=my_resources) values = ds.get_attribute(full_name) return values finally: SimulationState().set_current_time(calling_time) SimulationState().set_flush_datasets(prior_flush_state)
def test_my_inputs(self): # suppose that there are 4 grid cells, and the number of residential units within walking distance in # each of the cells is [7, 100, 0, 24] # for the ln of the number of residential units, if the number of units is 0 we use the bounded log, so # that the corresponding value is also 0 storage = StorageFactory().get_storage('dict_storage') storage.write_table(table_name='gridcells', table_data={ 'grid_id': array([1, 2, 3, 4]), 'ln_residential_units_within_walking_distance': array([log(7), log(100), 0, log(24)]), }) storage.write_table(table_name='households', table_data={ 'household_id': array([1, 2, 3]), 'persons': array([4, 1, 10]), }) dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage) household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell') household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool) values = household_x_gridcell.get_attribute(self.variable_name) should_be = array([[log(7) * 4, log(100) * 4, 0, log(24) * 4], [log(7), log(100), 0, log(24)], [log(7) * 10, log(100) * 10, 0, log(24) * 10]]) self.assert_(ma.allclose(values, should_be, rtol=1e-7), msg="Error in " + self.variable_name)