class QCDataSizeTests(opus_unittest.OpusTestCase):
    """QC test: DatasetSizeModel flags cached tables whose attributes
    have inconsistent lengths within a year."""

    def setUp(self):
        # one fresh cache directory per test
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache')
        self.table_name = 'test_table'
        self.storage = AttributeCache(self.temp_dir)

    def tearDown(self):
        if os.path.exists(self.temp_dir):
            rmtree(self.temp_dir)

    def test_detect(self):
        # Build a cache where the same table carries attributes of different
        # length: two rows written under 1980, three rows under 1979.
        SimulationState().set_current_time(1980)
        # file name will be e.g. 'int_column.li4' for a little-endian machine
        self.storage.write_table(
            self.table_name,
            {'int_column': np.array([100, 70], dtype="int32"),
             'bool_column': np.array([False, True])})
        SimulationState().set_current_time(1979)
        self.storage.write_table(
            self.table_name,
            {'flt_column': np.array([10, 70, 5.7], dtype="float32")})
        res = DatasetSizeModel(self.temp_dir).run()
        SimulationState().set_current_time(2000)
        # exactly one year should be reported as inconsistent
        self.assertEqual(res.sum(), 1)
        # reset time to the original one
        self.assertEqual(SimulationState().get_current_time(), 2000)
def test(self):
    """Round-trip a cached table through the dbf exporter.

    Writes a two-attribute table into the attribute cache, verifies the
    expected cache file names, exports the table to a dbf file, then reads
    the dbf back and compares every record against the original values
    (to 4 decimal places for float fields).
    """
    # Set up a test cache
    storage = AttributeCache(cache_directory=self._temp_dir)
    SimulationState().set_current_time(2000)
    table_name = 'foo'
    values = {
        'attribute1': array([1, 2, 3], dtype=int32),
        'attribute2': array([4, 5, 6], dtype=int32),
    }
    storage.write_table(table_name, values)
    table_dir = os.path.join(self._temp_dir, '2000', table_name)
    self.assertTrue(os.path.exists(table_dir))
    actual = set(os.listdir(table_dir))
    expected = set([
        'attribute1.%(endian)si4' % replacements,
        'attribute2.%(endian)si4' % replacements
    ])
    self.assertEqual(expected, actual)
    exporter = ExportCacheToDbfTableCommand(
        cache_directory=self._temp_dir,
        year='2000',
        table_name=table_name,
        dbf_directory=self._temp_dir,
        decimalcount=4,
    )
    exporter.execute()
    out_storage = dbf_storage(self._temp_dir)
    db = _dbf_class(out_storage._get_file_path_for_table(table_name))
    length = max(len(values[key]) for key in values.keys())
    i = 0
    field_type = {}
    # map upper-cased dbf field names to their one-letter type codes
    for name, ftype in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = ftype
    for rec in db:
        for key in values.keys():
            # BUG FIX: the original compared with "is 'F'", an identity
            # check that only succeeds through CPython string interning;
            # compare by value instead.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(values[key][i], rec[key], 4)
            else:
                self.assertEqual(values[key][i], rec[key])
        i = i + 1
    self.assertEqual(
        length, i,
        msg="More values expected than the dbf file contains")
    db.close()
def setUp(self):
    """Create three caches for comparison tests: a two-year simulation
    cache, a one-year reference cache, and a second simulation cache
    whose table has a different number of rows."""
    table_name = 'test_table'
    # simulation cache holding the table in years 2010 and 2000
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache')
    storage = AttributeCache(self.temp_dir)
    # reference cache directory
    self.temp_refdir = tempfile.mkdtemp(prefix='opus_tmp_reference_cache')
    refstorage = AttributeCache(self.temp_refdir)
    # Create two caches with the same table but different attributes.
    SimulationState().set_current_time(2010)
    storage.write_table(
        table_name,
        {'int_column': np.array([100, 70], dtype="int32"),
         'bool_column': np.array([False, True])})
    SimulationState().set_current_time(2000)
    storage.write_table(
        table_name,
        {'flt_column': np.array([10, 70], dtype="float32")})
    # populate the reference cache (single year)
    SimulationState().set_current_time(2005)
    refstorage.write_table(
        table_name,
        {'str_column': np.array(['a', 'b']),
         'bool_column': np.array([False, True])})
    # a second simulation cache where the table has a different length
    self.temp_dir2 = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache2')
    storage = AttributeCache(self.temp_dir2)
    SimulationState().set_current_time(2010)
    storage.write_table(
        table_name,
        {'str_column': np.array(['a', 'b', 'c']),
         'bool_column': np.array([False, True, True])})
def test(self):
    """Export a cached table to dbf and verify the round trip.

    Caches two int32 attributes, checks the cache file names, runs the
    exporter, and compares each dbf record with the source data (float
    fields to 4 decimal places).
    """
    # Set up a test cache
    storage = AttributeCache(cache_directory=self._temp_dir)
    SimulationState().set_current_time(2000)
    table_name = 'foo'
    values = {
        'attribute1': array([1, 2, 3], dtype=int32),
        'attribute2': array([4, 5, 6], dtype=int32),
    }
    storage.write_table(table_name, values)
    table_dir = os.path.join(self._temp_dir, '2000', table_name)
    self.assertTrue(os.path.exists(table_dir))
    actual = set(os.listdir(table_dir))
    expected = set(['attribute1.%(endian)si4' % replacements,
                    'attribute2.%(endian)si4' % replacements])
    self.assertEqual(expected, actual)
    exporter = ExportCacheToDbfTableCommand(
        cache_directory=self._temp_dir,
        year='2000',
        table_name=table_name,
        dbf_directory=self._temp_dir,
        decimalcount=4,
    )
    exporter.execute()
    out_storage = dbf_storage(self._temp_dir)
    db = _dbf_class(out_storage._get_file_path_for_table(table_name))
    length = max(len(values[key]) for key in values.keys())
    i = 0
    field_type = {}
    # collect the dbf type code ('F' float, 'N' numeric, ...) per field
    for name, ftype in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = ftype
    for rec in db:
        for key in values.keys():
            # BUG FIX: was "is 'F'" — an identity test on a string literal
            # that only passes thanks to CPython interning; use equality.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(values[key][i], rec[key], 4)
            else:
                self.assertEqual(values[key][i], rec[key])
        i = i + 1
    self.assertEqual(length, i,
                     msg="More values expected than the dbf file contains")
    db.close()
class MergeCache(Model):
    """Merge multiple years of one cache directory into a single one that
    can be used for example for a warm start."""

    def __init__(self, directory):
        # all reads/writes go through one AttributeCache on the directory
        self.storage = AttributeCache(directory)

    def run(self, year, cleanup_settings=None):
        """Merge all cached years into ``year``.

        ``cleanup_settings`` maps a table name to a list of attribute
        names that should be deleted from that table before it is
        re-written into the target year.
        """
        # BUG FIX: the original used a mutable default argument ({}),
        # which is shared across calls; use None and create a fresh dict.
        if cleanup_settings is None:
            cleanup_settings = {}
        SimulationState().set_current_time(year)
        tables = self.storage.get_table_names()
        # cleanup: strip unwanted attributes, then re-write each table
        for table in tables:
            tabdata = self.storage.load_table(table)
            if table in cleanup_settings:
                for attr in cleanup_settings[table]:
                    if attr in tabdata:
                        logger.log_status('Deleting attribute %s in %s.' % (attr, table))
                        del tabdata[attr]
            self.storage.write_table(table, tabdata)
        logger.log_status('Deleting all computed tables.')
        self.storage.delete_computed_tables()
        logger.log_status('Cache directory merged into %s' % year)
def setUp(self):
    """Build a temporary attribute cache with the four test tables and
    fetch the corresponding datasets from a fresh dataset pool."""
    tables = {
        'households': {
            'household_id': array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
            'blockgroup_id': array([1, 1, 1, 1, 2, 2, 2, 2, 2, 3]),
        },
        'fractions': {
            # fraction_id is unused, but necessary to use dataset_pool to get data
            'fraction_id': array([1, 2, 3, 4, 5, 6, 6]),
            'blockgroup_id': array([1, 1, 1, 2, 2, 2, 3]),
            'zone_id': array([1, 2, 3, 3, 4, 5, 6]),
            'fraction': array([0.25, 0.25, 0.5, 0.2, 0.4, 0.4, 1.0]),
        },
        # blockgroups/zones are unused by the model, for result verification only
        'blockgroups': {'blockgroup_id': array([1, 2, 3])},
        'zones': {'zone_id': array([1, 2, 3, 4, 5, 6])},
    }
    self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
    SimulationState().set_cache_directory(self.tmp_dir)
    cache = AttributeCache()
    self.dataset_pool = SessionConfiguration(
        new_instance=True,
        package_order=['urbansim', 'opus_core'],
        in_storage=cache).get_dataset_pool()
    for name, data in tables.items():
        cache.write_table(table_name=name, table_data=data)
    self.household = self.dataset_pool.get_dataset('household')
    self.fraction = self.dataset_pool.get_dataset('fraction')
    self.blockgroup = self.dataset_pool.get_dataset('blockgroup')
    self.zone = self.dataset_pool.get_dataset('zone')
def setUp(self):
    """Cache the households/fractions/blockgroups/zones fixtures and load
    their datasets through a new session's dataset pool."""
    household_data = {
        'household_id': array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
        'blockgroup_id': array([1, 1, 1, 1, 2, 2, 2, 2, 2, 3]),
    }
    fraction_data = {
        # fraction_id unused, but necessary to use dataset_pool to get data
        'fraction_id': array([1, 2, 3, 4, 5, 6, 6]),
        'blockgroup_id': array([1, 1, 1, 2, 2, 2, 3]),
        'zone_id': array([1, 2, 3, 3, 4, 5, 6]),
        'fraction': array([0.25, 0.25, 0.5, 0.2, 0.4, 0.4, 1.0]),
    }
    # blockgroup/zone tables are unused by the model; verification only
    blockgroup_data = {'blockgroup_id': array([1, 2, 3])}
    zone_data = {'zone_id': array([1, 2, 3, 4, 5, 6])}
    self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
    SimulationState().set_cache_directory(self.tmp_dir)
    cache = AttributeCache()
    self.dataset_pool = SessionConfiguration(
        new_instance=True,
        package_order=['urbansim', 'opus_core'],
        in_storage=cache).get_dataset_pool()
    for name, data in [('households', household_data),
                       ('fractions', fraction_data),
                       ('blockgroups', blockgroup_data),
                       ('zones', zone_data)]:
        cache.write_table(table_name=name, table_data=data)
    self.household = self.dataset_pool.get_dataset('household')
    self.fraction = self.dataset_pool.get_dataset('fraction')
    self.blockgroup = self.dataset_pool.get_dataset('blockgroup')
    self.zone = self.dataset_pool.get_dataset('zone')
class Tests(opus_unittest.OpusTestCase):
    """Tests for ScheduledEventsModel against small synthetic jobs and
    buildings datasets held in a temporary attribute cache."""

    def setUp(self):
        # Synthetic tables; arrays within each dict are positionally aligned.
        building_data = {
            "building_id": array([1, 2, 3, 4, 5, 6, 7, 8]),
            "parcel_id": array([1, 2, 2, 3, 4, 4, 5, 5]),
            "non_residential_sqft": array([6, 2, 3, 6, 1, 2, 5, 0]),
            "residential_units": array([0, 0, 0, 0, 0, 0, 1, 1]),
            "price_per_unit": array([50, 21, 32, 15, 60, 90, 100, 200]),
        }
        parcel_data = {
            "parcel_id": array([1, 2, 3, 4, 5]),
            "generic_land_use_type_id": array([6, 6, 3, 4, 1]),
            "raz_id": array([3, 4, 5, 5, 6]),
        }
        job_data = {
            "job_id": array([1, 2, 3, 4, 5, 6, 7, 8]),
            "building_id": array([1, 1, 2, 3, 6, 1, 6, 4]),
            # derived through buildings/parcels, kept for reference:
            #'parcel_id': array([ 1, 1, 2, 2, 4, 1, 4, 3]),
            #'raz_id': array([ 3, 3, 4, 4, 5, 3, 5, 5]),
            "sector_id": array([13, 12, 13, 12, 13, 13, 12, 13]),
            "dummy_id": array([1, 2, 3, 4, 5, 6, 7, 8]),
        }
        self.tmp_dir = tempfile.mkdtemp(prefix="urbansim_tmp")
        SimulationState().set_cache_directory(self.tmp_dir)
        self.attribute_cache = AttributeCache()
        self.dataset_pool = SessionConfiguration(
            new_instance=True, package_order=["urbansim", "opus_core"], in_storage=self.attribute_cache
        ).get_dataset_pool()
        # storage = StorageFactory().get_storage('flt_storage', storage_location=self.tmp_dir)
        self.attribute_cache.write_table(table_name="buildings", table_data=building_data)
        self.attribute_cache.write_table(table_name="parcels", table_data=parcel_data)
        # self.attribute_cache.write_table(table_name = 'households', table_data = household_data)
        self.attribute_cache.write_table(table_name="jobs", table_data=job_data)
        # self.attribute_cache.write_table(table_name = 'persons', table_data = person_data)
        # self.attribute_cache.write_table(table_name = 'refinements', table_data = refinement_data)
        # self.dataset_pool = DatasetPool(storage = storage, package_order = ['urbansim_parcel', 'urbansim', 'opus_core'])
        # self.refinement = self.dataset_pool.get_dataset('refinement')
        self.jobs = self.dataset_pool.get_dataset("job")
        # self.persons = self.dataset_pool.get_dataset('person')
        # self.hhs = self.dataset_pool.get_dataset('household')
        self.buildings = self.dataset_pool.get_dataset("building")
        # self.buildings.compute_variables('raz_id=building.disaggregate(parcel.raz_id)', self.dataset_pool)

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    def test_add_and_remove_agents(self):
        """Apply remove/add/target events to the jobs dataset and verify
        the resulting counts per sector/raz/building."""
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action": array(["remove", "remove", "add", "add", "target"]),
            "amount": array([1, 1, 4, 3, 7]),
            "sector_id": array([13, 12, -1, 11, 12]),
            "building_id": array([-1, -1, -1, 8, -1]),
            "raz_id": array([3, 5, 5, -1, -1]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage("dict_storage")
        storage.write_table(table_name="events", table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage, in_table_name="events", id_name=[])
        model = ScheduledEventsModel(self.jobs, scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # check the total number of jobs after all five events have run
        results = self.jobs.size()
        should_be = 18
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # examine each action in turn:
        # "remove 1" in sector 13 / raz 3
        results = logical_and(self.jobs.get_attribute("sector_id") == 13, self.jobs.get_attribute("raz_id") == 3).sum()
        should_be = 2 - 1
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # "remove 1" in sector 12 / raz 5
        results = logical_and(self.jobs.get_attribute("sector_id") == 12, self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 1 - 1
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # "add 4" in raz 5 (net of the one removed above)
        results = (self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 3 - 1 + 4
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # "add 3" in sector 11 / building 8
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 11, self.jobs.get_attribute("building_id") == 8
        ).sum()
        should_be = 0 + 3
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # "target 7" in sector 12
        results = (self.jobs.get_attribute("sector_id") == 12).sum()
        should_be = 7
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))

    def DELtest_add_and_remove_agents_from_geography_other_than_location_set(self):
        """this has been included in the above test """
        # NOTE: disabled via the DEL prefix; kept for reference.
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action": array(["remove", "remove", "add", "add", "target"]),
            "amount": array([1, 1, 4, 3, 7]),
            "sector_id": array([13, 13, -1, 11, 12]),
            "building_id": array([-1, -1, -1, 8, -1]),
            "raz_id": array([3, 4, 5, -1, -1]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage("dict_storage")
        storage.write_table(table_name="events", table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage, in_table_name="events", id_name=[])
        model = ScheduledEventsModel(self.jobs, scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # check the total number of jobs after running the model
        results = self.jobs.size()
        should_be = 17
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # examine each action in turn:
        results = logical_and(self.jobs.get_attribute("sector_id") == 13, self.jobs.get_attribute("raz_id") == 3).sum()
        should_be = 2 - 1
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = logical_and(self.jobs.get_attribute("sector_id") == 13, self.jobs.get_attribute("raz_id") == 4).sum()
        should_be = 1 - 1
        self.assertEqual(should_be, results,
                         "Error, should_be: %s, but result: %s" % (should_be, results))
        results = (self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 2 + 4
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 11, self.jobs.get_attribute("building_id") == 8
        ).sum()
        should_be = 0 + 3
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = (self.jobs.get_attribute("sector_id") == 12).sum()
        should_be = 7
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))

    def test_modify_dataset_attribute(self):
        """set_value/subtract_value/add_value/multiply_value events on
        building attributes, run over two consecutive years."""
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2001, 2001]),
            "action": array(
                ["set_value", "subtract_value", "add_value", "multiply_value", "subtract_value", "multiply_value"]
            ),
            "amount": array([4, 2, 3, 1.1, 1, 0.9]),
            "attribute": array(
                [
                    "residential_units",
                    "non_residential_sqft",
                    "non_residential_sqft",
                    "price_per_unit",
                    "non_residential_sqft",
                    "price_per_unit",
                ]
            ),
            "building_id": array([3, 3, 5, -1, 3, -1]),
            "parcel_id": array([-1, -1, -1, 5, -1, 5]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage("dict_storage")
        storage.write_table(table_name="events", table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage, in_table_name="events", id_name=[])
        model = ScheduledEventsModel(self.buildings, scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # attribute events must not change the number of buildings
        results = self.buildings.size()
        should_be = 8
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # examine each action in turn:
        index = self.buildings.get_attribute("building_id") == 3
        results = (
            self.buildings.get_attribute("residential_units")[index],
            self.buildings.get_attribute("non_residential_sqft")[index],
        )
        should_be = (4, 1)
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        index = self.buildings.get_attribute("building_id") == 5
        results = self.buildings.get_attribute("non_residential_sqft")[index]
        should_be = 1 + 3
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # multiply_value applied to both buildings on parcel 5
        results = self.buildings.get_attribute("price_per_unit")
        should_be = array([50, 21, 32, 15, 60, 90, 100 * 1.1, 200 * 1.1])
        self.assertTrue(allclose(should_be, results), "Error, should_be: %s, but result: %s" % (should_be, results))
        # second year of events
        model.run(year=2001)
        index = self.buildings.get_attribute("building_id") == 3
        results = (
            self.buildings.get_attribute("residential_units")[index],
            self.buildings.get_attribute("non_residential_sqft")[index],
        )
        should_be = (4, 0)
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = self.buildings.get_attribute("price_per_unit")
        should_be = array([50, 21, 32, 15, 60, 90, 100 * 1.1 * 0.9, 200 * 1.1 * 0.9])
        self.assertTrue(allclose(should_be, results), "Error, should_be: %s, but result: %s" % (should_be, results))

    def test_demolish_buildings_on_a_parcel(self):
        """test demolish buildings, create new buildings, and convert an existing building """
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action": array(["remove", "add", "set_value", "set_value", "set_value"]),
            "amount": array([4, 2, 8, 7, 150]),
            "attribute": array(["", "", "residential_units", "non_residential_sqft", "price_per_unit"]),
            "building_id": array([3, -1, 5, 5, 5]),
            "parcel_id": array([-1, 1, -1, -1, -1]),
            "residential_units": array([-1, 2, -1, -1, -1]),
            "non_residential_sqft": array([-1, 1, -1, -1, -1]),
            "price_per_unit": array([-1, 99, -1, -1, -1]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage("dict_storage")
        storage.write_table(table_name="events", table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage, in_table_name="events", id_name=[])
        model = ScheduledEventsModel(self.buildings, scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # 8 buildings - 1 removed + 2 added = 9
        results = self.buildings.size()
        should_be = 9
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # the two newly added buildings get ids above the existing maximum
        index = self.buildings.get_attribute("building_id") > 8
        results = array(
            [
                self.buildings.get_attribute("parcel_id")[index],
                self.buildings.get_attribute("residential_units")[index],
                self.buildings.get_attribute("non_residential_sqft")[index],
                self.buildings.get_attribute("price_per_unit")[index],
            ]
        )
        should_be = array([[1, 1], [2, 2], [1, 1], [99, 99]])
        self.assertTrue(allclose(should_be, results), "Error, should_be: %s, but result: %s" % (should_be, results))
        # building 5 was converted in place by the three set_value events
        index = where(self.buildings.get_attribute("building_id") == 5)
        results = self.buildings.get_multiple_attributes(
            ["parcel_id", "residential_units", "non_residential_sqft", "price_per_unit"]
        )[index]
        should_be = array([4, 8, 7, 150])
        self.assertTrue(allclose(should_be, results), "Error, should_be: %s, but result: %s" % (should_be, results))
def setUp(self):
    """Cache the buildings/parcels/jobs/households/persons/refinements
    fixtures and load their datasets through a fresh dataset pool."""
    building_data = {
        'building_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        'parcel_id': array([1, 2, 2, 3, 4, 4, 5, 5]),
        'non_residential_sqft': array([6, 2, 3, 6, 1, 2, 5, 0]),
        'residential_units': array([0, 0, 0, 0, 0, 0, 1, 1]),
    }
    parcel_data = {
        'parcel_id': array([1, 2, 3, 4, 5]),
        'generic_land_use_type_id': array([6, 6, 3, 4, 1]),
        'raz_id': array([3, 4, 5, 5, 6]),
    }
    job_data = {
        'job_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        'building_id': array([1, 1, 2, 3, 6, 1, 6, 4]),
        'sector_id': array([13, 12, 13, 12, 13, 13, 12, 13]),
        'dummy_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
    }
    household_data = {
        'household_id': array([1, 2]),
        'building_id': array([7, 8]),
        'persons': array([3, 4]),
    }
    person_data = {
        'person_id': array([1, 2, 3, 4, 5, 6, 7]),
        'household_id': array([1, 1, 1, 2, 2, 2, 2]),
        'job_id': array([2, 1, -1, -1, 3, 4, 7]),
    }
    # refinement events spread over transaction years 2021-2024
    refinement_data = {
        'refinement_id': arange(1, 8),
        'year': array([2021, 2021, 2021, 2022, 2023, 2024, 2024]),
        'transaction_id': array([1, 1, 1, 2, 3, 1, 1]),
        'action': array(['subtract', 'subtract', 'add', 'target', 'add',
                         'add', 'set_value']),
        'amount': array([2, 1, 4, 7, 1, 1, -1]),
        'agent_dataset': array(['job', 'job', 'job', 'household',
                                'household', 'household', 'person']),
        'agent_expression': array(['job.sector_id==13',
                                   'job.sector_id==13',
                                   '',
                                   'household.household_id>0',
                                   'household.persons>5',
                                   'household.persons==3',
                                   'person.job_id']),
        'location_expression': array(['urbansim.building.raz_id==3',
                                      'urbansim.building.raz_id==4',
                                      '(urbansim.building.raz_id==5) * (building.disaggregate(parcel.generic_land_use_type_id)==4)',
                                      'urbansim.building.raz_id==6',
                                      'urbansim.building.raz_id==6',
                                      'urbansim.building.raz_id==6',
                                      'household.refinement_id==6']),
        'location_capacity_attribute': array(['',
                                              'non_residential_sqft',
                                              'non_residential_sqft',
                                              'residential_units',
                                              'residential_units',
                                              '', '']),
    }
    self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
    SimulationState().set_cache_directory(self.tmp_dir)
    cache = AttributeCache()
    self.dataset_pool = SessionConfiguration(
        new_instance=True,
        package_order=['urbansim', 'opus_core'],
        in_storage=cache).get_dataset_pool()
    for table, data in [('buildings', building_data),
                        ('parcels', parcel_data),
                        ('households', household_data),
                        ('jobs', job_data),
                        ('persons', person_data),
                        ('refinements', refinement_data)]:
        cache.write_table(table_name=table, table_data=data)
    self.refinement = self.dataset_pool.get_dataset('refinement')
    self.jobs = self.dataset_pool.get_dataset('job')
    self.persons = self.dataset_pool.get_dataset('person')
    self.hhs = self.dataset_pool.get_dataset('household')
    self.buildings = self.dataset_pool.get_dataset('building')
def setUp(self):
    """Populate a temporary attribute cache with the refinement-model
    fixture tables, then pull each dataset from the dataset pool."""
    building_data = {
        'building_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        'parcel_id': array([1, 2, 2, 3, 4, 4, 5, 5]),
        'non_residential_sqft': array([6, 2, 3, 6, 1, 2, 5, 0]),
        'residential_units': array([0, 0, 0, 0, 0, 0, 1, 1]),
    }
    parcel_data = {
        'parcel_id': array([1, 2, 3, 4, 5]),
        'generic_land_use_type_id': array([6, 6, 3, 4, 1]),
        'raz_id': array([3, 4, 5, 5, 6]),
    }
    job_data = {
        'job_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        'building_id': array([1, 1, 2, 3, 6, 1, 6, 4]),
        'sector_id': array([13, 12, 13, 12, 13, 13, 12, 13]),
        'dummy_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
    }
    household_data = {
        'household_id': array([1, 2]),
        'building_id': array([7, 8]),
        'persons': array([3, 4]),
    }
    person_data = {
        'person_id': array([1, 2, 3, 4, 5, 6, 7]),
        'household_id': array([1, 1, 1, 2, 2, 2, 2]),
        'job_id': array([2, 1, -1, -1, 3, 4, 7]),
    }
    # one row per refinement event; columns are positionally aligned
    refinement_data = {
        'refinement_id': arange(1, 8),
        'year': array([2021, 2021, 2021, 2022, 2023, 2024, 2024]),
        'transaction_id': array([1, 1, 1, 2, 3, 1, 1]),
        'action': array(['subtract', 'subtract', 'add', 'target', 'add',
                         'add', 'set_value']),
        'amount': array([2, 1, 4, 7, 1, 1, -1]),
        'agent_dataset': array(['job', 'job', 'job', 'household',
                                'household', 'household', 'person']),
        'agent_expression': array(['job.sector_id==13',
                                   'job.sector_id==13',
                                   '',
                                   'household.household_id>0',
                                   'household.persons>5',
                                   'household.persons==3',
                                   'person.job_id']),
        'location_expression': array(['urbansim.building.raz_id==3',
                                      'urbansim.building.raz_id==4',
                                      '(urbansim.building.raz_id==5) * (building.disaggregate(parcel.generic_land_use_type_id)==4)',
                                      'urbansim.building.raz_id==6',
                                      'urbansim.building.raz_id==6',
                                      'urbansim.building.raz_id==6',
                                      'household.refinement_id==6']),
        'location_capacity_attribute': array(['',
                                              'non_residential_sqft',
                                              'non_residential_sqft',
                                              'residential_units',
                                              'residential_units',
                                              '', '']),
    }
    self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
    SimulationState().set_cache_directory(self.tmp_dir)
    storage_cache = AttributeCache()
    self.dataset_pool = SessionConfiguration(
        new_instance=True,
        package_order=['urbansim', 'opus_core'],
        in_storage=storage_cache).get_dataset_pool()
    storage_cache.write_table(table_name='buildings', table_data=building_data)
    storage_cache.write_table(table_name='parcels', table_data=parcel_data)
    storage_cache.write_table(table_name='households', table_data=household_data)
    storage_cache.write_table(table_name='jobs', table_data=job_data)
    storage_cache.write_table(table_name='persons', table_data=person_data)
    storage_cache.write_table(table_name='refinements', table_data=refinement_data)
    self.refinement = self.dataset_pool.get_dataset('refinement')
    self.jobs = self.dataset_pool.get_dataset('job')
    self.persons = self.dataset_pool.get_dataset('person')
    self.hhs = self.dataset_pool.get_dataset('household')
    self.buildings = self.dataset_pool.get_dataset('building')
class Tests(opus_unittest.OpusTestCase):
    """Tests for ExternalDemographicModel: households/persons are replaced
    each year by data read from an external HDF5 demographic file."""

    def setUp(self, attribute_cache=True):
        # Base household/person tables that the model starts from.
        hh_data = {
            'household_id': array([1, 2, 3, 4]),
            'building_id': array([11, 22, 33, 22]),
            'size': array([4, 3, 2, 1]),
            'income': array([51, 52, 53, 54]) * 1000,
            'keep': array([4.1, 4.2, 4.3, 4.4]),
        }
        p_data = {
            'person_id': array([1, 2, 3, 5, 6, 7, 8, 9, 10]),
            'household_id': array([1, 1, 1, 2, 2, 3, 3, 3, 4]),
            'age': array([75, 71, 29, 56, 16, 22, 20, 96, 88]),
        }
        if attribute_cache:
            # datasets served from a cached simulation directory
            self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
            SimulationState().set_cache_directory(self.tmp_dir)
            self.attribute_cache = AttributeCache()
            self.dataset_pool = SessionConfiguration(
                new_instance=True,
                package_order=['urbansim', 'opus_core'],
                in_storage=self.attribute_cache).get_dataset_pool()
            self.attribute_cache.write_table(table_name='households', table_data=hh_data)
            self.attribute_cache.write_table(table_name='persons', table_data=p_data)
            self.hh_ds = self.dataset_pool.get_dataset('household')
            self.p_ds = self.dataset_pool.get_dataset('person')
        else:
            # plain in-memory datasets without a cache directory
            storage = StorageFactory().get_storage('dict_storage')
            storage.write_table(table_name='households', table_data=hh_data)
            self.hh_ds = Dataset(in_storage=storage, in_table_name='households', dataset_name='household')
            storage.write_table(table_name='persons', table_data=p_data)
            self.p_ds = Dataset(in_storage=storage, in_table_name='persons', dataset_name='person')
        # Build the external demographic HDF5 file: compound-dtype tables
        # for households and persons, later regrouped per year.
        self.dmgh_data_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
        self.dmgh_data_file = os.path.join(self.dmgh_data_dir, 'demographic_data.h5')
        out_fh = h5py.File(self.dmgh_data_file, 'w')
        n_hhs = 5
        hh_dtype = {'names': ['year', 'household_id', 'income', 'head_person_id'],
                    'formats': ['i4', 'i4', 'f8', 'i4']}
        hhdata = out_fh.create_dataset('household', shape=(n_hhs, ), dtype=hh_dtype,
                                       compression='gzip', compression_opts=9)
        # rows: (year, household_id, income, head_person_id)
        hhs = [(2000, 5, 65000.0, 9),
               (2000, 1, 61000.0, 3),
               (2000, 2, 62000.0, 4),
               (2000, 3, 63000.0, 7),
               (2001, 1, 71000.0, 3)]
        hhdata[:] = array(hhs, dtype=hh_dtype)
        n_ps = 16
        ps_dtype = {'names': ['year', 'person_id', 'household_id', 'age'],
                    'formats': ['i4', 'i4', 'i4', 'i4']}
        psdata = out_fh.create_dataset('person', shape=(n_ps, ), dtype=ps_dtype,
                                       compression='gzip', compression_opts=9)
        # rows: (year, person_id, household_id, age)
        ps = [(2000, 1, 1, 76), (2000, 2, 1, 72), (2000, 3, 1, 30),
              (2000, 4, 2, -1), (2000, 5, 2, 57), (2000, 6, 2, 17),
              (2000, 9, 5, 67), (2000, 10, 5, 71), (2000, 7, 3, 23),
              (2000, 8, 3, 21), (2000, 81, 3, 2), (2001, 1, 1, 77),
              (2001, 2, 1, 73), (2001, 3, 1, 31), (2001, 4, 1, 35),
              (2001, 31, 1, 1)]
        psdata[:] = array(ps, dtype=ps_dtype)
        # Split each flat table into per-year groups ("2000/household", ...)
        # and drop the flat table afterwards.
        # NOTE(review): the [:, 'year'] field-slicing form is legacy h5py
        # compound-field access — confirm it still works with the installed
        # h5py version.
        dataset_names = ['household', 'person']
        for dataset_name in dataset_names:
            for year in unique(out_fh[dataset_name][:, 'year']):
                year_str = str(year)
                group = out_fh.get(year_str, None)
                if group is None:
                    group = out_fh.create_group(year_str)
                is_year = out_fh[dataset_name][:, 'year'] == year
                group.create_dataset(dataset_name, data=out_fh[dataset_name][is_year])
            del out_fh[dataset_name]
        out_fh.close()

    def tearDown(self):
        shutil.rmtree(self.dmgh_data_dir)
        # tmp_dir only exists when setUp ran with attribute_cache=True
        if hasattr(self, 'tmp_dir'):
            shutil.rmtree(self.tmp_dir)

    def test_run1(self):
        """Run the model for 2000 and then 2001 and check that households
        and persons are replaced by the external data each year."""
        model = ExternalDemographicModel()
        # expressions used to (re)compute household attributes
        attrs_mapping = {
            'income': "household.income",
            'size': "household.number_of_agents(person)",
            'age_of_head': "household.aggregate(person.age * (person.disaggregate(household.head_person_id)==person.person_id))",
        }
        # expressions used to (re)compute person attributes
        attrs_mapping_p = {
            'household_id': 'person.household_id',
            'age': 'person.age',
            'age_months': 'age * 12',
        }
        model.run(self.dmgh_data_file,
                  self.hh_ds, self.p_ds, year=2000,
                  keep_attributes=['building_id', 'keep'],
                  keep_attributes_p=[],
                  demographic_attributes=attrs_mapping,
                  demographic_attributes_p=attrs_mapping_p,
                  dataset_pool=self.dataset_pool)
        new_hh_ds = self.dataset_pool.get_dataset('household')
        # households come back in external-file order; hh 4 is dropped,
        # hh 5 is new (hence -1 defaults for kept attributes)
        self.assert_(allclose(new_hh_ds['household_id'], array([5, 1, 2, 3])))
        self.assert_(allclose(new_hh_ds['building_id'], array([-1, 11, 22, 33])))
        self.assert_(allclose(new_hh_ds['keep'], array([-1, 4.1, 4.2, 4.3])))
        self.assert_(allclose(new_hh_ds['income'],
                              array([65, 61, 62, 63]) * 1000.0))
        self.assert_(allclose(new_hh_ds['size'], array([2, 3, 3, 3])))
        self.assert_(allclose(new_hh_ds['age_of_head'], array([67, 30, -1, 23])))
        new_p_ds = self.dataset_pool.get_dataset('person')
        # debug output of the resulting person table
        print('array([' + ', '.join([str(i) for i in new_p_ds['person_id']]) + '])')
        print('array([' + ', '.join([str(i) for i in new_p_ds['household_id']]) + '])')
        print('array([' + ', '.join([str(i) for i in new_p_ds['age']]) + '])')
        print('array([' + ', '.join([str(i) for i in new_p_ds['age_months']]) + '])')
        self.assert_(allclose(new_p_ds['person_id'], array([1, 2, 3, 4, 5, 6, 9, 10, 7, 8, 81])))
        self.assert_(allclose(new_p_ds['household_id'], array([1, 1, 1, 2, 2, 2, 5, 5, 3, 3, 3])))
        self.assert_(allclose(new_p_ds['age'], array([76, 72, 30, -1, 57, 17, 67, 71, 23, 21, 2])))
        self.assert_(allclose(new_p_ds['age_months'],
                              array([912, 864, 360, -12, 684, 204, 804, 852, 276, 252, 24])))
        self.assert_((new_p_ds['age'] * 12 == new_p_ds['age_months']).all(),
                     'age_months computed correctly')
        # second year: only household 1 is present in the 2001 data
        model.run(self.dmgh_data_file,
                  self.hh_ds, self.p_ds, year=2001,
                  keep_attributes=['building_id', 'keep'],
                  keep_attributes_p=[],
                  demographic_attributes=attrs_mapping,
                  demographic_attributes_p=attrs_mapping_p,
                  dataset_pool=self.dataset_pool)
        new_hh_ds = self.dataset_pool.get_dataset('household')
        self.assert_(allclose(new_hh_ds['household_id'], array([1])))
        self.assert_(allclose(new_hh_ds['building_id'], array([11])))
        self.assert_(allclose(new_hh_ds['keep'], array([4.1])))
        self.assert_(allclose(new_hh_ds['income'], array([71]) * 1000.0))
        self.assert_(allclose(new_hh_ds['size'], array([5])))
        self.assert_(allclose(new_hh_ds['age_of_head'], array([31])))
        new_p_ds = self.dataset_pool.get_dataset('person')
        # debug output of the resulting person table
        print('array([' + ', '.join([str(i) for i in new_p_ds['person_id']]) + '])')
        print('array([' + ', '.join([str(i) for i in new_p_ds['household_id']]) + '])')
        print('array([' + ', '.join([str(i) for i in new_p_ds['age']]) + '])')
        print('array([' + ', '.join([str(i) for i in new_p_ds['age_months']]) + '])')
        self.assert_(allclose(new_p_ds['person_id'], array([1, 2, 3, 4, 31])))
        self.assert_(allclose(new_p_ds['household_id'], array([1, 1, 1, 1, 1])))
        self.assert_(allclose(new_p_ds['age'], array([77, 73, 31, 35, 1])))
        self.assert_(allclose(new_p_ds['age_months'], array([924, 876, 372, 420, 12])))
        self.assert_((new_p_ds['age'] * 12 == new_p_ds['age_months']).all(),
                     'age_months computed correctly')
class Tests(opus_unittest.OpusTestCase):
    """Tests for ScheduledEventsModel: adding/removing agents, targeting
    counts, modifying dataset attributes, and demolishing/creating buildings,
    driven by a small in-memory 'scheduled events' table.
    """

    def setUp(self):
        # Fixture: 8 buildings on 5 parcels, and 8 jobs located in buildings.
        # Jobs' raz_id is derived indirectly: job -> building -> parcel -> raz
        # (see the commented-out derived columns below for the expected values).
        building_data = {
            'building_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
            'parcel_id': array([1, 2, 2, 3, 4, 4, 5, 5]),
            'non_residential_sqft': array([6, 2, 3, 6, 1, 2, 5, 0]),
            'residential_units': array([0, 0, 0, 0, 0, 0, 1, 1]),
            'price_per_unit': array([50,21,32,15,60,90,100,200])
        }
        parcel_data = {
            'parcel_id': array([1, 2, 3, 4, 5]),
            'generic_land_use_type_id': array([6, 6, 3, 4, 1]),
            'raz_id': array([3, 4, 5, 5, 6])
        }
        job_data = {
            'job_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
            'building_id': array([1, 1, 2, 3, 6, 1, 6, 4]),
            #'parcel_id': array([ 1, 1, 2, 2, 4, 1, 4, 3]),
            #'raz_id': array([ 3, 3, 4, 4, 5, 3, 5, 5]),
            'sector_id': array([13, 12, 13, 12, 13, 13, 12, 13]),
            'dummy_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        }
        # Write the fixture tables into a temporary attribute cache and expose
        # them through a fresh dataset pool (project infrastructure).
        self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
        SimulationState().set_cache_directory(self.tmp_dir)
        self.attribute_cache = AttributeCache()
        self.dataset_pool = SessionConfiguration(
            new_instance=True,
            package_order=['urbansim', 'opus_core'],
            in_storage=self.attribute_cache).get_dataset_pool()
        #storage = StorageFactory().get_storage('flt_storage', storage_location=self.tmp_dir)
        self.attribute_cache.write_table(table_name='buildings',
                                         table_data=building_data)
        self.attribute_cache.write_table(table_name='parcels',
                                         table_data=parcel_data)
        # self.attribute_cache.write_table(table_name = 'households', table_data = household_data)
        self.attribute_cache.write_table(table_name='jobs',
                                         table_data=job_data)
        # self.attribute_cache.write_table(table_name = 'persons', table_data = person_data)
        # self.attribute_cache.write_table(table_name = 'refinements', table_data = refinement_data)
        #self.dataset_pool = DatasetPool(storage = storage, package_order = ['urbansim_parcel', 'urbansim', 'opus_core'])
        # self.refinement = self.dataset_pool.get_dataset('refinement')
        self.jobs = self.dataset_pool.get_dataset('job')
        # self.persons = self.dataset_pool.get_dataset('person')
        # self.hhs = self.dataset_pool.get_dataset('household')
        self.buildings = self.dataset_pool.get_dataset('building')
        #self.buildings.compute_variables('raz_id=building.disaggregate(parcel.raz_id)', self.dataset_pool)

    def tearDown(self):
        # Remove the temporary cache directory created in setUp.
        shutil.rmtree(self.tmp_dir)

    def test_add_and_remove_agents(self):
        """Run remove/add/target events against the jobs dataset and verify
        the resulting counts per sector/raz/building."""
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action": array(["remove", "remove", "add", "add", "target"]),
            "amount": array([1, 1, 4, 3, 7]),
            "sector_id": array([13, 12, -1, 11, 12]),
            "building_id": array([-1, -1, -1, 8, -1]),
            "raz_id": array([3, 5, 5, -1, -1]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(table_name='events',
                            table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage,
                                 in_table_name='events',
                                 id_name=[])
        model = ScheduledEventsModel(self.jobs,
                                     scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # Total jobs: 8 initial - 1 - 1 + 4 + 3, then the sector-12 target of 7
        # adds 5 more (sector 12 has 2 jobs at that point) = 18.
        results = self.jobs.size()
        should_be = 18
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        #examine each action in turn:
        # remove 1 of the 2 jobs with sector 13 in raz 3
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 13,
            self.jobs.get_attribute("raz_id") == 3).sum()
        should_be = 2 - 1
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # remove the single sector-12 job in raz 5
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 12,
            self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 1 - 1
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # raz 5 had 3 jobs; 1 removed above, 4 added by the "add" event
        results = (self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 3 - 1 + 4
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # 3 sector-11 jobs added into building 8 (none existed before)
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 11,
            self.jobs.get_attribute("building_id") == 8).sum()
        should_be = 0 + 3
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # the "target" event brings sector 12 to exactly 7 jobs
        results = (self.jobs.get_attribute("sector_id") == 12).sum()
        should_be = 7
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))

    def DELtest_add_and_remove_agents_from_geography_other_than_location_set(
            self):
        """Disabled (DEL prefix keeps the runner from collecting it):
        this has been included in the above test """
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action": array(["remove", "remove", "add", "add", "target"]),
            "amount": array([1, 1, 4, 3, 7]),
            "sector_id": array([13, 13, -1, 11, 12]),
            "building_id": array([-1, -1, -1, 8, -1]),
            "raz_id": array([3, 4, 5, -1, -1]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(table_name='events',
                            table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage,
                                 in_table_name='events',
                                 id_name=[])
        model = ScheduledEventsModel(self.jobs,
                                     scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # Total jobs: 8 initial - 1 - 1 + 4 + 3, then the sector-12 target of 7
        # adds 4 more (sector 12 has 3 jobs at that point) = 17.
        results = self.jobs.size()
        should_be = 17
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        #examine each action in turn:
        # remove 1 of the 2 jobs with sector 13 in raz 3
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 13,
            self.jobs.get_attribute("raz_id") == 3).sum()
        should_be = 2 - 1
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # remove the single sector-13 job in raz 4
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 13,
            self.jobs.get_attribute("raz_id") == 4).sum()
        should_be = 1 - 1
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # raz 5 keeps its 2 remaining jobs plus the 4 added
        results = (self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 2 + 4
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # 3 sector-11 jobs added into building 8 (none existed before)
        results = logical_and(
            self.jobs.get_attribute("sector_id") == 11,
            self.jobs.get_attribute("building_id") == 8).sum()
        should_be = 0 + 3
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # the "target" event brings sector 12 to exactly 7 jobs
        results = (self.jobs.get_attribute("sector_id") == 12).sum()
        should_be = 7
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))

    def test_modify_dataset_attribute(self):
        """Apply set/subtract/add/multiply value events (years 2000 and 2001)
        to building attributes and verify the resulting values."""
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2001, 2001]),
            "action": array([
                "set_value", "subtract_value", "add_value", "multiply_value",
                "subtract_value", "multiply_value"
            ]),
            "amount": array([4, 2, 3, 1.1, 1, 0.9]),
            "attribute": array([
                "residential_units", "non_residential_sqft",
                "non_residential_sqft", "price_per_unit",
                "non_residential_sqft", "price_per_unit"
            ]),
            "building_id": array([3, 3, 5, -1, 3, -1]),
            "parcel_id": array([-1, -1, -1, 5, -1, 5]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(table_name='events',
                            table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage,
                                 in_table_name='events',
                                 id_name=[])
        model = ScheduledEventsModel(self.buildings,
                                     scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # Value modifications must not change the number of buildings.
        results = self.buildings.size()
        should_be = 8
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        #examine each action in turn:
        # building 3: residential_units set to 4; sqft 3 - 2 = 1
        index = self.buildings.get_attribute("building_id") == 3
        results = (self.buildings.get_attribute("residential_units")[index],
                   self.buildings.get_attribute("non_residential_sqft")[index])
        should_be = (4, 1)
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # building 5: sqft 1 + 3 = 4
        index = self.buildings.get_attribute("building_id") == 5
        results = self.buildings.get_attribute("non_residential_sqft")[index]
        should_be = 1 + 3
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # parcel 5 (buildings 7 and 8): price multiplied by 1.1
        results = self.buildings.get_attribute("price_per_unit")
        should_be = array([50, 21, 32, 15, 60, 90, 100 * 1.1, 200 * 1.1])
        self.assertTrue(
            allclose(should_be, results),
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # Year-2001 events: subtract 1 sqft from building 3, multiply
        # parcel-5 prices by 0.9.
        model.run(year=2001)
        index = self.buildings.get_attribute("building_id") == 3
        results = (self.buildings.get_attribute("residential_units")[index],
                   self.buildings.get_attribute("non_residential_sqft")[index])
        should_be = (4, 0)
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        results = self.buildings.get_attribute("price_per_unit")
        should_be = array(
            [50, 21, 32, 15, 60, 90, 100 * 1.1 * 0.9, 200 * 1.1 * 0.9])
        self.assertTrue(
            allclose(should_be, results),
            "Error, should_be: %s, but result: %s" % (should_be, results))

    def test_demolish_buildings_on_a_parcel(self):
        """test demolish buildings, create new buildings, and convert an
        existing building """
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action":
            array(["remove", "add", "set_value", "set_value", "set_value"]),
            "amount": array([4, 2, 8, 7, 150]),
            "attribute": array([
                "", "", "residential_units", "non_residential_sqft",
                "price_per_unit"
            ]),
            "building_id": array([3, -1, 5, 5, 5]),
            "parcel_id": array([-1, 1, -1, -1, -1]),
            "residential_units": array([-1, 2, -1, -1, -1]),
            "non_residential_sqft": array([-1, 1, -1, -1, -1]),
            "price_per_unit": array([-1, 99, -1, -1, -1]),
        }
        # self.attribute_cache.write_table(table_name = 'scheduled_events', table_data = scheduled_events_data)
        # events_dataset = self.dataset_pool.get_dataset('scheduled_event')
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(table_name='events',
                            table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage,
                                 in_table_name='events',
                                 id_name=[])
        model = ScheduledEventsModel(self.buildings,
                                     scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # 8 initial buildings - 1 removed (building 3) + 2 added on parcel 1.
        results = self.buildings.size()
        should_be = 9
        self.assertEqual(
            should_be, results,
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # The two new buildings (ids > 8) carry the attributes from the
        # "add" event row: parcel 1, 2 residential units, 1 sqft, price 99.
        index = self.buildings.get_attribute("building_id") > 8
        results = array([
            self.buildings.get_attribute("parcel_id")[index],
            self.buildings.get_attribute("residential_units")[index],
            self.buildings.get_attribute("non_residential_sqft")[index],
            self.buildings.get_attribute("price_per_unit")[index]
        ])
        should_be = array([[1, 1], [2, 2], [1, 1], [99, 99]])
        self.assertTrue(
            allclose(should_be, results),
            "Error, should_be: %s, but result: %s" % (should_be, results))
        # Building 5 converted in place by the three set_value events.
        index = where(self.buildings.get_attribute("building_id") == 5)
        results = self.buildings.get_multiple_attributes([
            "parcel_id", "residential_units", "non_residential_sqft",
            "price_per_unit"
        ])[index]
        should_be = array([4, 8, 7, 150])
        self.assertTrue(
            allclose(should_be, results),
            "Error, should_be: %s, but result: %s" % (should_be, results))