def test_quantile(self):
    from numpy import array, ma
    a = array([35, 6, 22, 1, 60])
    b = array([6, 3, 5, 9, 1, 7, 10, 2, 8, 4, 0])
    self.assertEqual(ma.allequal(quantile(a, array([0.2, 0.9, 0.5])), array([1, 35, 6])), True)
def test_join_datasets_with_2_ids(self):
    from numpy import ma
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='data1',
        table_data={
            'id1': array([2, 4, 2]),
            'id2': array([1, 2, 3]),
            'attr1': array([4, 7, 1]),
            'attr2': array([100, 0, 1000]),
            }
        )
    storage.write_table(
        table_name='data2',
        table_data={
            'id1': array([4, 2, 2]),
            'id2': array([2, 3, 1]),
            'attr1': array([50, 60, 70])
            }
        )
    ds1 = Dataset(in_storage=storage, in_table_name='data1', id_name=['id1', 'id2'], dataset_name='data1')
    ds2 = Dataset(in_storage=storage, in_table_name='data2', id_name=['id1', 'id2'], dataset_name='data2')
    ds1.join(ds2, 'attr1')
    self.assertEqual(ma.allequal(ds1.get_attribute('attr1'), array([70, 50, 60])), True)
    self.assertEqual(ma.allequal(ds1.get_attribute('attr2'), array([100, 0, 1000])), True)
def test_load_specification_with_definition_nest_and_equations(self):
    specification = {
        "_definition_": [
            ("pop = urbansim.gridcell.population", "bpop"),
            "inc = urbansim.gridcell.average_income",
            "art = urbansim.gridcell.is_near_arterial",
            ],
        -2: {
            'name': 'nest_id',
            1: {1: ["pop", "inc", "constant"],
                2: ["art"]},
            2: {3: ["pop", "inc"]}
            }
        }
    result = load_specification_from_dictionary(specification)
    vars = result.get_variable_names()
    coefs = result.get_coefficient_names()
    eqs = result.get_equations()
    other = result.get_other_fields()
    self.assert_(alltrue(coefs == array(["bpop", "inc", "constant", "art", "bpop", "inc"])),
                 msg="Error in test_load_specification_with_definition_nest_and_equations (coefficients)")
    self.assert_(alltrue(vars == array(["pop", "inc", "constant", "art", "pop", "inc"])),
                 msg="Error in test_load_specification_with_definition_nest_and_equations (variables)")
    self.assert_(ma.allequal(eqs, array([1, 1, 1, 2, 3, 3])),
                 msg="Error in test_load_specification_with_definition_nest_and_equations (equations)")
    self.assert_(ma.allequal(other['dim_nest_id'], array([1, 1, 1, 1, 2, 2])),
                 msg="Error in test_load_specification_with_definition_nest_and_equations (nests)")
def test_agent_times_choice(self):
    expression = "agent_x_choice.agent_times_choice(attr)"
    storage = StorageFactory().get_storage("dict_storage")
    storage.write_table(
        table_name="agents",
        table_data={
            "id": array([1, 2, 3, 4, 5]),
            "attr_2": array([3, 2, 4, 10, 20]),
            "attr_3": array([10, 100, 1000, 500, 0]),
            "attr_4": array([100, 500, 0, 20, -30]),
            },
        )
    storage.write_table(table_name="choices", table_data={"id": array([1, 2, 3, 4])})
    agents = Dataset(in_storage=storage, in_table_name="agents", dataset_name="agent", id_name="id")
    choices = Dataset(in_storage=storage, in_table_name="choices", dataset_name="choice", id_name="id")
    ids = InteractionDataset(dataset1=agents, dataset2=choices, index1=array([0, 1, 3, 4]), index2=array([1, 2, 3]))
    result = ids.compute_variables(expression)
    should_be = array([[3, 10, 100], [2, 100, 500], [10, 500, 20], [20, 0, -30]])
    self.assertEqual(ma.allequal(result, should_be), True)
    agents.touch_attribute("attr_2")  # in order to recompute the expression
    choices.add_primary_attribute(name="name", data=array(["bus", "car", "tran", "walk"]))
    agents.add_primary_attribute(name="attr_tran", data=array([100, 1000, 10000, 5000, 10]))
    result = ids.compute_variables(expression)
    should_be = array([[3, 100, 100], [2, 1000, 500], [10, 5000, 20], [20, 10, -30]])
    self.assertEqual(ma.allequal(result, should_be), True)
def test_load_specification(self):
    specification = {
        1: [
            ("urbansim.gridcell.population", "BPOP"),
            ("urbansim.gridcell.average_income", "BINC"),
            ],
        2: [
            ("urbansim.gridcell.is_near_arterial", "BART"),
            ("urbansim.gridcell.is_near_highway", "BHWY"),
            ],
        3: [
            ("lage = ln(urbansim.gridcell.average_age+1)", "BAGE")
            ]
        }
    result = load_specification_from_dictionary(specification)
    vars = result.get_variable_names()
    coefs = result.get_coefficient_names()
    subm = result.get_submodels()
    fixedval = result.get_fixed_values()
    self.assert_(alltrue(coefs == array(["BPOP", "BINC", "BART", "BHWY", "BAGE"])),
                 msg="Error in test_load_specification (coefficients)")
    self.assert_(alltrue(vars == array(["population", "average_income", "is_near_arterial",
                                        "is_near_highway", "lage"])),
                 msg="Error in test_load_specification (variables)")
    self.assert_(ma.allequal(subm, array([1, 1, 2, 2, 3])),
                 msg="Error in test_load_specification (submodels)")
    self.assert_(fixedval.size == 0,
                 msg="Error in test_load_specification (fixed_values should be empty)")
    # add a variable with a fixed value coefficient
    specification[3].append(("constant", "C", 1))
    result = load_specification_from_dictionary(specification)
    fixedval = result.get_fixed_values()
    self.assert_(ma.allequal(fixedval, array([0, 0, 0, 0, 0, 1])),
                 msg="Error in test_load_specification (fixed_values)")
def test_ematch(self):
    from numpy import array, ma
    self.assertEqual(ma.allequal(ematch(array(["abcde", "abcd"]), "abcd"), array([1])), True,
                     msg="Error in ematch.")
    self.assertEqual(ma.allequal(ematch(array(["ab(c]de", "abcd"]), "ab(c]de"), array([0])), True,
                     msg="Error in ematch.")
def test_ZonalMean_exec(self):
    lo = ZonalMean_exec(self.l12, self.l13)
    res = np.asarray([[3, 3, 3, 4], [4, 4, 2, 2], [2, 5, 5, 5], [6, 6, 5, 6]])
    self.assertTrue(allequal(lo._data, res))
    lo = ZonalMean_exec(self.l14, self.l13)
    res = np.asarray([[-3, -3, -3, -4], [-4, -4, -2, -2], [-2, -5, -5, -5], [-6, -6, -5, -6]])
    self.assertTrue(allequal(lo._data, res))
def _check_dataset_methods_on_dataset_view(self, ds, years_to_merge):
    self.assert_(ds is not None)
    ds.load_dataset(attributes='*',
                    in_table_name='tests',
                    in_storage=AttributeCache()
                    )
    id = ds.get_attribute('id')
    attr1 = ds.get_attribute('attr1')
    # Does compute_variables work?
    ds.compute_variables(['opus_core.test.attr1_times_2'])
    attr1_times_2 = ds.get_attribute('attr1_times_2')
    # Are the values as expected?
    self.assert_(ma.allequal(attr1 * 2, attr1_times_2))
    # Does the result have the expected number of elements?
    self.assertEqual(len(years_to_merge) * 3, len(attr1_times_2))
    # Does _compute_if_needed work?
    ds._compute_if_needed(
        'opus_core.test.attr2_times_2',
        dataset_pool=SessionConfiguration().get_dataset_pool()
        )
    attr2_times_2 = ds.get_attribute('attr2_times_2')
    attr2 = ds.get_attribute('attr2')
    self.assert_(ma.allequal(attr2 * 2, attr2_times_2))
def test_focalsum(self):
    lo = FocalSum(self.l1, buffersize=1)
    res = np.asarray([[4, 6, 6, 4], [6, 9, 9, 6], [6, 9, 9, 6], [4, 6, 6, 4]])
    self.assertTrue(allequal(lo._data, res))
    lo = FocalSum(self.l1, buffersize=2)
    res = np.asarray([[9, 12, 12, 9], [12, 16, 16, 12], [12, 16, 16, 12], [9, 12, 12, 9]])
    self.assertTrue(allequal(lo._data, res))
def test_simple_lag_variable(self):
    test_data = {
        1000: {
            'tests': {
                'id': array([1, 2, 3]),
                'attr1': array([10, 20, 30]),
                },
            },
        1001: {
            'tests': {
                'id': array([1, 2, 3]),
                'attr1': array([111, 222, 333]),
                },
            },
        }
    cache_creator = CreateTestAttributeCache()
    cache_creator.create_attribute_cache_with_data(self._temp_dir, test_data)
    SimulationState().set_current_time(1001)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True,
                         package_order=['opus_core'],
                         in_storage=attribute_cache)
    ds = Dataset(in_storage=attribute_cache,
                 in_table_name='tests',
                 id_name=['id'],
                 dataset_name='tests')
    ds.compute_variables(['opus_core.tests.attr1'])
    self.assert_(ma.allequal(ds.get_attribute('attr1'), array([111, 222, 333])))
    ds.compute_variables(['opus_core.tests.attr1_lag1'])
    self.assert_(ma.allequal(ds.get_attribute('attr1_lag1'), array([10, 20, 30])))
def test_change_three_elements(self):
    """3 values are in common - change them to -1. Other attributes stay unchanged."""
    data = {
        'my_id': array([1, 2, 3, 4, 5]),
        'attr': array([10, 2, 3, 50, 2]),
        'attr2': array([4, 3, 2, 5, 3])
        }
    data2 = {
        'attr': array([2, 6, 7, 3])
        }
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(table_name='dataset', table_data=data)
    dataset = Dataset(in_storage=storage, in_table_name='dataset', id_name='my_id')
    storage.write_table(table_name='dataset2', table_data=data2)
    dataset2 = Dataset(in_storage=storage, in_table_name='dataset2', id_name='attr')
    JoinAttributeModificationModel().run(dataset, dataset2, value=-1)
    self.assertEqual(ma.allequal(dataset.get_attribute('attr'), array([10, -1, -1, 50, -1])), True)
    self.assertEqual(ma.allequal(dataset.get_attribute('attr2'), data['attr2']), True)
def test(self):
    opus_core_path = OpusPackage().get_opus_core_path()
    dbf_directory = os.path.join(opus_core_path, 'tests', 'data', 'dbf')
    table_name = 'test_logical'
    cache_directory = self._temp_dir
    year = 1000
    exporter = ExportDbfTableToCacheCommand(
        dbf_directory=dbf_directory,
        table_name=table_name,
        cache_directory=cache_directory,
        year=year,
        )
    exporter.execute()
    attribute_cache = AttributeCache(cache_directory=cache_directory)
    old_time = SimulationState().get_current_time()
    SimulationState().set_current_time(year)
    values = attribute_cache.load_table(table_name)
    self.assertEqual(set(['keyid', 'works']), set(values.keys()))
    self.assert_(ma.allequal(array([1, 2, 3, 4, 5]), values['keyid']))
    self.assert_(ma.allequal(array([1, 1, -1, 0, 0]), values['works']))
    SimulationState().set_current_time(old_time)
def test_transformation(self):
    from numpy import array, ma
    a = array([9, 4, 25, 36])
    self.assertEqual(ma.allequal(try_transformation(a, "sqrt"), array([3, 2, 5, 6])), True)
    self.assertEqual(ma.allequal(try_transformation(a, "*2"), array([18, 8, 50, 72])), True)
def test_focalmaximum(self):
    lo = FocalMaximum(self.l1, self.l2, buffersize=0)
    self.assertTrue(allequal(lo._data, self.l2._data))
    lo = FocalMaximum(self.l1, self.l2, buffersize=2)
    self.assertTrue(allequal(lo._data, self.l2._data))
    lo = FocalMaximum(self.l1, self.l2, buffersize=2, decomposition=columndecomposition)
    self.assertTrue(allequal(lo._data, self.l2._data))
def test_ZonalMinimum_exec(self):
    lo = ZonalMinimum_exec(self.l12, self.l13)
    res = np.asarray([[1, 1, 1, 2], [2, 2, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]])
    self.assertTrue(allequal(lo._data, res))
    lo = ZonalMinimum_exec(self.l14, self.l13)
    res = np.asarray([[-5, -5, -5, -6], [-6, -6, -3, -3], [-3, -7, -7, -7], [-9, -9, -7, -9]])
    self.assertTrue(allequal(lo._data, res))
def test_ZonalMaximum_exec(self):
    lo = ZonalMaximum_exec(self.l12, self.l13)
    res = np.asarray([[5, 5, 5, 6], [6, 6, 3, 3], [3, 7, 7, 7], [9, 9, 7, 9]])
    self.assertTrue(allequal(lo._data, res))
    lo = ZonalMaximum_exec(self.l14, self.l13)
    res = np.asarray([[-1, -1, -1, -2], [-2, -2, -1, -1], [-1, -1, -1, -1], [-1, -1, -1, -1]])
    self.assertTrue(allequal(lo._data, res))
def test_add_nothing(self):
    projects = None
    m = AddProjectsToBuildings()
    m.run(projects, self.buildings)
    self.assertEqual(ma.allequal(self.buildings.get_attribute("residential_units"),
                                 array(10 * [200, 0, 0])), True)
    self.assertEqual(ma.allequal(self.buildings.get_attribute("commercial_job_spaces"),
                                 array(10 * [0, 100, 0])), True)
    self.assertEqual(ma.allequal(self.buildings.get_attribute("industrial_job_spaces"),
                                 array(10 * [0, 0, 100])), True)
def test_focalmean_np_exec(self):
    lo = FocalMean_np_exec(self.l1, buffersize=1)
    res = np.asarray([[1] * 4] * 4)
    self.assertTrue(allequal(lo._data, res))
    lo = FocalMean_np_exec(self.l2, buffersize=3, decomposition=columndecomposition)
    res = np.asarray([[2] * 4] * 4)
    self.assertTrue(allequal(lo._data, res))
def test_create(self):
    proposals = self.dataset_pool.get_dataset("development_project_proposal")
    where_valid_units = where(proposals.get_attribute("units_proposed") > 0)[0]
    self.assert_(ma.allequal(self.dataset.get_id_attribute(),
                             proposals.get_id_attribute()[where_valid_units]))
    self.assert_(ma.allequal(self.dataset.get_attribute("parcel_id"),
                             proposals.get_attribute("parcel_id")[where_valid_units]))
    self.assert_(ma.allequal(self.dataset.get_attribute("template_id"),
                             proposals.get_attribute("template_id")[where_valid_units]))
def test_ZonalSum_exec(self):
    lo = ZonalSum_exec(self.l12, self.l13)
    res = np.asarray([[9, 9, 9, 12], [12, 12, 6, 6], [6, 20, 20, 20], [18, 18, 20, 18]])
    self.assertTrue(allequal(lo._data, res))
    # Ensure the executor-based ZonalSum gives the correct output with negative values
    lo = ZonalSum_exec(self.l14, self.l13)
    res = np.asarray([[-9, -9, -9, -12], [-12, -12, -6, -6], [-6, -20, -20, -20], [-18, -18, -20, -18]])
    self.assertTrue(allequal(lo._data, res))
def test_zonalsum(self):
    lo = zonalsum(self.l12, self.l13)
    res = np.asarray([[9, 9, 9, 12], [12, 12, 6, 6], [6, 20, 20, 20], [18, 18, 20, 18]])
    self.assertTrue(allequal(lo._data, res))
    # To check ZonalSum operation with negative zonal values
    lo = zonalsum(self.l14, self.l13)
    res = np.asarray([[-9, -9, -9, -12], [-12, -12, -6, -6], [-6, -20, -20, -20], [-18, -18, -20, -18]])
    self.assertTrue(allequal(lo._data, res))
def test_zonalmean(self):
    lo = zonalmean(self.l12, self.l13)
    res = np.asarray([[3, 3, 3, 4], [4, 4, 2, 2], [2, 5, 5, 5], [6, 6, 5, 6]])
    self.assertTrue(allequal(lo._data, res))
    # To check ZonalMean operation with negative zonal values
    lo = zonalmean(self.l14, self.l13)
    res = np.asarray([[-3, -3, -3, -4], [-4, -4, -2, -2], [-2, -5, -5, -5], [-6, -6, -5, -6]])
    self.assertTrue(allequal(lo._data, res))
def test_zonalmaximum(self):
    lo = zonalmaximum(self.l12, self.l13)
    res = np.asarray([[5, 5, 5, 6], [6, 6, 3, 3], [3, 7, 7, 7], [9, 9, 7, 9]])
    self.assertTrue(allequal(lo._data, res))
    # To check ZonalMaximum operation with negative zonal values
    lo = zonalmaximum(self.l14, self.l13)
    res = np.asarray([[-1, -1, -1, -2], [-2, -2, -1, -1], [-1, -1, -1, -1], [-1, -1, -1, -1]])
    self.assertTrue(allequal(lo._data, res))
def test_zonalminimum(self):
    lo = zonalminimum(self.l12, self.l13)
    res = np.asarray([[1, 1, 1, 2], [2, 2, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]])
    self.assertTrue(allequal(lo._data, res))
    # To check ZonalMinimum operation with negative zonal values
    lo = zonalminimum(self.l14, self.l13)
    res = np.asarray([[-5, -5, -5, -6], [-6, -6, -3, -3], [-3, -7, -7, -7], [-9, -9, -7, -9]])
    self.assertTrue(allequal(lo._data, res))
def test_flush_dataset_correct_data(self):
    job_set = Dataset(self.job_set_resources, dataset_name="jobs")
    job_set.add_attribute(self.job_id, "job_id", metadata=AttributeType.PRIMARY)
    job_set.add_attribute(self.expected_sic_data, "sic", metadata=AttributeType.COMPUTED)
    job_set.flush_dataset()
    returned_sic_data = job_set.get_attribute("sic")
    returned_id_data = job_set.get_attribute("job_id")
    self.assert_(ma.allequal(returned_id_data, self.job_id))
    self.assert_(ma.allequal(returned_sic_data, self.expected_sic_data))
def test_focalminimum_np(self):
    lo = FocalMinimum_np(self.l1, self.l2, buffersize=0)
    self.assertTrue(allequal(lo._data, self.l1._data))
    lo = FocalMinimum_np(self.l1, self.l2, buffersize=2)
    self.assertTrue(allequal(lo._data, self.l1._data))
    lo1 = FocalMinimum_np(self.l1, lo, buffersize=1)
    self.assertTrue(allequal(lo1._data, lo._data))
    lo1 = FocalMaximum(self.l1, lo, buffersize=1, decomposition=columndecomposition)
    self.assertTrue(allequal(lo1._data, lo._data))
def test_ZonalMinority_exec(self):
    lo = ZonalMinority_exec(self.l12, self.l13)
    res = np.asarray([[1, 1, 1, 2], [2, 2, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]])
    self.assertTrue(allequal(lo._data, res))
    l12 = lst_to_layer([[5, 5, 5, 6], [4, 2, 2, 2], [3, 6, 6, 6], [9, 9, 9, 1]])
    l13 = lst_to_layer([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3], [4, 4, 4, 4]])
    lo = ZonalMinority_exec(l12, l13)
    res = np.asarray([[6, 6, 6, 6], [4, 4, 4, 4], [3, 3, 3, 3], [1, 1, 1, 1]])
    self.assertTrue(allequal(lo._data, res))
def test_clip_to_zero_if_needed(self):
    from numpy import array, ma
    logger.enable_hidden_error_and_warning_words()
    result = clip_to_zero_if_needed(array([0, 3, 6, -4, 8]), "test1")
    logger.disable_hidden_error_and_warning_words()
    self.assertEqual(ma.allequal(result, array([0, 3, 6, 0, 8])), True,
                     msg="Error in test_clip_to_zero_if_needed")
    result = clip_to_zero_if_needed(array([0, 3, 6, 4, 8.5]), "test2")
    # No negative values here, so the array should be returned unchanged.
    self.assertEqual(ma.allequal(result, array([0, 3, 6, 4, 8.5])), True,
                     msg="Error in test_clip_to_zero_if_needed")
def test_focalMajority(self):
    lo = FocalMajority(self.l1, buffersize=0)
    self.assertTrue(allequal(lo._data, self.l1._data))
    lo = FocalMajority(self.l1, buffersize=1)
    self.assertTrue(allequal(lo._data, self.l1._data))
    res = np.asarray([[1, 1, 2, 3], [1, 1, 2, 3], [1, 1, 2, 2], [1, 1, 3, 2]])
    lo = FocalMajority(self.l11, buffersize=1)
    self.assertTrue(allequal(res, lo._data))
    lo = FocalMajority(self.l11, buffersize=1, decomposition=columndecomposition)
    self.assertTrue(allequal(res, lo._data))
def test_my_inputs(self):
    number_of_commercial_jobs = array([12, 39, 0, 10])
    commercial_sqft = array([1200, 16, 3900, 15])
    commercial_sqft_per_job = array([20, 3, 30, 0])
    values = VariableTestToolbox().compute_variable(self.variable_name,
        {"zone": {
            "number_of_commercial_jobs": number_of_commercial_jobs,
            "buildings_commercial_sqft": commercial_sqft,
            "commercial_sqft_per_job": commercial_sqft_per_job}},
        dataset="zone")
    should_be = array([1200 / 20.0 - 12, 0, 3900 / 30.0, 0])
    self.assertEqual(ma.allequal(values, should_be), True,
                     msg="Error in " + self.variable_name)
def test_load_specification_with_definition_with_implicit_coefficients(self):
    """Coefficient names should be aliases of the variables."""
    specification = {
        "_definition_": [
            "urbansim.gridcell.population",
            "urbansim.gridcell.average_income",
            "urbansim.gridcell.is_near_arterial",
            "lage = ln(urbansim.gridcell.average_age+1)",
            ],
        1: ["population", "average_income", "lage"],
        2: [
            "is_near_arterial",
            ("urbansim.gridcell.is_near_highway", "BHWY"),
            ],
        }
    result = load_specification_from_dictionary(specification)
    vars = result.get_variable_names()
    coefs = result.get_coefficient_names()
    subm = result.get_submodels()
    self.assert_(
        alltrue(coefs == array(["population", "average_income", "lage", "is_near_arterial", "BHWY"])),
        msg="Error in test_load_specification_with_definition_with_implicit_coefficients (coefficients)")
    self.assert_(
        alltrue(vars == array(["population", "average_income", "lage", "is_near_arterial", "is_near_highway"])),
        msg="Error in test_load_specification_with_definition_with_implicit_coefficients (variables)")
    self.assert_(
        ma.allequal(subm, array([1, 1, 1, 2, 2])),
        msg="Error in test_load_specification_with_definition_with_implicit_coefficients (submodels)")
    # test data type
    self.assert_(subm.dtype.name == "int16", msg="Error in data type of submodels.")
def test_load_specification_with_definition_with_equations(self):
    specification = {
        "_definition_": [
            "pop = urbansim.gridcell.population",
            "inc = urbansim.gridcell.average_income",
            "art = urbansim.gridcell.is_near_arterial",
            ],
        -2: {
            "equation_ids": (1, 2),
            "pop": ("bpop", 0),
            "inc": (0, "binc"),
            "art": ("bart", 0),
            "constant": ("asc", 0)
            }
        }
    result = load_specification_from_dictionary(specification)
    vars = result.get_variable_names()
    coefs = result.get_coefficient_names()
    eqs = result.get_equations()
    lvars = result.get_long_variable_names()
    self.assert_(
        alltrue(coefs == array(["asc", "bart", "bpop", "binc"])),
        msg="Error in test_load_specification_with_definition_with_equations (coefficients)")
    self.assert_(
        alltrue(vars == array(["constant", "art", "pop", "inc"])),
        msg="Error in test_load_specification_with_definition_with_equations (variables)")
    self.assert_(
        ma.allequal(eqs, array([1, 1, 1, 2])),
        msg="Error in test_load_specification_with_definition_with_equations (equations)")
    self.assert_(
        alltrue(lvars == array(["constant",
                                "art = urbansim.gridcell.is_near_arterial",
                                "pop = urbansim.gridcell.population",
                                "inc = urbansim.gridcell.average_income"])),
        msg="Error in test_load_specification_with_definition_with_equations (long names of variables)")
def test_my_inputs(self):
    storage = StorageFactory().get_storage('dict_storage')
    parcels_table_name = 'parcels'
    storage.write_table(
        table_name=parcels_table_name,
        table_data={
            'parcel_id': array([1, 2, 3, 4, 5]),
            'grid_id': array([1, 2, 1, 4, 3])
            },
        )
    parcels = ParcelDataset(in_storage=storage, in_table_name=parcels_table_name)
    city_storage = StorageFactory().get_storage('dict_storage')
    cities_table_name = 'cities'
    city_storage.write_table(
        table_name=cities_table_name,
        table_data={
            'city_id': array([63000, 56000, 99999]),
            'city_name': array(["seattle", "bellevue", "skykomish"])
            },
        )
    cities = CityDataset(in_storage=city_storage, in_table_name=cities_table_name)
    values = VariableTestToolbox().compute_variable(self.variable_name,
        data_dictionary={
            'parcel': parcels,
            'city': cities,
            'gridcell': {
                'grid_id': array([1, 2, 3, 4]),
                'city_id': array([63000, 56000, 0, 63000])}
            },
        dataset='parcel'
        )
    should_be = array([True, False, True, True, False])
    self.assert_(ma.allequal(values, should_be), 'Error in ' + self.variable_name)
def test_my_inputs(self):
    grid_id = array([2, 1, 3])
    faz_id = array([4, 5, 6])
    values = VariableTestToolbox().compute_variable(
        self.variable_name,
        {"development_project": {
            "grid_id": grid_id},
         "gridcell": {
            "faz_id": faz_id}},
        dataset="development_project")
    should_be = array([5, 4, 6])
    self.assertEqual(ma.allequal(values, should_be), True,
                     msg="Error in " + self.variable_name)
def test_my_inputs(self):
    grid_id = array([2, 1, 3])
    faz_id = array([4, 5, 6])
    values = VariableTestToolbox().compute_variable(self.variable_name,
        {"household": {
            "grid_id": grid_id},
         "gridcell": {
            "grid_id": array([1, 2, 3], dtype="int32"),
            "faz_id": faz_id}},
        dataset="household")
    should_be = array([5, 4, 6])
    self.assertEqual(ma.allequal(values, should_be), True,
                     msg="Error in " + self.variable_name)
def test_my_inputs(self):
    values = VariableTestToolbox().compute_variable(
        self.variable_name,
        {"land_cover": {
            #"lct": array([1, 2, 3]),
            "devgrid_id": array([1, 1, 2, -9999])},
         "gridcell": {
            "grid_id": array([1, 2, 3]),
            "development_type_id": array([1, 5, 3])}},
        dataset="land_cover")
    should_be = array([1, 1, 5, -9999])
    self.assert_(ma.allequal(values, should_be),
                 msg="Error in " + self.variable_name)
def test_person_dataset(self):
    households_data = {
        "household_id": arange(4) + 1,
        "building_id": array([3, 6, 1, 2], dtype=int32),
        "persons": array([1, 2, 2, 4], dtype=int32)
        }
    household_characteristics_for_ht_data = {
        "characteristic": array(2 * ['persons']),
        "min": array([1, 3]),
        "max": array([2, -1])
        }
    person_data = {
        "person_id": arange(9) + 1,
        "household_id": array([1, 2, 2, 3, 3, 4, 4, 4, 4]),
        "job_id": array([30, 50, 0, 1, 23, 54, 78, 2, 6]),
        }
    annual_household_control_totals_data = {
        "year": array(2 * [2000]),
        "persons": array([0, 1]),
        "total_number_of_households": array([0, 4])
        }
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(table_name='hh_set', table_data=households_data)
    hh_set = HouseholdDataset(in_storage=storage, in_table_name='hh_set')
    storage.write_table(table_name='prs_set', table_data=person_data)
    prs_set = PersonDataset(in_storage=storage, in_table_name='prs_set')
    storage.write_table(table_name='hct_set', table_data=annual_household_control_totals_data)
    hct_set = ControlTotalDataset(in_storage=storage, in_table_name='hct_set',
                                  what="household", id_name=["year", "persons"])
    storage.write_table(table_name='hc_set', table_data=household_characteristics_for_ht_data)
    hc_set = HouseholdCharacteristicDataset(in_storage=storage, in_table_name='hc_set')
    model = HouseholdTransitionModel(debuglevel=3)
    model.run(year=2000, person_set=prs_set, household_set=hh_set,
              control_totals=hct_set, characteristics=hc_set)
    # The run should remove the first three households (and with them the first 5 persons)
    # and add 3 copies of the last household, i.e. 12 new persons.
    self.assertEqual(prs_set.size(), 16,
                     "Error in size of the person_set. Should be 16, is %s." % prs_set.size())
    self.assertEqual(ma.allequal(prs_set.get_attribute('household_id'),
                                 array([4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7])), True,
                     "Error in assigning household_id to new persons.")
def test_my_inputs(self):
    # declare four locations with the commercial sqft and sqft-per-job values below
    commercial_sqft = array([1000, 500, 5000, 233])
    commercial_sqft_per_job = array([20, 0, 100, 33])
    values = VariableTestToolbox().compute_variable(self.variable_name,
        {"zone": {
            "buildings_commercial_sqft": commercial_sqft,
            "commercial_sqft_per_job": commercial_sqft_per_job}},
        dataset="zone")
    # notice that the computation code purposely truncates decimal results,
    # which makes sense because fractions of jobs don't exist
    should_be = array([50.0, 0.0, 50.0, 7.0])
    self.assertEqual(ma.allequal(values, should_be), True,
                     msg="Error in " + self.variable_name)
def test_my_inputs(self):
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='gridcells',
        table_data={
            'grid_id': array([1, 2, 3]),
            'is_in_development_type_group_mixed_use': array([0, 1, 1]),
            }
        )
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4, 5, 6]),
            'is_young': array([1, 0, 1, 0, 0, 1])
            }
        )
    storage.write_table(
        table_name='urbansim_constants',
        table_data={
            "young_age": array([30]),
            }
        )
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    household_x_gridcell = dataset_pool.get_dataset('household_x_gridcell')
    household_x_gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    values = household_x_gridcell.get_attribute(self.variable_name)
    should_be = array([[0, 1, 1], [0, 0, 0], [0, 1, 1], [0, 0, 0], [0, 0, 0], [0, 1, 1]])
    self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_my_inputs(self):
    """Total size of residential buildings in each zone."""
    values = VariableTestToolbox().compute_variable(self.variable_name,
        data_dictionary={
            'zone': {
                'zone_id': array([1, 2, 3]),
                },
            'building': {
                'is_building_type_residential': array([1, 0, 1, 0, 1, 1]),
                'zone_id': array([2, 3, 1, 1, 2, 1]),
                'building_size': array([100, 350, 1000, 0, 430, 95])
                },
            },
        dataset='zone'
        )
    should_be = array([1095, 530, 0])
    self.assert_(ma.allequal(values, should_be), 'Error in ' + self.variable_name)
def test_lottery_choices_without_index_with_capacity_constrains_equal_units(self):
    probabilities = array(10 * [[0.3, 0.2, 0.1, 0.4]] +
                          10 * [[0.25, 0.25, 0.25, 0.25]] +
                          10 * [[0.7, 0.1, 0.1, 0.1]])
    capacity = array([10, 3, 5, 100000])
    units = array(30 * [1], dtype="int32")
    resources = Resources({"capacity": capacity, "agents_units": units})

    def run():
        lottery = lottery_choices()
        choices = lottery.run(probabilities, resources=resources)
        return lottery.get_choice_histogram(units, choices, capacity.size)

    for i in range(10):
        result = run()
        self.assertEqual(
            ma.allequal(result <= array([10, 3, 5, 100000]), array([True, True, True, True])),
            True,
            msg="Error in lottery_choices when capacity constrains.")
def test_my_inputs(self):
    number_of_commercial_jobs = array([12, 39])
    commercial_sqft = array([1200, 16, 3900, 15])
    commercial_sqft_per_job = array([20, 3, 30, 0])
    values = VariableTestToolbox().compute_variable(self.variable_name,
        {"zone": {
            "number_of_commercial_jobs": number_of_commercial_jobs},
         "gridcell": {
            "commercial_sqft": commercial_sqft,
            "commercial_sqft_per_job": commercial_sqft_per_job,
            "zone_id": array([1, 1, 2, 2])}},
        dataset="zone")
    should_be = array([93, 222])
    self.assertEqual(ma.allequal(values, should_be), True,
                     msg="Error in " + self.variable_name)
def test_full_tree(self):
    """This is an "interaction variable", i.e. it depends on both the gridcell and household
    datasets, and thus it is in the household_x_gridcell "category"."""
    # declare five households; the age of head of the first three is 40, the 4th is 50, the 5th is 35
    age_of_head = array([40, 40, 40, 50, 35])
    # declare four gridcells, two of which are in Forecast Analysis Zone #1, the others in FAZ #2 and #3
    gridcell_faz_id = array([1, 3, 2, 1])
    # assign gridcell ids to the five households; here, households #1 and #3 are in the same gridcell
    household_grid_id = array([2, 1, 2, 4, 4])
    # Imagine a 5x4 grid, where the origin is at the upper left. On the vertical axis are the five
    # households, and on the horizontal axis are the four gridcells, numbered 1-4 (grid id).
    # Fill in each square by answering the question:
    # for the age of this row's household, how many households in that column's gridcell's FAZ
    # have the same age? If the row's household lives in that FAZ, include it in the count.
    values = VariableTestToolbox().compute_variable(
        self.variable_name,
        {"gridcell": {
            "faz_id": gridcell_faz_id},
         "household": {
            "age_of_head": age_of_head,
            "grid_id": household_grid_id},
         "faz": {
            "faz_id": array([1, 2, 3])}},
        dataset="household_x_gridcell")
    # The result says that for household #1 (age 40) there is 1 household of the same age in
    # gridcell #1's FAZ, 2 households of the same age in gridcell #2's FAZ, and so on.
    should_be = array([[1, 2, 0, 1], [1, 2, 0, 1], [1, 2, 0, 1], [1, 0, 0, 1], [1, 0, 0, 1]])
    self.assertEqual(ma.allequal(values, should_be), True,
                     msg="Error in " + self.variable_name)
def test_load_specification_with_definition(self):
    specification = {
        "_definition_": [
            ("urbansim.gridcell.population", "BPOP"),
            ("urbansim.gridcell.average_income", "BINC"),
            ("urbansim.gridcell.is_near_arterial", "BART"),
            ("lage = ln(urbansim.gridcell.average_age+1)", "BAGE"),
            ("constant", "C", 1.5)],
        1: ["population", "average_income", "lage"],
        2: [
            "is_near_arterial",
            "constant",
            ("urbansim.gridcell.is_near_highway", "BHWY"),
            ],
        }
    result = load_specification_from_dictionary(specification)
    vars = result.get_variable_names()
    coefs = result.get_coefficient_names()
    subm = result.get_submodels()
    fixedval = result.get_fixed_values()
    self.assert_(
        alltrue(coefs == array(["BPOP", "BINC", "BAGE", "BART", "C", "BHWY"])),
        msg="Error in test_load_specification_with_definition (coefficients)")
    self.assert_(
        alltrue(vars == array(["population", "average_income", "lage",
                               "is_near_arterial", "constant", "is_near_highway"])),
        msg="Error in test_load_specification_with_definition (variables)")
    self.assert_(
        ma.allequal(subm, array([1, 1, 1, 2, 2, 2])),
        msg="Error in test_load_specification_with_definition (submodels)")
    self.assert_(
        ma.allclose(fixedval, array([0, 0, 0, 0, 1.5, 0])),
        msg="Error in test_load_specification_with_definition (fixed_values)")
def test_create(self):
    proposals_components = self.dataset_pool.get_dataset(
        "development_project_proposal_component")
    template_ids = self.proposal_components.get_attribute("template_id")
    self.assert_(ma.allequal(self.proposal_components.size(), proposals_components.size()))
    # The remaining checks compare sums and counts, so use assertEqual;
    # self.assert_ would treat the second value merely as the failure message.
    self.assertEqual(self.proposal_components.get_attribute("proposal_id").sum(),
                     proposals_components.get_attribute("proposal_id").sum())
    self.assertEqual((template_ids == 1).sum(),
                     (proposals_components.get_attribute("template_id") == 1).sum())
    self.assertEqual((template_ids == 2).sum(),
                     (proposals_components.get_attribute("template_id") == 2).sum())
    self.assertEqual((template_ids == 3).sum(),
                     (proposals_components.get_attribute("template_id") == 3).sum())
    self.assertEqual(self.proposal_components.get_attribute("component_id").sum(),
                     proposals_components.get_attribute("component_id").sum())
def test_boundingbox_set_nparray(self):
    arr = np.asarray([[5] * 10] * 10)  # Create a 10x10 array, each element has a value of 5
    self.bb4.set_nparray(arr, 1, -10)
    self.assertTrue(allequal(self.bb4._data, arr))
    self.assertEqual(self.bb4.cellsize, 1)
    self.assertEqual(self.bb4.nodata_value, -10)
    self.assertEqual(self.bb4.data_structure, Datastructure.array)
    # Negative cellsize
    with self.assertRaises(PCMLInvalidInput):
        self.bb3.set_nparray(arr, -1, -10)
    # Zero cellsize
    with self.assertRaises(PCMLInvalidInput):
        self.bb3.set_nparray(arr, 0, -10)
    # None cellsize
    with self.assertRaises(PCMLInvalidInput):
        self.bb3.set_nparray(arr, None, -10)
    # None array
    nonearray = None
    with self.assertRaises(PCMLInvalidInput):
        self.bb2.set_nparray(nonearray, 1, -10)
def test(self):
    cache_dir = os.path.join(self.temp_dir, 'cache')
    data = {
        self._id_name: array([1, 2, 3]),
        'population': array([10, 20, 30]),
        }
    self._write_data_to_year(data, cache_dir, 2000)
    data = {
        self._id_name: array([1, 2, 3]),
        'population': array([11, 21, 31]),
        }
    self._write_data_to_year(data, cache_dir, 2001)
    data = {
        self._id_name: array([1, 2, 3]),
        'population': array([12, 23, 34]),
        }
    self._write_data_to_year(data, cache_dir, 2002)
    attribute_cache = AttributeCache(cache_directory=cache_dir)
    SimulationState(new_instance=True, base_cache_dir=self.temp_dir)
    SimulationState().set_cache_directory(cache_dir)
    SessionConfiguration(new_instance=True, in_storage=attribute_cache)
    SimulationState().set_current_time(2002)
    dataset_pool_2002 = DatasetPool(package_order=['urbansim'], storage=attribute_cache)
    dataset = dataset_pool_2002.get_dataset(self._dataset_name)
    variable_name = '%s.%s.absolute_population_difference_from_2000' % (
        self._package_name, self._dataset_name)
    dataset.compute_variables([variable_name], dataset_pool=dataset_pool_2002)
    pop_2002 = dataset.get_attribute(variable_name)
    self.assert_(ma.allequal(pop_2002, array([2, 3, 4])))
def test_load_specification_with_definition_nests(self):
    specification = {
        "_definition_": [
            ("pop = urbansim.gridcell.population", "bpop"),
            "inc = urbansim.gridcell.average_income",
            "art = urbansim.gridcell.is_near_arterial",
            ],
        -2: {
            'name': 'nest_id',
            1: ["pop", "inc", "constant"],
            2: ["art"]
            }
        }
    result = load_specification_from_dictionary(specification)
    vars = result.get_variable_names()
    coefs = result.get_coefficient_names()
    other = result.get_other_fields()
    self.assert_(
        alltrue(coefs == array(["bpop", "inc", "constant", "art"])),
        msg="Error in test_load_specification_with_definition_nests (coefficients)")
    self.assert_(
        alltrue(vars == array(["pop", "inc", "constant", "art"])),
        msg="Error in test_load_specification_with_definition_nests (variables)")
    self.assert_(
        ma.allequal(other['dim_nest_id'], array([1, 1, 1, 2])),
        msg="Error in test_load_specification_with_definition_nests (nests)")
def test_my_inputs(self):
    storage = StorageFactory().get_storage('dict_storage')
    building_types_table_name = 'building_types'
    storage.write_table(
        table_name=building_types_table_name,
        table_data={
            'building_type_id': array([1, 2]),
            'name': array(['residential', 'commercial']),
            'units': array(['residential_units', 'commercial_sqft'])
            })
    building_types = BuildingTypeDataset(in_storage=storage, in_table_name=building_types_table_name)
    values = VariableTestToolbox().compute_variable(
        self.variable_name,
        data_dictionary={
            'zone': {
                'zone_id': array([1, 2])},
            'gridcell': {
                'developable_maximum_commercial_sqft': array([1200, 16, 3900, 15]),
                'zone_id': array([1, 1, 2, 2])},
            'building_type': building_types},
        dataset='zone')
    should_be = array([1216, 3915])
    self.assert_(ma.allequal(values, should_be), 'Error in ' + self.variable_name)
def test_my_inputs(self):
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='households',
        table_data={
            'household_id': array([1, 2, 3, 4]),
            'income': array([45000, 50000, 75000, 100000]),
            })
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    dataset_pool._add_dataset('urbansim_constant', MockConstant())
    household = dataset_pool.get_dataset('household')
    household.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    values = household.get_attribute(self.variable_name)
    should_be = array([1, 1, 0, 0])
    self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_my_inputs(self):
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='jobs',
        table_data={
            'job_id': array([1, 2, 3, 4]),
            'sector_id': array([1, 3, 2, 3]),
            })
    storage.write_table(
        table_name='employment_sectors',
        table_data={
            'sector_id': array([1, 2]),
            'name': array(["basic", "retail"]),
            })
    storage.write_table(
        table_name='employment_adhoc_sector_groups',
        table_data={
            'group_id': array([1, 2]),
            'name': array(["basic", "retail"]),
            })
    storage.write_table(
        table_name='employment_adhoc_sector_group_definitions',
        table_data={
            'sector_id': array([1, 2]),
            'group_id': array([1, 2]),
            })
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    dataset_pool._add_dataset('employment_sector', mock_employmentsector())
    job = dataset_pool.get_dataset('job')
    job.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    values = job.get_attribute(self.variable_name)
    should_be = array([True, False, False, False])
    self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_at_year_2000(self):
    cache_dir = os.path.join(self.temp_dir, 'cache')
    data = {
        self._id_name: array([1, 2, 3]),
        'population': array([10, 20, 30]),
        }
    self._write_data_to_year(data, cache_dir, 2000)
    attribute_cache = AttributeCache(cache_directory=cache_dir)
    SimulationState(new_instance=True, base_cache_dir=self.temp_dir)
    SimulationState().set_cache_directory(cache_dir)
    SessionConfiguration(new_instance=True, in_storage=attribute_cache)
    SimulationState().set_current_time(2000)
    dataset_pool_2000 = DatasetPool(package_order=['urbansim'], storage=attribute_cache)
    dataset = dataset_pool_2000.get_dataset(self._dataset_name)
    variable_name = 'sanfrancisco.%s.percent_population_difference_from_2000_max_9999' % self._dataset_name
    dataset.compute_variables([variable_name], dataset_pool=dataset_pool_2000)
    pop_2000 = dataset.get_attribute(variable_name)
    self.assert_(ma.allequal(pop_2000, array([0, 0, 0])))
def test(self):
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(
        table_name='zones',
        table_data={
            'zone_id': array([1, 2, 3, 4, 5]),
            })
    storage.write_table(
        table_name='buildings',
        table_data={
            'building_id': array([1, 2, 3, 4, 5, 6]),
            'zone_id': array([1, 2, 3, 4, 2, 2]),
            })
    dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
    gridcell = dataset_pool.get_dataset('zone')
    gridcell.compute_variables(self.variable_name, dataset_pool=dataset_pool)
    values = gridcell.get_attribute(self.variable_name)
    should_be = array([1, 3, 1, 1, 0])
    self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_add_three_projects(self):
    project_data = {
        'project_id': arange(1, 12),
        'zone_id': array([3, 5, 7, 8, 10] + range(1, 5) + [2, 10]),
        'building_type_id': array([1] * 5 + [2] * 4 + [3] * 2),
        'residential_units': array([100, 300, 1, 50, 6] + 4 * [0] + 2 * [0]),
        'commercial_job_spaces': array(5 * [0] + 3 * [20] + [5] + 2 * [0]),
        'industrial_job_spaces': array(5 * [0] + 4 * [0] + [50, 30]),
        }
    projects = self.get_projects(project_data)
    m = AddProjectsToBuildings()
    m.run(projects, self.buildings,
          quantity_attribute_names=["residential_units", "commercial_job_spaces", "industrial_job_spaces"])
    self.assertEqual(
        ma.allequal(
            self.buildings.get_attribute("residential_units"),
            array([200, 0, 0, 200, 0, 0, 300, 0, 0, 200, 0, 0, 500, 0, 0,
                   200, 0, 0, 201, 0, 0, 250, 0, 0, 200, 0, 0, 206, 0, 0])),
        True)
    self.assertEqual(
        ma.allequal(self.buildings.get_attribute("commercial_job_spaces"),
                    array(3 * [0, 120, 0] + [0, 105, 0] + 6 * [0, 100, 0])),
        True)
    self.assertEqual(
        ma.allequal(self.buildings.get_attribute("industrial_job_spaces"),
                    array([0, 0, 100, 0, 0, 150] + 7 * [0, 0, 100] + [0, 0, 130])),
        True)
def test_LocalMult_np(self):
    lo = LocalMult_np(self.l1, self.l2)
    self.assertTrue(allequal(lo._data, self.l2._data))
    lo = LocalMult_np(self.l2, self.l3)
    self.assertTrue(allequal(lo._data, [[10] * 4] * 4))
def test_add(self):
    lo = self.l1 + self.l2
    self.assertTrue(allequal(lo._data, [[3] * 4] * 4))
def test_to_4(self):
    values = self.get_values(3, 4)
    should_be = array([7, 9])
    self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_to_2(self):
    values = self.get_values(2, 2)
    should_be = array([0, 10])
    self.assert_(ma.allequal(values, should_be), msg="Error in " + self.variable_name)
def test_mult(self):
    lo = self.l1 * self.l2
    self.assertTrue(allequal(lo._data, self.l2._data))
    lo = self.l2 * self.l3
    self.assertTrue(allequal(lo._data, [[10] * 4] * 4))