def test_versioning_with_disaggregate(self):
    """A disaggregate() result should be recomputed (version bumped) only
    when an attribute it depends on changes.

    Builds a zone dataset joined to a faz dataset through 'id2', computes a
    disaggregated variable twice (version must stay 0 the second time), then
    touches 'id2' and checks the variable is recomputed (version 1).
    """
    storage = StorageFactory().get_storage('dict_storage')
    storage.write_table(table_name='zones',
                        table_data={
                            'id': array([1, 2, 3, 4]),
                            'id2': array([1, 2, 1, 2]),
                        })
    storage.write_table(table_name='faz',
                        table_data={
                            'my_variable': array([4, 8]),
                            'id2': array([1, 2]),
                        })
    ds = Dataset(in_storage=storage, in_table_name='zones',
                 id_name="id", dataset_name="myzone")
    ds2 = Dataset(in_storage=storage, in_table_name='faz',
                  id_name="id2", dataset_name="myfaz")
    dataset_pool = DatasetPool()
    dataset_pool._add_dataset('myzone', ds)
    dataset_pool._add_dataset('myfaz', ds2)
    var = "my_var = myzone.disaggregate(10.0*myfaz.my_variable)"
    ds.modify_attribute("id2", array([2, 1, 2, 1]))  # id2 should now have version 1
    ds.compute_variables([var], dataset_pool=dataset_pool)
    # self.assert_ is a long-deprecated alias (removed in Python 3.12);
    # use assertEqual for a clearer failure message as well.
    self.assertEqual(ds.get_version("my_var"), 0)
    ds.compute_variables([var], dataset_pool=dataset_pool)
    # version should stay the same, i.e. it should not recompute
    self.assertEqual(ds.get_version("my_var"), 0)
    ds.touch_attribute("id2")  # id2 now has version 2
    ds.compute_variables([var], dataset_pool=dataset_pool)
    # version should be 1, i.e. it should recompute when id changes
    self.assertEqual(ds.get_version("my_var"), 1)
 def test_versioning_with_aggregate(self):
     """An aggregate() with intermediates should recompute only when one of
     its dependency attributes changes.

     Households aggregate up to fazdistr through faz; the test checks that
     recomputing with no changes keeps the version, while touching either
     the household 'id1' or the faz 'id2' bumps it.
     """
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='households',
                         table_data={
                             'my_variable': array([4, 8, 2, 1, 40, 23, 78]),
                             'id0': arange(7) + 1,
                             'id1': array([1, 3, 1, 2, 3, 2, 1]),
                         })
     storage.write_table(table_name='fazes',
                         table_data={
                             'id1': array([1, 2, 3]),
                             'id2': array([1, 2, 1]),
                         })
     storage.write_table(table_name='fazdistr',
                         table_data={'id2': array([1, 2])})
     ds0 = Dataset(in_storage=storage,
                   in_table_name='households',
                   id_name="id0",
                   dataset_name="myhousehold")
     ds1 = Dataset(in_storage=storage,
                   in_table_name='fazes',
                   id_name="id1",
                   dataset_name="myfaz")
     ds2 = Dataset(in_storage=storage,
                   in_table_name='fazdistr',
                   id_name="id2",
                   dataset_name="myfazdistr")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myhousehold', ds0)
     dataset_pool._add_dataset('myfaz', ds1)
     dataset_pool._add_dataset('myfazdistr', ds2)
     ds0.modify_attribute("id1", array([1, 3, 1, 2, 3, 2, 1]))  # id1 now has version 1
     variable = 'my_var = myfazdistr.aggregate(10.0*myhousehold.my_variable, intermediates=[myfaz])'
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # self.assert_ is a deprecated alias (removed in Python 3.12);
     # assertEqual also reports the mismatching values on failure.
     self.assertEqual(ds2.get_version("my_var"), 0)
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # version should stay the same, i.e. it should not recompute
     self.assertEqual(ds2.get_version("my_var"), 0)
     ds0.touch_attribute("id1")  # id1 now has version 2
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # version should be 1, i.e. it should recompute when id changes
     self.assertEqual(ds2.get_version("my_var"), 1)
     ds1.touch_attribute("id2")  # id2 now has version 1
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # version should be 2, i.e. it should recompute when id changes
     self.assertEqual(ds2.get_version("my_var"), 2)
 def test_number_of_agents(self):
     """number_of_agents() should count jobs per gridcell, and recount after
     the jobs' grid assignment changes (dependency tracking)."""
     expr = "mygridcell.number_of_agents(myjob)"
     storage = StorageFactory().get_storage('dict_storage')
     gridcell_grid_id = array([1, 2, 3])
     # 4 jobs; the 1st job's grid_id = 2 (it's in gridcell 2), etc.
     job_grid_id = array([2, 1, 3, 1])
     storage.write_table(table_name='gridcells', table_data={'gid': gridcell_grid_id})
     storage.write_table(table_name='jobs', table_data={'jid': arange(4) + 1, 'gid': job_grid_id})
     gs = Dataset(in_storage=storage, in_table_name='gridcells',
                  id_name="gid", dataset_name="mygridcell")
     jobs = Dataset(in_storage=storage, in_table_name='jobs',
                    id_name="jid", dataset_name="myjob")
     values = gs.compute_variables([expr], resources=Resources({"myjob": jobs, "mygridcell": gs}))
     should_be = array([2, 1, 1])
     # self.assert_ is a deprecated alias (removed in Python 3.12) -> assertTrue
     self.assertTrue(ma.allclose(values, should_be, rtol=1e-7), msg="Error in " + expr)
     # change gids of jobs (to test if computing dependencies is working)
     jobs.modify_attribute(name="gid", data=array([1, 1, 1, 1]))
     values2 = gs.compute_variables([expr], resources=Resources({"myjob": jobs, "mygridcell": gs}))
     should_be2 = array([4, 0, 0])
     self.assertTrue(ma.allclose(values2, should_be2, rtol=1e-7), msg="Error in " + expr)
# Example #4
 def test_alias_attribute_with_modification(self):
     """An expression that merely aliases a primary attribute ('p = persons')
     must see the attribute's modified values, not the stored originals."""
     expr = "p = persons"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(
         table_name='tests',
         table_data={
             "persons": array([1, 5, 10]),
             "id": array([1, 3, 4]),
         })
     dataset = Dataset(in_storage=storage, in_table_name='tests',
                       id_name="id", dataset_name="tests")
     # modify the primary attribute 'persons'
     new_values = array([3, 0, 100])
     dataset.modify_attribute('persons', new_values)
     # result should have the new values; assertTrue is the idiomatic form
     # (assertEqual(..., True) obscured the intent and the failure output)
     result = dataset.compute_variables([expr])
     self.assertTrue(ma.allclose(result, new_values, rtol=1e-7),
                     msg="error in test_alias_attribute_with_modification")
# Example #5
 def test_alias_attribute_with_modification(self):
     """An alias expression ('p = persons') over a primary attribute should
     reflect the attribute's modified values."""
     expr = "p = persons"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='tests',
                         table_data={
                             "persons": array([1, 5, 10]),
                             "id": array([1, 3, 4]),
                         })
     dataset = Dataset(in_storage=storage,
                       in_table_name='tests',
                       id_name="id",
                       dataset_name="tests")
     # modify the primary attribute 'persons'
     new_values = array([3, 0, 100])
     dataset.modify_attribute('persons', new_values)
     # result should have the new values; assertTrue is the idiomatic form
     # (assertEqual(..., True) obscured the intent and the failure output)
     result = dataset.compute_variables([expr])
     self.assertTrue(ma.allclose(result, new_values, rtol=1e-7),
                     msg="error in test_alias_attribute_with_modification")
 def test_versioning_with_aggregate(self):
     """aggregate() with intermediates must recompute only when a dependency
     attribute changes, bumping the result's version each time."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='households',
                         table_data={
                             'my_variable': array([4, 8, 2, 1, 40, 23, 78]),
                             'id0': arange(7) + 1,
                             'id1': array([1, 3, 1, 2, 3, 2, 1]),
                         })
     storage.write_table(table_name='fazes',
                         table_data={
                             'id1': array([1, 2, 3]),
                             'id2': array([1, 2, 1]),
                         })
     storage.write_table(table_name='fazdistr',
                         table_data={
                             'id2': array([1, 2]),
                         })
     ds0 = Dataset(in_storage=storage, in_table_name='households',
                   id_name="id0", dataset_name="myhousehold")
     ds1 = Dataset(in_storage=storage, in_table_name='fazes',
                   id_name="id1", dataset_name="myfaz")
     ds2 = Dataset(in_storage=storage, in_table_name='fazdistr',
                   id_name="id2", dataset_name="myfazdistr")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myhousehold', ds0)
     dataset_pool._add_dataset('myfaz', ds1)
     dataset_pool._add_dataset('myfazdistr', ds2)
     ds0.modify_attribute("id1", array([1, 3, 1, 2, 3, 2, 1]))  # id1 now has version 1
     variable = 'my_var = myfazdistr.aggregate(10.0*myhousehold.my_variable, intermediates=[myfaz])'
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # self.assert_ is a deprecated alias (removed in Python 3.12) -> assertEqual
     self.assertEqual(ds2.get_version("my_var"), 0)
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # version should stay the same, i.e. it should not recompute
     self.assertEqual(ds2.get_version("my_var"), 0)
     ds0.touch_attribute("id1")  # id1 now has version 2
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # version should be 1, i.e. it should recompute when id changes
     self.assertEqual(ds2.get_version("my_var"), 1)
     ds1.touch_attribute("id2")  # id2 now has version 1
     ds2.compute_variables([variable], dataset_pool=dataset_pool)
     # version should be 2, i.e. it should recompute when id changes
     self.assertEqual(ds2.get_version("my_var"), 2)
# Example #7
 def test_variable_dependencies_tree_with_versioning(self):
     """Versions must propagate through a variable's dependency tree: a
     derived variable is recomputed (version bumped) only after one of its
     dependencies is modified, and stays put on repeated computation."""
     storage = StorageFactory().get_storage('dict_storage')

     storage.write_table(
         table_name='tests',
         table_data={
             'id': array([2, 4]),
             'a_dependent_variable': array([4, 7]),
             'a_dependent_variable2': array([10, 1]),
         })

     ds = Dataset(in_storage=storage, in_table_name='tests',
                  id_name='id', dataset_name='tests')

     ds.compute_variables(["opus_core.tests.a_test_variable_with_two_dependencies"])

     # self.assert_ is a deprecated alias (removed in Python 3.12) -> assertEqual
     self.assertEqual(ds.get_version("a_test_variable_with_two_dependencies"), 0)  # initially version=0
     self.assertEqual(ds.get_version("a_dependent_variable"), 0)
     self.assertEqual(ds.get_version("a_dependent_variable2"), 0)

     ds.modify_attribute("a_dependent_variable", array([0, 0]))
     self.assertEqual(ds.get_version("a_dependent_variable"), 1)  # version=1

     ds.modify_attribute("a_dependent_variable", array([1, 1]))
     self.assertEqual(ds.get_version("a_dependent_variable"), 2)  # version=2

     ds.compute_variables(["opus_core.tests.a_test_variable_with_two_dependencies"])
     self.assertEqual(ds.get_version("a_test_variable_with_two_dependencies"), 1)

     ds.compute_variables(["opus_core.tests.a_test_variable_with_two_dependencies"])
     # version does not change on a no-op recomputation
     self.assertEqual(ds.get_version("a_test_variable_with_two_dependencies"), 1)

     # an autogenerated (expression) variable follows the same rules
     autogen_variable = "my_var = 3 * opus_core.tests.a_dependent_variable"
     ds.compute_variables([autogen_variable])
     self.assertEqual(ds.get_version("my_var"), 0)
     ds.compute_variables([autogen_variable])
     self.assertEqual(ds.get_version("my_var"), 0)
 def test_versioning_with_disaggregate(self):
     """disaggregate() must be recomputed (version bumped) only after the
     join attribute 'id2' is modified or touched."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={
                             'id': array([1, 2, 3, 4]),
                             'id2': array([1, 2, 1, 2]),
                         })
     storage.write_table(table_name='faz',
                         table_data={
                             'my_variable': array([4, 8]),
                             'id2': array([1, 2]),
                         })
     ds = Dataset(in_storage=storage,
                  in_table_name='zones',
                  id_name="id",
                  dataset_name="myzone")
     ds2 = Dataset(in_storage=storage,
                   in_table_name='faz',
                   id_name="id2",
                   dataset_name="myfaz")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myzone', ds)
     dataset_pool._add_dataset('myfaz', ds2)
     var = "my_var = myzone.disaggregate(10.0*myfaz.my_variable)"
     ds.modify_attribute("id2", array([2, 1, 2, 1]))  # id2 should now have version 1
     ds.compute_variables([var], dataset_pool=dataset_pool)
     # self.assert_ is a deprecated alias (removed in Python 3.12) -> assertEqual
     self.assertEqual(ds.get_version("my_var"), 0)
     ds.compute_variables([var], dataset_pool=dataset_pool)
     # version should stay the same, i.e. it should not recompute
     self.assertEqual(ds.get_version("my_var"), 0)
     ds.touch_attribute("id2")  # id2 now has version 2
     ds.compute_variables([var], dataset_pool=dataset_pool)
     # version should be 1, i.e. it should recompute when id changes
     self.assertEqual(ds.get_version("my_var"), 1)
# Example #9
# Interactive walk-through of the Dataset API on a 'households' dataset.
# NOTE(review): 'households' is presumably a Dataset created earlier in the
# file (not visible in this chunk) — confirm against the preceding example.
households.get_attribute_names()
households.get_id_attribute()
households.size()
households.get_attribute("income")
households.get_attribute_names()
# load all attributes from storage into memory
households.load_dataset()
households.get_attribute_names()
#households.plot_histogram("income", bins = 10)
#households.r_histogram("income")
#households.r_scatter("persons", "income")
# simple descriptive statistics over loaded attributes
households.correlation_coefficient("persons", "income")
households.correlation_matrix(["persons", "income"])
households.summary()
# add a new primary attribute 'location' (one value per household)
households.add_primary_attribute(data=[4,6,9,2,4,8,2,1,3,2], name="location")
households.get_attribute_names()
# overwrite 'location' for the first two rows only (index=[0,1])
households.modify_attribute(name="location", data=[0,0], index=[0,1])
households.get_attribute("location")
# fetch a single data element by its id value, then read its attribute
households.get_data_element_by_id(5).location
 
#households.write_dataset(out_storage=storage, out_table_name="households_output")
 
 
households.get_dataset_name()
 
# Working with models
from opus_core.choice_model import ChoiceModel
# build a multinomial-logit choice model over choices {1, 2, 3};
# component implementations are referenced by their opus_core module paths
choicemodel = ChoiceModel(choice_set=[1,2,3],
                       utilities = "opus_core.linear_utilities",
                       probabilities = "opus_core.mnl_probabilities",
                       choices = "opus_core.random_choices")
from numpy import array
# Example #10
# Interactive walk-through of the Dataset API on a 'households' dataset
# (duplicate of the previous example snippet).
# NOTE(review): 'households' is presumably a Dataset created earlier in the
# file (not visible in this chunk) — confirm against the preceding example.
households.get_attribute_names()
households.get_id_attribute()
households.size()
households.get_attribute("income")
households.get_attribute_names()
# load all attributes from storage into memory
households.load_dataset()
households.get_attribute_names()
#households.plot_histogram("income", bins = 10)
#households.r_histogram("income")
#households.r_scatter("persons", "income")
# simple descriptive statistics over loaded attributes
households.correlation_coefficient("persons", "income")
households.correlation_matrix(["persons", "income"])
households.summary()
# add a new primary attribute 'location' (one value per household)
households.add_primary_attribute(data=[4,6,9,2,4,8,2,1,3,2], name="location")
households.get_attribute_names()
# overwrite 'location' for the first two rows only (index=[0,1])
households.modify_attribute(name="location", data=[0,0], index=[0,1])
households.get_attribute("location")
# fetch a single data element by its id value, then read its attribute
households.get_data_element_by_id(5).location
 
#households.write_dataset(out_storage=storage, out_table_name="households_output")
 
 
households.get_dataset_name()
 
# Working with models
from opus_core.choice_model import ChoiceModel
# build a multinomial-logit choice model over choices {1, 2, 3};
# component implementations are referenced by their opus_core module paths
choicemodel = ChoiceModel(choice_set=[1,2,3],
                       utilities = "opus_core.linear_utilities",
                       probabilities = "opus_core.mnl_probabilities",
                       choices = "opus_core.random_choices")
from numpy import array