def test_interaction_set_number_of_agents_and_multiply2(self):
     """Like test_interaction_set_number_of_agents_and_multiply, except that it
     uses the transposed interaction set test_location_x_test_agent, so rows
     are locations and columns are agents.
     """
     expr = "test_agent.income * test_location.number_of_agents(myjob)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(
         table_name='test_agents',
         table_data={'id': array([1, 2, 3]), 'income': array([1, 20, 500])}
         )
     storage.write_table(
         table_name='test_locations',
         table_data={'id': array([1, 2, 3])}
         )
     storage.write_table(
         table_name='jobs',
         table_data={'jid': arange(4)+1, 'id': array([2, 1, 3, 1])}
         )
     ds = Dataset(in_storage=storage, in_table_name='test_locations', id_name="id", dataset_name="test_location")
     jobs = Dataset(in_storage=storage, in_table_name='jobs', id_name="jid", dataset_name="myjob")
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', ds)
     dataset_pool._add_dataset('myjob', jobs)
     # Renamed local: the dataset fetched is location-by-agent, not
     # agent-by-location, so the old name was misleading.
     test_location_x_test_agent = dataset_pool.get_dataset('test_location_x_test_agent')
     result = test_location_x_test_agent.compute_variables(expr, dataset_pool=dataset_pool)
     # number_of_agents for just test_location would be [2, 1, 1]; each row of
     # the result is that count multiplied by each agent's income.
     should_be = array([ [2, 40, 1000], [1, 20, 500], [1, 20, 500] ])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6), "Error in test_interaction_set_number_of_agents_and_multiply2")
 def test_interaction_set_number_of_agents(self):
     """Compute number_of_agents on a component of the interaction set
     test_agent_x_test_location: the per-location job count, replicated
     across the agent dimension.
     """
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3])})
     storage.write_table(table_name='jobs',
                         table_data={'jid': arange(4) + 1,
                                     'id': array([2, 1, 3, 1])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     jobs = Dataset(in_storage=storage, in_table_name='jobs',
                    id_name="jid", dataset_name="myjob")
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', locations)
     dataset_pool._add_dataset('myjob', jobs)
     interaction_set = dataset_pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(
         "test_location.number_of_agents(myjob)", dataset_pool=dataset_pool)
     # Per-location counts are [2, 1, 1]; the interaction set repeats that
     # row once per agent.
     should_be = array([[2, 1, 1]] * 3)
     self.assert_(ma.allclose(result, should_be, rtol=1e-6),
                  "Error in test_interaction_set_number_of_agents")
 def test_interaction_set_aggregate_and_multiply_same_expr(self):
     """Aggregate on an interaction-set component, then multiply the result
     by the very same subexpression (test_agent.income+1).
     """
     expr = "(test_agent.income+1)*test_location.aggregate(test_agent.income+1)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(
         table_name='test_agents',
         table_data={'id': array([1, 2, 3]),
                     'location_id': array([1, 2, 2]),
                     'income': array([1, 20, 50])})
     storage.write_table(
         table_name='test_locations',
         table_data={'location_id': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="location_id", dataset_name="test_location")
     pool = DatasetPool(package_order=['opus_core'], storage=storage)
     pool._add_dataset('test_location', locations)
     interaction_set = pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(expr, dataset_pool=pool)
     # test_agent.income+1 is [2, 21, 51];
     # test_location.aggregate(test_agent.income+1) is [2, 72].
     should_be = array([[i * 2, i * 72] for i in [2, 21, 51]])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6))
 def test_interaction_set_disaggregate_and_multiply(self):
     """Disaggregate a faz-level variable onto locations inside an
     interaction set and multiply by test_agent.income (same fixture as
     test_interaction_set_disaggregate).
     """
     expr = "test_agent.income * test_location.disaggregate(myfaz.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', locations)
     dataset_pool._add_dataset('myfaz', fazes)
     interaction_set = dataset_pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(expr, dataset_pool=dataset_pool)
     # Disaggregated values per location are [4, 8, 4, 8]; each row scales
     # them by one agent's income.
     should_be = array([[income * v for v in [4, 8, 4, 8]]
                        for income in [1, 20, 500]])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6),
                  "Error in interaction_set_disaggregate_and_multiply")
# Exemplo n.º 5
# 0
    def test_my_inputs_convert(self):
        """hmai with lct codes that are converted via the HEAVY_MED_URBAN
        constant list."""
        variable_name = "biocomplexity.land_cover.hmai"
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([1, 3, 2, 4])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        constants = {"FOOTPRINT": array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]),
                     'HEAVY_MED_URBAN': ['HU', 'MU'],
                     'MU': 2,
                     'HU': 1}
        dataset_pool._add_dataset('constant', constants)
        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(variable_name, dataset_pool=dataset_pool)
        values = land_cover.get_attribute(variable_name)
        # Arcsine square-root transform of the expected fractions.
        should_be = arcsin(sqrt(array([0.25, 0, 0.25, 0])))
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + variable_name)
# Exemplo n.º 6
# 0
    def test_my_inputs(self):
        """Distance-transform-based variable on a 2x2 land-cover grid."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([11, 2, 4, 3])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset('constant',
                                  {"CELLSIZE": 30,
                                   "ALL_URBAN": ['HU', 'MU', 'LU'],
                                   'HU': 1,
                                   'MU': 2,
                                   'LU': 3})
        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)
        # Mask of cells whose lct is not in ALL_URBAN (codes 11 and 4).
        mask = array([1, 0, 1, 0], dtype=float32)
        should_be = (ln(30 * distance_transform_edt(mask) + 1)
                     / ddt1.standardization_constant_distance)
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + self.variable_name)
 def test_aggregate_sum(self):
     """aggregate() with an explicit function=sum over a scaled expression."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={"id2": array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones',
                     id_name="id", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myzone', zones)
     dataset_pool._add_dataset('myfaz', fazes)
     values = fazes.compute_variables(
         ['myfaz.aggregate(10.0*myzone.my_variable, function=sum)'],
         dataset_pool=dataset_pool)
     # faz 1 sums zones 1 and 3 (4 + 0.5), faz 2 sums zones 2 and 4 (8 + 1),
     # each scaled by 10.
     should_be = array([45, 90])
     self.assert_(ma.allclose(values, should_be, rtol=1e-6),
                  "Error in aggregate_sum")
 def test_interaction_set_aggregate2(self):
     """Like test_interaction_set_aggregate, except that it uses the
     transposed interaction set test_location_x_test_agent.
     """
     expr = "test_location.aggregate(10.0*gridcell.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(
         table_name='test_agents',
         table_data={'id': array([1, 2, 3]), 'income': array([1, 20, 500])}
         )
     storage.write_table(
         table_name='test_locations',
         table_data={'location_id': array([1, 2])}
         )
     storage.write_table(
         table_name='gridcells',
         table_data={
             'my_variable': array([4, 8, 0.5, 1]),
             'grid_id': array([1, 2, 3, 4]),
             'location_id': array([1, 2, 1, 2]),
             }
         )
     location_dataset = Dataset(in_storage=storage, in_table_name='test_locations', id_name="location_id", dataset_name="test_location")
     gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells', id_name="grid_id", dataset_name='gridcell')
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', location_dataset)
     dataset_pool._add_dataset('gridcell', gridcell_dataset)
     # Renamed local: rows are locations, columns are agents, so the old
     # name test_agent_x_test_location was misleading.
     test_location_x_test_agent = dataset_pool.get_dataset('test_location_x_test_agent')
     result = test_location_x_test_agent.compute_variables(expr, dataset_pool=dataset_pool)
     # Aggregate for just test_location would be [4.5, 9]; scaled by 10 and
     # repeated once per agent column.
     should_be = array([ [45, 45, 45], [90, 90, 90] ])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6), "Error in interaction_set_aggregate2")
# Exemplo n.º 9
# 0
    def test_my_inputs_tree_test(self):
        """pcc on a 2x2 fixture: canopy-cover cells (CC) as a fraction of the
        footprint, with open-water cells (OW) excluded from the denominator."""
        variable_name = "biocomplexity.land_cover.pcc"
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([12, 8, 8, 15])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset('constant',
                                  {"FOOTPRINT": array([[0, 1, 0],
                                                       [1, 1, 1],
                                                       [0, 1, 0]]),
                                   "CC": 8,
                                   "OW": 12})
        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(variable_name, dataset_pool=dataset_pool)
        values = land_cover.get_attribute(variable_name)
        # Canopy-cover and open-water cell counts inside each 5-cell footprint.
        cc_within_fp = array([2, 3, 3, 2])
        ow_within_fp = array([3, 1, 1, 0])
        should_be = arcsin(sqrt(cc_within_fp / (5.0 - ow_within_fp)))
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + variable_name)
 def test_aggregate(self):
     """aggregate() with no function argument defaults to 'sum'."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'zone_id': array([1, 2])})
     storage.write_table(table_name='gridcells',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'grid_id': array([1, 2, 3, 4]),
                                     'zone_id': array([1, 2, 1, 2])})
     zone_dataset = Dataset(in_storage=storage, in_table_name='zones',
                            id_name="zone_id", dataset_name='zone')
     gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells',
                                id_name="grid_id", dataset_name='gridcell')
     pool = DatasetPool()
     pool._add_dataset('gridcell', gridcell_dataset)
     pool._add_dataset('zone', zone_dataset)
     values = zone_dataset.compute_variables(
         ['zone.aggregate(gridcell.my_variable)'], dataset_pool=pool)
     # zone 1 sums gridcells 1 and 3 (4 + 0.5); zone 2 sums 2 and 4 (8 + 1).
     should_be = array([4.5, 9])
     self.assert_(ma.allclose(values, should_be, rtol=1e-6),
                  "Error in aggregate")
# Exemplo n.º 11
# 0
    def test_no_translation(self):
        """Computing xmps on this fixture is expected to raise RuntimeError."""
        variable_name = "biocomplexity.land_cover.xmps"
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 3, 1, 2, 3, 1, 2, 3]),
                        'relative_y': array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                        "lct": array([1, 2, 3, 4, 5, 4, 3, 5, 1])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset('constant',
                                  {"FOOTPRINT": array([[0, 1, 0],
                                                       [1, 1, 1],
                                                       [0, 1, 0]]),
                                   "ALL_URBAN": ['HU', 'MU', 'LU'],
                                   'HU': 1,
                                   'MU': 2,
                                   'LU': 3})
        land_cover = dataset_pool.get_dataset('land_cover')
        self.assertRaises(RuntimeError,
                          land_cover.compute_variables,
                          variable_name,
                          dataset_pool=dataset_pool)
 def test_interaction_set_aggregate_and_multiply(self):
     """Aggregate on an interaction-set component, multiplied by
     test_agent.income (same fixture as test_interaction_set_aggregate).
     """
     expr = "test_agent.income * test_location.aggregate(10.0*gridcell.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'location_id': array([1, 2])})
     storage.write_table(table_name='gridcells',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'grid_id': array([1, 2, 3, 4]),
                                     'location_id': array([1, 2, 1, 2])})
     location_dataset = Dataset(in_storage=storage,
                                in_table_name='test_locations',
                                id_name="location_id",
                                dataset_name="test_location")
     gridcell_dataset = Dataset(in_storage=storage,
                                in_table_name='gridcells',
                                id_name="grid_id",
                                dataset_name='gridcell')
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', location_dataset)
     dataset_pool._add_dataset('gridcell', gridcell_dataset)
     interaction_set = dataset_pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(expr, dataset_pool=dataset_pool)
     # Aggregate for just test_location would be [4.5, 9]; scaled by 10 and
     # multiplied row-wise by each agent's income.
     should_be = array([[income * v for v in [45, 90]]
                        for income in [1, 20, 500]])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6),
                  "Error in test_interaction_set_aggregate_and_multiply")
# Exemplo n.º 13
# 0
    def test_my_inputs(self):
        """Arcsine-square-root share variable; footprint_size presumably
        supplies the per-cell denominator — TODO confirm against the
        variable implementation."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([3, 2, 1, 0]),
                        "footprint_size": array([5, 5, 5, 5])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        footprint = array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
        dataset_pool._add_dataset('constant', {"FOOTPRINT": footprint})
        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)
        counts = array([3, 3, 3, 0])
        sizes = array([5, 5, 5, 5], dtype=float32)
        should_be = arcsin(sqrt(counts / sizes))
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 14
# 0
    def test_my_inputs_for_hmps(self):
        """Compute biocomplexity.land_cover.hmps on a 2x2 grid and check it
        against ln(2+1) scaled by the MPS standardization constant."""
        variable_name = "biocomplexity.land_cover.hmps"
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([1, 2, 1, 4])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset('constant',
                                  {"FOOTPRINT": array([[0, 1, 0],
                                                       [1, 1, 1],
                                                       [0, 1, 0]]),
                                   'HU': 1})
        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(variable_name,
                                     dataset_pool=dataset_pool)
        values = land_cover.get_attribute(variable_name)
        expected_counts = array([2, 2, 2, 2], dtype=float32)
        should_be = ln(expected_counts + 1) / SSSmps.standardization_constant_MPS
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + variable_name)
 def test_versioning_with_disaggregate(self):
     """A computed variable that disaggregates over an id attribute must be
     recomputed when that id attribute changes, and only then."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones',
                     id_name="id", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myzone', zones)
     dataset_pool._add_dataset('myfaz', fazes)
     var = "my_var = myzone.disaggregate(10.0*myfaz.my_variable)"
     zones.modify_attribute("id2", array([2, 1, 2, 1]))  # id2 now at version 1
     zones.compute_variables([var], dataset_pool=dataset_pool)
     self.assert_(zones.get_version("my_var") == 0)
     # Recomputing with nothing changed must not bump the version.
     zones.compute_variables([var], dataset_pool=dataset_pool)
     self.assert_(zones.get_version("my_var") == 0)
     zones.touch_attribute("id2")  # id2 now at version 2
     # The dependency changed, so my_var must be recomputed (version 1).
     zones.compute_variables([var], dataset_pool=dataset_pool)
     self.assert_(zones.get_version("my_var") == 1)
# Exemplo n.º 16
# 0
    def test_my_inputs(self):
        """Distance-transform-based variable on a 2x2 grid with CELLSIZE 30."""
        storage = StorageFactory().get_storage('dict_storage')
        table = {'relative_x': array([1, 2, 1, 2]),
                 'relative_y': array([1, 1, 2, 2]),
                 "lct": array([11, 2, 4, 3])}
        storage.write_table(table_name='land_covers', table_data=table)
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        constants = {"CELLSIZE": 30,
                     "ALL_URBAN": ['HU', 'MU', 'LU'],
                     'HU': 1,
                     'MU': 2,
                     'LU': 3}
        dataset_pool._add_dataset('constant', constants)
        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)
        # Mask of cells whose lct is not in ALL_URBAN (codes 11 and 4).
        mask = array([1, 0, 1, 0], dtype=float32)
        should_be = (ln(30 * distance_transform_edt(mask) + 1)
                     / ddt1.standardization_constant_distance)
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + self.variable_name)
 def test_interaction_set_aggregate2(self):
     """Like test_interaction_set_aggregate, except that it uses the
     transposed interaction set test_location_x_test_agent.
     """
     expr = "test_location.aggregate(gridcell.my_variable)"
     storage = StorageFactory().get_storage("dict_storage")
     storage.write_table(
         table_name="test_agents", table_data={"id": array([1, 2, 3]), "income": array([1, 20, 500])}
     )
     storage.write_table(table_name="test_locations", table_data={"location_id": array([1, 2])})
     storage.write_table(
         table_name="gridcells",
         table_data={
             "my_variable": array([4, 8, 0.5, 1]),
             "grid_id": array([1, 2, 3, 4]),
             "location_id": array([1, 2, 1, 2]),
         },
     )
     location_dataset = Dataset(
         in_storage=storage, in_table_name="test_locations", id_name="location_id", dataset_name="test_location"
     )
     gridcell_dataset = Dataset(
         in_storage=storage, in_table_name="gridcells", id_name="grid_id", dataset_name="gridcell"
     )
     dataset_pool = DatasetPool(package_order=["opus_core"], storage=storage)
     dataset_pool._add_dataset("test_location", location_dataset)
     dataset_pool._add_dataset("gridcell", gridcell_dataset)
     # Renamed local: rows are locations, columns are agents, so the old
     # name test_agent_x_test_location was misleading.
     test_location_x_test_agent = dataset_pool.get_dataset("test_location_x_test_agent")
     result = test_location_x_test_agent.compute_variables(expr, dataset_pool=dataset_pool)
     # Aggregate for just test_location would be [4.5, 9], repeated per agent.
     should_be = array([[4.5, 4.5, 4.5], [9, 9, 9]])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6), "Error in interaction_set_aggregate2")
# Exemplo n.º 18
# 0
    def test_my_inputs(self):
        """Footprint-based variable with AG = 10; an lct of -9999 is included
        in the fixture (presumably a no-data marker — TODO confirm against
        the variable implementation)."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([-9999, 5, 3, 1])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        footprint = array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
        dataset_pool._add_dataset('constant', {"FOOTPRINT": footprint,
                                               'AG': 10})
        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)
        should_be = array([2, 4, 4, 5])
        self.assert_(ma.allequal(values, should_be),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 19
# 0
 def test_aggregate_unqualified_name(self):
     """aggregate() requires a fully qualified variable name; the
     unqualified 'my_variable' (instead of 'gridcell.my_variable') must
     raise ValueError.
     """
     expr = 'zone.aggregate(my_variable)'
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'zone_id': array([1, 2])})
     storage.write_table(table_name='gridcells',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'grid_id': array([1, 2, 3, 4]),
                                     'zone_id': array([1, 2, 1, 2])})
     zone_dataset = Dataset(in_storage=storage, in_table_name='zones',
                            id_name="zone_id", dataset_name='zone')
     gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells',
                                id_name="grid_id", dataset_name='gridcell')
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('gridcell', gridcell_dataset)
     dataset_pool._add_dataset('zone', zone_dataset)
     self.assertRaises(ValueError,
                       zone_dataset.compute_variables, [expr],
                       dataset_pool=dataset_pool)
# Exemplo n.º 20
# 0
 def test_aggregate_bad_function(self):
     """The 'function' argument of aggregate() must be a single name; an
     expression like 3+4 must raise ValueError.
     """
     expr = "zone.aggregate(2*gridcell.my_variable, function=3+4)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'zone_id': array([1, 2])})
     storage.write_table(table_name='gridcells',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'grid_id': array([1, 2, 3, 4]),
                                     'zone_id': array([1, 2, 1, 2])})
     zone_dataset = Dataset(in_storage=storage, in_table_name='zones',
                            id_name="zone_id", dataset_name='zone')
     gridcell_dataset = Dataset(in_storage=storage, in_table_name='gridcells',
                                id_name="grid_id", dataset_name='gridcell')
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('gridcell', gridcell_dataset)
     dataset_pool._add_dataset('zone', zone_dataset)
     self.assertRaises(ValueError,
                       zone_dataset.compute_variables, [expr],
                       dataset_pool=dataset_pool)
# Exemplo n.º 21
# 0
    def test_my_inputs(self):
        """Footprint-based variable with AG = 4 on a 2x2 land-cover grid."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={'relative_x': array([1, 2, 1, 2]),
                        'relative_y': array([1, 1, 2, 2]),
                        "lct": array([4, 4, 5, 1])})
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        footprint = array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
        dataset_pool._add_dataset('constant', {"FOOTPRINT": footprint,
                                               'AG': 4})
        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)
        should_be = array([4, 4, 1, 1])
        self.assert_(ma.allequal(values, should_be),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 22
# 0
    def test_my_inputs_convert(self):
        """hmai where the lct codes are converted through HEAVY_MED_URBAN."""
        variable_name = "biocomplexity.land_cover.hmai"
        storage = StorageFactory().get_storage('dict_storage')
        land_cover_data = {'relative_x': array([1, 2, 1, 2]),
                           'relative_y': array([1, 1, 2, 2]),
                           "lct": array([1, 3, 2, 4])}
        storage.write_table(table_name='land_covers',
                            table_data=land_cover_data)
        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset(
            'constant',
            {"FOOTPRINT": array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]),
             'HEAVY_MED_URBAN': ['HU', 'MU'],
             'MU': 2,
             'HU': 1})
        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(variable_name,
                                     dataset_pool=dataset_pool)
        values = land_cover.get_attribute(variable_name)
        # Arcsine square-root transform of the expected fractions.
        should_be = arcsin(sqrt(array([0.25, 0, 0.25, 0])))
        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + variable_name)
# Exemplo n.º 23
# 0
    def test_my_inputs(self):
        """Distance-based variable on a 2x2 land-cover grid with AG = 10.

        The expected value is rebuilt from scratch below with the same
        correlate / threshold / distance-transform pipeline the variable
        presumably uses — TODO confirm against the variable implementation.
        """
        storage = StorageFactory().get_storage('dict_storage')

        storage.write_table(table_name='land_covers',
                            table_data={
                                'relative_x': array([1, 2, 1, 2]),
                                'relative_y': array([1, 1, 2, 2]),
                                "lct": array([10, 10, 4, 3])
                            })

        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        footprint = array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
        dataset_pool._add_dataset('constant', {
            "FOOTPRINT": footprint,
            'AG': 10,
        })

        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)

        # Cells with lct == AG (10), laid out as the 2x2 grid.
        should_be = array([[1, 1], [0, 0]])
        # Count matching cells within each cell's footprint.
        should_be = correlate(ma.filled(should_be.astype(int32), 0),
                              footprint,
                              mode="reflect")
        # Threshold the footprint fraction, then log-scaled distance to the
        # nearest cell passing the threshold.
        should_be = less_equal((should_be / 5.0), 400)
        should_be = ln(distance_transform_edt(should_be) +
                       1) / dag.standardization_constant_distance
        should_be = ravel(transpose(should_be))  # flatten by id

        self.assert_(ma.allclose(values, should_be, rtol=1e-7),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 24
# 0
    def test_my_inputs(self):
        """Compute self.variable_name from comm_add4 on a 2x2 grid; expected
        values are log-transformed weighted neighbourhood sums."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={
                'relative_x': array([1, 2, 1, 2]),
                'relative_y': array([1, 1, 2, 2]),
                "comm_add4": array([1, 2, 5, 15]),
            })

        dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
        # CELLSIZE 250 results in a 3x3 grid, (750/250)x(750/250)
        dataset_pool._add_dataset('constant', {"CELLSIZE": 250})

        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(self.variable_name, dataset_pool=dataset_pool)
        computed = land_cover.get_attribute(self.variable_name)

        # Weighted neighbourhood sum for each of the four cells.
        weighted_sums = array([
            1 * 4 + 2 * 2 + 5 * 2 + 15,
            1 * 2 + 2 * 4 + 5 + 15 * 2,
            1 * 2 + 2 + 5 * 4 + 15 * 2,
            1 + 2 * 2 + 5 * 2 + 15 * 4,
        ])
        expected = ln(weighted_sums + 1) / 10.0

        self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 25
# 0
    def test_my_inputs(self):
        """Compute self.variable_name as the arcsine-square-root-transformed
        ratio of ow cells within the footprint to footprint size."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={
                'relative_x': array([1, 2, 1, 2]),
                'relative_y': array([1, 1, 2, 2]),
                "land_cover_type_ow_within_footprint": array([3, 2, 1, 0]),
                "footprint_size": array([5, 4, 5, 5]),
            })

        dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
        plus_footprint = array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
        dataset_pool._add_dataset('constant', {
            "FOOTPRINT": plus_footprint,
        })

        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(self.variable_name, dataset_pool=dataset_pool)
        computed = land_cover.get_attribute(self.variable_name)

        # Fraction of ow cells inside each cell's footprint, then the
        # arcsin(sqrt(x)) variance-stabilizing transform.
        fraction = (array([3, 2, 1, 0], dtype=float32) /
                    array([5, 4, 5, 5], dtype=float32))
        expected = arcsin(sqrt(fraction))

        self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 26
# 0
 def test_interaction_set_aggregate_and_multiply_same_var(self):
     """Aggregate income per location inside an interaction set and multiply
     by the same variable; entry (i, j) is income[i] * aggregate_income[j]."""
     # Test doing an aggregate on an interaction set component and using the result in a multiply operation
     # with the same variable.  This is similar to test_interaction_set_aggregate except that we multiply
     # by the same variable.
     expr = "test_agent.income*test_location.aggregate(test_agent.income)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={
                             'id': array([1, 2, 3]),
                             'location_id': array([1, 2, 2]),
                             'income': array([1, 20, 50])
                         })
     storage.write_table(table_name='test_locations',
                         table_data={'location_id': array([1, 2])})
     location_dataset = Dataset(in_storage=storage,
                                in_table_name='test_locations',
                                id_name="location_id",
                                dataset_name="test_location")
     dataset_pool = DatasetPool(package_order=['opus_core'],
                                storage=storage)
     dataset_pool._add_dataset('test_location', location_dataset)
     test_agent_x_test_location = dataset_pool.get_dataset(
         'test_agent_x_test_location')
     result = test_agent_x_test_location.compute_variables(
         expr, dataset_pool=dataset_pool)
     # test_agent.income is [1, 20, 50]
     # test_location.aggregate(test_agent.income) is [1, 70]
     should_be = array([[1, 70], [20, 1400], [50, 3500]])
     self.assert_(ma.allclose(result, should_be, rtol=1e-6))
 def test_disaggregate(self):
     """disaggregate should broadcast each faz value down to its member zones."""
     storage = StorageFactory().get_storage('dict_storage')
     # Four zones; 'id2' maps each zone to one of the two fazes.
     storage.write_table(table_name='zones',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones',
                     id_name="id", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myzone', zones)
     dataset_pool._add_dataset('myfaz', fazes)
     computed = zones.compute_variables(
         ["myzone.disaggregate(myfaz.my_variable)"],
         dataset_pool=dataset_pool)
     # faz values [4, 8] replicated onto zones via id2 [1, 2, 1, 2].
     expected = array([4, 8, 4, 8])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in disaggregate")
 def test_disaggregate_fully_qualified_variable(self):
     """Disaggregate a fully-qualified variable
     (opus_core.test_location.cost_times_3) from test_location onto zones."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={
                             'zone_id': array([1, 2, 3, 4]),
                             'id': array([1, 2, 1, 2])
                         })
     # it would be nicer to call this table 'fazzes' but we want to use the existing test variable
     storage.write_table(table_name='test_locations',
                         table_data={
                             'cost': array([4, 8]),
                             'id': array([1, 2])
                         })
     zone_dataset = Dataset(in_storage=storage,
                            in_table_name='zones',
                            id_name="zone_id",
                            dataset_name="zone")
     test_dataset = Dataset(in_storage=storage,
                            in_table_name='test_locations',
                            id_name="id",
                            dataset_name='test_location')
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('zone', zone_dataset)
     dataset_pool._add_dataset('test_location', test_dataset)
     values = zone_dataset.compute_variables(
         ['zone.disaggregate(opus_core.test_location.cost_times_3)'],
         dataset_pool=dataset_pool)
     # cost_times_3 is [12, 24]; replicated onto zones via 'id' [1, 2, 1, 2].
     should_be = array([12, 24, 12, 24])
     self.assert_(ma.allclose(values, should_be, rtol=1e-6),
                  "Error in test_disaggregate_fully_qualified_variable")
# Exemplo n.º 29
# 0
    def test_my_inputs(self):
        """Compute self.variable_name from comm_add4 on a 2x2 grid; expected
        values are log-transformed weighted neighbourhood sums."""
        storage = StorageFactory().get_storage('dict_storage')

        storage.write_table(
            table_name='land_covers',
            table_data={
                'relative_x': array([1,2,1,2]),
                'relative_y': array([1,1,2,2]),
                "comm_add4": array([1, 2, 5, 15])
            }
        )

        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset(
            'constant',
            {
                "CELLSIZE": 250    # this results in a 3x3 grid, (750/250)x(750/250)
            }
        )

        gridcell = dataset_pool.get_dataset('land_cover')
        gridcell.compute_variables(self.variable_name,
                                   dataset_pool=dataset_pool)
        values = gridcell.get_attribute(self.variable_name)

        # Weighted neighbourhood sum for each of the four cells, then ln(x+1)/10.
        should_be = array([1*4+2*2+5*2+15, 1*2+2*4+5+15*2, 1*2+2+5*4+15*2, 1+2*2+5*2+15*4])
        should_be = ln(should_be + 1) / 10.0

        self.assert_(ma.allclose( values, should_be, rtol=1e-7),
                     msg = "Error in " + self.variable_name)
 def test_aggregate_all_mean(self):
     """aggregate_all with function=mean should average all zone values."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'my_variable': array([4, 8, 10, 1]),
                                     'id': array([1, 2, 3, 4])})
     storage.write_table(table_name='regions',
                         table_data={"id": array([1])})
     zones = Dataset(in_storage=storage, in_table_name='zones',
                     id_name="id", dataset_name="myzone")
     regions = Dataset(in_storage=storage, in_table_name='regions',
                       id_name="id", dataset_name="myregion")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myzone', zones)
     dataset_pool._add_dataset('myregion', regions)
     expr = "myvar = myregion.aggregate_all(myzone.my_variable, function=mean)"
     regions.compute_variables([expr], dataset_pool=dataset_pool)
     computed = regions.get_attribute("myvar")
     expected = array([5.75])  # mean of [4, 8, 10, 1]
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in aggregate_all_mean")
 def test_aggregate_squared_with_cast(self):
     """Square an aggregate and cast to float32; the per-zone sums of
     my_variable are 4.5 and 9.0."""
     # more exercising the SUBPATTERN_NUMBER_OF_AGENTS_WITH_CAST tree pattern
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={
                             'zone_id': array([1, 2]),
                         })
     storage.write_table(table_name='gridcells',
                         table_data={
                             'my_variable': array([4, 8, 0.5, 1]),
                             'grid_id': array([1, 2, 3, 4]),
                             'zone_id': array([1, 2, 1, 2]),
                         })
     zone_dataset = Dataset(in_storage=storage,
                            in_table_name='zones',
                            id_name="zone_id",
                            dataset_name='zone')
     gridcell_dataset = Dataset(in_storage=storage,
                                in_table_name='gridcells',
                                id_name="grid_id",
                                dataset_name='gridcell')
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('gridcell', gridcell_dataset)
     dataset_pool._add_dataset('zone', zone_dataset)
     values = zone_dataset.compute_variables(
         ['(zone.aggregate(gridcell.my_variable)**2).astype(float32)'],
         dataset_pool=dataset_pool)
     should_be = array([4.5 * 4.5, 9.0 * 9.0])
     self.assert_(ma.allclose(values, should_be, rtol=1e-6),
                  "Error in aggregate")
 def test_aggregate_fully_qualified_variable(self):
     """Aggregate a fully-qualified variable (opus_core.tests.a_test_variable)
     from the tests dataset up to zones."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={
                             'zone_id': array([1, 2]),
                         })
     # it would be nicer to call this table 'gridcells' but we want to use the existing test variable
     storage.write_table(table_name='tests',
                         table_data={
                             'a_dependent_variable': array([4, 8, 0.5, 1]),
                             'id': array([1, 2, 3, 4]),
                             'zone_id': array([1, 2, 1, 2]),
                         })
     zone_dataset = Dataset(in_storage=storage,
                            in_table_name='zones',
                            id_name="zone_id",
                            dataset_name='zone')
     test_dataset = Dataset(in_storage=storage,
                            in_table_name='tests',
                            id_name="id",
                            dataset_name='tests')
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('zone', zone_dataset)
     dataset_pool._add_dataset('tests', test_dataset)
     values = zone_dataset.compute_variables(
         ['zone.aggregate(opus_core.tests.a_test_variable)'],
         dataset_pool=dataset_pool)
     should_be = array([45, 90])
     self.assert_(ma.allclose(values, should_be, rtol=1e-6),
                  "Error in test_aggregate_fully_qualified_variable")
 def test_interaction_set_disaggregate(self):
     """Disaggregate a faz variable onto test_location inside the
     test_agent_x_test_location interaction set; every agent row gets the
     same disaggregated location vector.  (It would be nicer if test_location
     were called zone, but we wanted to use the existing test interaction set.)
     """
     expr = "test_location.disaggregate(10.0*myfaz.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', locations)
     dataset_pool._add_dataset('myfaz', fazes)
     interaction_set = dataset_pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(expr, dataset_pool=dataset_pool)
     # result for just test_location would be [4, 8, 4, 8]; it is replicated
     # across the three agent rows of the interaction set.
     expected = array([[40, 80, 40, 80]] * 3)
     self.assert_(ma.allclose(result, expected, rtol=1e-6),
                  "Error in interaction_set_disaggregate")
 def test_interaction_set_aggregate(self):
     """Aggregate a gridcell variable up to test_location inside the
     test_agent_x_test_location interaction set; every agent row gets the
     same aggregated location vector.  (It would be nicer if test_location
     were called zone, but we wanted to use the existing test interaction set.)
     """
     expr = "test_location.aggregate(10.0*gridcell.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'location_id': array([1, 2])})
     storage.write_table(table_name='gridcells',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'grid_id': array([1, 2, 3, 4]),
                                     'location_id': array([1, 2, 1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="location_id", dataset_name="test_location")
     gridcells = Dataset(in_storage=storage, in_table_name='gridcells',
                         id_name="grid_id", dataset_name='gridcell')
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', locations)
     dataset_pool._add_dataset('gridcell', gridcells)
     interaction_set = dataset_pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(expr, dataset_pool=dataset_pool)
     # result for just test_location would be [4.5, 9]; it is replicated
     # across the three agent rows of the interaction set.
     expected = array([[45, 90]] * 3)
     self.assert_(ma.allclose(result, expected, rtol=1e-6),
                  "Error in interaction_set_aggregate")
 def test_interaction_set_disaggregate_and_multiply2(self):
     """Like test_interaction_set_disaggregate_and_multiply, but on the
     transposed interaction set test_location_x_test_agent."""
     expr = "test_location.disaggregate(10.0*myfaz.my_variable) * test_agent.income"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', locations)
     dataset_pool._add_dataset('myfaz', fazes)
     interaction_set = dataset_pool.get_dataset('test_location_x_test_agent')
     result = interaction_set.compute_variables(expr, dataset_pool=dataset_pool)
     # result for just test_location would be [4, 8, 4, 8]
     expected = array([[40, 800, 20000],
                       [80, 1600, 40000],
                       [40, 800, 20000],
                       [80, 1600, 40000]])
     self.assert_(ma.allclose(result, expected, rtol=1e-6),
                  "Error in interaction_set_disaggregate_and_multiply2")
# Exemplo n.º 36
# 0
    def test_my_inputs(self):
        """Compute self.variable_name for four households; the first two are
        expected to be flagged 1, the last two 0."""
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='households',
            table_data={
                'household_id': array([1, 2, 3, 4]),
                'income': array([45000, 50000, 75000, 100000]),
            })

        dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)
        dataset_pool._add_dataset('urbansim_constant', MockConstant())

        household = dataset_pool.get_dataset('household')
        household.compute_variables(self.variable_name,
                                    dataset_pool=dataset_pool)
        computed = household.get_attribute(self.variable_name)

        expected = array([1, 1, 0, 0])
        self.assert_(ma.allequal(computed, expected),
                     msg="Error in " + self.variable_name)
# Exemplo n.º 37
# 0
 def test_my_tree(self):
     """Compute self.variable_name over four development events; the first
     and third events are expected to be flagged True."""
     storage = StorageFactory().get_storage('dict_storage')
     # Four development events with their start/end development types.
     storage.write_table(table_name='development_events',
                         table_data={
                             'grid_id': array([100, 100, 101, 102]),
                             'scheduled_year': array([1999, 1998, 1999, 1999]),
                             'starting_development_type_id': array([1, 3, 1, 3]),
                             'ending_development_type_id': array([1, 2, 2, 3]),
                         })
     storage.write_table(table_name='development_type_groups',
                         table_data={
                             'name': array(["vacant_developable", "developed"]),
                             'group_id': array([1, 2]),
                         })
     dataset_pool = DatasetPool(package_order=['urbansim', 'opus_core'],
                                storage=storage)
     dataset_pool._add_dataset('development_type', mock_developmenttype())
     development_event = dataset_pool.get_dataset('development_event')
     development_event.compute_variables(self.variable_name,
                                         dataset_pool=dataset_pool)
     computed = development_event.get_attribute(self.variable_name)
     expected = array([True, False, True, False])
     self.assert_(ma.allequal(computed, expected),
                  msg="Error in " + self.variable_name)
 def test_interaction_set_aggregate_and_multiply_same_var(self):
     """Aggregate income per location inside an interaction set and multiply
     by the same variable; entry (i, j) is income[i] * aggregate_income[j].
     Similar to test_interaction_set_aggregate except for the multiply."""
     expr = "test_agent.income*test_location.aggregate(test_agent.income)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'location_id': array([1, 2, 2]),
                                     'income': array([1, 20, 50])})
     storage.write_table(table_name='test_locations',
                         table_data={'location_id': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="location_id", dataset_name="test_location")
     dataset_pool = DatasetPool(package_order=['opus_core'], storage=storage)
     dataset_pool._add_dataset('test_location', locations)
     interaction_set = dataset_pool.get_dataset('test_agent_x_test_location')
     result = interaction_set.compute_variables(expr, dataset_pool=dataset_pool)
     # test_agent.income is [1, 20, 50]
     # test_location.aggregate(test_agent.income) is [1, 70]
     expected = array([[1, 70], [20, 1400], [50, 3500]])
     self.assert_(ma.allclose(result, expected, rtol=1e-6))
# Exemplo n.º 39
# 0
    def test_my_inputs_for_hmps(self):
        """Compute biocomplexity.land_cover.hmps; each cell's expected raw
        value is 2 before the ln/standardization transform."""
        variable_name = "biocomplexity.land_cover.hmps"
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='land_covers',
            table_data={
                'relative_x': array([1, 2, 1, 2]),
                'relative_y': array([1, 1, 2, 2]),
                "lct": array([1, 2, 1, 4]),
            })

        dataset_pool = DatasetPool(package_order=['biocomplexity'], storage=storage)
        dataset_pool._add_dataset(
            'constant', {
                "FOOTPRINT": array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]),
                'HU': 1,
            })

        land_cover = dataset_pool.get_dataset('land_cover')
        land_cover.compute_variables(variable_name, dataset_pool=dataset_pool)
        computed = land_cover.get_attribute(variable_name)

        raw = array([2, 2, 2, 2], dtype=float32)
        expected = ln(raw + 1) / SSSmps.standardization_constant_MPS

        self.assert_(ma.allclose(computed, expected, rtol=1e-7),
                     msg="Error in " + variable_name)
# Exemplo n.º 40
# 0
    def test_no_translation(self):
        """Computing biocomplexity.land_cover.xmps on this 3x3 input is
        expected to raise a RuntimeError (presumably because the lct values
        are not translated -- confirm against the variable implementation)."""
        variable_name = "biocomplexity.land_cover.xmps"
        storage = StorageFactory().get_storage('dict_storage')

        storage.write_table(table_name='land_covers',
                            table_data={
                                'relative_x':
                                array([1, 2, 3, 1, 2, 3, 1, 2, 3]),
                                'relative_y':
                                array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                                "lct": array([1, 2, 3, 4, 5, 4, 3, 5, 1]),
                            })

        dataset_pool = DatasetPool(package_order=['biocomplexity'],
                                   storage=storage)
        dataset_pool._add_dataset(
            'constant', {
                "FOOTPRINT": array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]),
                "ALL_URBAN": ['HU', 'MU', 'LU'],
                'HU': 1,
                'MU': 2,
                'LU': 3
            })

        land_cover = dataset_pool.get_dataset('land_cover')
        self.assertRaises(RuntimeError,
                          land_cover.compute_variables,
                          variable_name,
                          dataset_pool=dataset_pool)
# Exemplo n.º 41
# 0
    def prepare_dataset_pool(self, recent_years):
        """Cache three years (1998-2000) of gridcell data, set the simulation
        year to 2001, and return a DatasetPool backed by the attribute cache.

        The urbansim_constant dataset cannot be written to the attribute
        cache, so it is loaded from a dict storage and added to the pool
        directly.
        """
        cache_dir = os.path.join(self.urbansim_tmp, 'urbansim_cache')
        SimulationState().set_cache_directory(cache_dir)

        storage = StorageFactory().get_storage('dict_storage')
        dataset_pool = DatasetPool(package_order=['urbansim'], storage=storage)

        # Identical grid each year; only industrial_sqft differs per cached
        # year.  (Replaces three copy-pasted write/cache stanzas.)
        industrial_sqft_by_year = {
            1998: array([4, 0, 1, 0]),
            1999: array([3, 0, 2, 1]),
            2000: array([3, 0, 3, 1]),
        }
        for year in sorted(industrial_sqft_by_year):
            storage.write_table(table_name='gridcells',
                                table_data={
                                    'grid_id': array([1, 2, 3, 4]),
                                    'industrial_sqft': industrial_sqft_by_year[year],
                                })
            gridcell = dataset_pool.get_dataset('gridcell')
            self._write_dataset_to_cache(gridcell, cache_dir, year)
            dataset_pool.remove_all_datasets()

        storage.write_table(table_name='urbansim_constants',
                            table_data={
                                'recent_years': array([recent_years]),
                            })

        SimulationState().set_current_time(2001)
        attribute_cache = AttributeCache()
        SessionConfiguration(new_instance=True,
                             package_order=['urbansim'],
                             in_storage=attribute_cache)
        dataset_pool = DatasetPool(package_order=['urbansim'],
                                   storage=attribute_cache)

        # Can't write urbansim_constant, so directly add it to the pool.
        temp_dataset_pool = DatasetPool(package_order=['urbansim'],
                                        storage=storage)
        dataset_pool._add_dataset(
            'urbansim_constant',
            temp_dataset_pool.get_dataset('urbansim_constant'))
        return dataset_pool
# Exemplo n.º 42
# 0
    def prepare_dataset_pool(self, recent_years):
        """Cache five years (1997-2001) of gridcell development flags, set the
        simulation year to 2001, and return a DatasetPool backed by the
        attribute cache.  urbansim_constant cannot be written to the cache,
        so it is added to the pool directly from a dict storage."""
        self.cache_dir = os.path.join(self.urbansim_tmp, 'urbansim_cache')
        SimulationState().set_cache_directory(self.cache_dir)

        # Per-year gridcell attributes to write into the cache.
        data = {
            1997: {
                'grid_id': array([1, 2, 3, 4]),
                'is_in_development_type_group_developed': array([0, 0, 0, 0]),
            },
            1998: {
                'grid_id': array([1, 2, 3, 4]),
                'is_in_development_type_group_developed': array([0, 1, 0, 0]),
            },
            1999: {
                'grid_id': array([1, 2, 3, 4]),
                'is_in_development_type_group_developed': array([0, 0, 1, 0])
            },
            2000: {
                'grid_id': array([1, 2, 3, 4]),
                'is_in_development_type_group_developed': array([1, 1, 1, 0])
            },
            2001: {
                'grid_id': array([1, 2, 3, 4]),
                'is_in_development_type_group_developed':
                array([1, 1, 1, 0]) + 1
            }
        }
        self.write_gridcell_data_to_cache(data)

        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(table_name='urbansim_constants',
                            table_data={
                                'recent_years': array([recent_years]),
                            })

        SimulationState().set_current_time(2001)
        attribute_cache = AttributeCache()
        SessionConfiguration(new_instance=True,
                             package_order=['urbansim'],
                             in_storage=attribute_cache)
        dataset_pool = DatasetPool(package_order=['urbansim'],
                                   storage=attribute_cache)

        # Can't write urbansim_constant, so directly add it to the pool.
        temp_dataset_pool = DatasetPool(package_order=['urbansim'],
                                        storage=storage)
        dataset_pool._add_dataset(
            'urbansim_constant',
            temp_dataset_pool.get_dataset('urbansim_constant'))
        return dataset_pool
    def prepare_dataset_pool(self, recent_years):
        """Cache five years (1997-2001) of gridcell development flags, set the
        simulation year to 2001, and return a DatasetPool backed by the
        attribute cache.  urbansim_constant cannot be written to the cache,
        so it is added to the pool directly from a dict storage."""
        self.cache_dir = os.path.join(self.urbansim_tmp, 'urbansim_cache')
        SimulationState().set_cache_directory(self.cache_dir)

        # Per-year gridcell attributes to write into the cache.
        data = {
            1997:{
                'grid_id':array([1,2,3,4]),
                'is_in_development_type_group_developed':array([0,0,0,0]),
                },
            1998:{
                'grid_id':array([1,2,3,4]),
                'is_in_development_type_group_developed':array([0,1,0,0]),
            },
            1999:{
                'grid_id':array([1,2,3,4]),
                'is_in_development_type_group_developed':array([0,0,1,0])
            },
            2000:{
                'grid_id':array([1,2,3,4]),
                'is_in_development_type_group_developed':array([1,1,1,0])
            },
            2001:{
                'grid_id':array([1,2,3,4]),
                'is_in_development_type_group_developed':array([1,1,1,0])+1
            }
        }
        self.write_gridcell_data_to_cache(data)

        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(
            table_name='urbansim_constants',
            table_data={
                'recent_years': array([recent_years]),
            }
        )

        SimulationState().set_current_time(2001)
        attribute_cache = AttributeCache()
        SessionConfiguration(new_instance=True,
                             package_order=['urbansim'],
                             in_storage=attribute_cache)
        dataset_pool = DatasetPool(package_order=['urbansim'],
                                   storage=attribute_cache)

        # Can't write urbansim_constant, so directly add it to the pool.
        temp_dataset_pool = DatasetPool(package_order=['urbansim'],
                                        storage=storage)
        dataset_pool._add_dataset('urbansim_constant',
                                  temp_dataset_pool.get_dataset('urbansim_constant'))
        return dataset_pool
 def test_aggregate_all(self):
     """aggregate_all should sum every zone value into the single region."""
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
         table_data={'my_variable': array([4,8,0.5,1]), 'id': array([1,2,3,4])})
     storage.write_table(table_name='regions',
         table_data={'id': array([1])})
     ds = Dataset(in_storage=storage, in_table_name='zones', id_name="id", dataset_name="myzone")
     ds2 = Dataset(in_storage=storage, in_table_name='regions', id_name="id", dataset_name="myregion")
     dataset_pool = DatasetPool()
     dataset_pool._add_dataset('myzone', ds)
     dataset_pool._add_dataset('myregion', ds2)
     ds2.compute_variables(["myvar = myregion.aggregate_all(myzone.my_variable)"], dataset_pool=dataset_pool)
     values = ds2.get_attribute("myvar")
     # 4 + 8 + 0.5 + 1 = 13.5
     should_be = array([13.5])
     self.assert_(ma.allclose(values, should_be, rtol=1e-6), "Error in aggregate_all")
    def test_my_inputs( self ):
        # Build the jobs table plus the employment-sector lookup tables, swap in
        # a mocked employment_sector dataset, and check self.variable_name
        # against the expected boolean pattern.
        storage = StorageFactory().get_storage('dict_storage')
        storage.write_table(table_name='jobs',
                            table_data={'job_id': array([1, 2, 3, 4]),
                                        'sector_id': array([1, 3, 2, 3])})
        storage.write_table(table_name='employment_sectors',
                            table_data={'sector_id': array([1, 2]),
                                        'name': array(["basic", "retail"])})
        storage.write_table(table_name='employment_adhoc_sector_groups',
                            table_data={'group_id': array([1, 2]),
                                        'name': array(["basic", "retail"])})
        storage.write_table(table_name='employment_adhoc_sector_group_definitions',
                            table_data={'sector_id': array([1, 2]),
                                        'group_id': array([1, 2])})
        pool = DatasetPool(package_order=['urbansim'], storage=storage)
        # Replace the real employment_sector dataset with the test mock.
        pool._add_dataset('employment_sector', mock_employmentsector())
        jobs = pool.get_dataset('job')
        jobs.compute_variables(self.variable_name, dataset_pool=pool)
        computed = jobs.get_attribute(self.variable_name)
        expected = array([True, False, False, False])
        self.assert_(ma.allequal(computed, expected),
                     msg="Error in " + self.variable_name)
 def test_aggregate_sum(self):
     # Zones 1 and 3 belong to faz 1, zones 2 and 4 to faz 2, so the
     # per-faz sums are 4 + 0.5 = 4.5 and 8 + 1 = 9.
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz', table_data={"id2": array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones', id_name="id", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='faz', id_name="id2", dataset_name="myfaz")
     pool = DatasetPool()
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     computed = fazes.compute_variables(['myfaz.aggregate(myzone.my_variable, function=sum)'], dataset_pool=pool)
     expected = array([4.5, 9])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6), "Error in aggregate_sum")
 def test_interaction_set_disaggregate(self):
     # Disaggregate on an interaction-set component: inside the
     # test_agent_x_test_location interaction set, test_location (standing in
     # for a zone) pulls 10.0*my_variable down from myfaz.
     expr = "test_location.disaggregate(10.0*myfaz.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     pool = DatasetPool(package_order=['opus_core'], storage=storage)
     pool._add_dataset('test_location', locations)
     pool._add_dataset('myfaz', fazes)
     interaction = pool.get_dataset('test_agent_x_test_location')
     computed = interaction.compute_variables(expr, dataset_pool=pool)
     # For test_location alone the scaled result would be [40, 80, 40, 80];
     # the interaction set repeats that row once per agent.
     expected = array([[40, 80, 40, 80],
                       [40, 80, 40, 80],
                       [40, 80, 40, 80]])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in interaction_set_disaggregate")
 def test_interaction_set_number_of_agents2(self):
     # Same as test_interaction_set_number_of_agents but on
     # test_location_x_test_agent, so locations index rows and agents columns.
     expr = "test_location.number_of_agents(myjob)"
     storage = StorageFactory().get_storage("dict_storage")
     storage.write_table(table_name="test_agents",
                         table_data={"id": array([1, 2, 3]),
                                     "income": array([1, 20, 500])})
     storage.write_table(table_name="test_locations",
                         table_data={"id": array([1, 2, 3])})
     storage.write_table(table_name="jobs",
                         table_data={"jid": arange(4) + 1,
                                     "id": array([2, 1, 3, 1])})
     locations = Dataset(in_storage=storage, in_table_name="test_locations",
                         id_name="id", dataset_name="test_location")
     jobs = Dataset(in_storage=storage, in_table_name="jobs",
                    id_name="jid", dataset_name="myjob")
     pool = DatasetPool(package_order=["opus_core"], storage=storage)
     pool._add_dataset("test_location", locations)
     pool._add_dataset("myjob", jobs)
     interaction = pool.get_dataset("test_location_x_test_agent")
     computed = interaction.compute_variables(expr, dataset_pool=pool)
     # For test_location alone the count would be [2, 1, 1]; each location's
     # count is repeated across the agent dimension.
     expected = array([[2, 2, 2], [1, 1, 1], [1, 1, 1]])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6), "Error in test_interaction_set_number_of_agents2")
# Exemplo n.º 49
 def test_interaction_set_disaggregate_and_multiply(self):
     # Like test_interaction_set_disaggregate, but the disaggregated value is
     # multiplied by test_agent.income so each agent row is scaled differently.
     expr = "test_agent.income * test_location.disaggregate(myfaz.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     pool = DatasetPool(package_order=['opus_core'], storage=storage)
     pool._add_dataset('test_location', locations)
     pool._add_dataset('myfaz', fazes)
     interaction = pool.get_dataset('test_agent_x_test_location')
     computed = interaction.compute_variables(expr, dataset_pool=pool)
     # For test_location alone the result would be [4, 8, 4, 8]; each row is
     # that vector times the agent's income (1, 20, 500).
     expected = array([[4, 8, 4, 8],
                       [80, 160, 80, 160],
                       [2000, 4000, 2000, 4000]])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in interaction_set_disaggregate_and_multiply")
 def test_aggregate_sum_two_levels(self):
     # Sum zone values up two levels (zone -> faz -> fazdistr) into
     # neighborhoods via the intermediates chain.
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'my_variable': array([4, 8, 2, 1, 40, 23, 78, 20, 25]),
                                     'id0': arange(9) + 1,
                                     'id1': array([1, 3, 1, 2, 3, 2, 1, 4, 4])})
     storage.write_table(table_name='fazes',
                         table_data={'id1': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 3])})
     storage.write_table(table_name='fazdistrs',
                         table_data={'id2': array([1, 2, 3]),
                                     'id3': array([1, 2, 1])})
     storage.write_table(table_name='neighborhoods',
                         table_data={"id3": array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones',
                     id_name="id0", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='fazes',
                     id_name="id1", dataset_name="myfaz")
     fazdistrs = Dataset(in_storage=storage, in_table_name='fazdistrs',
                         id_name="id2", dataset_name="myfazdistr")
     neighborhoods = Dataset(in_storage=storage, in_table_name='neighborhoods',
                             id_name="id3", dataset_name="myneighborhood")
     pool = DatasetPool()
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     pool._add_dataset('myfazdistr', fazdistrs)
     pool._add_dataset('myneighborhood', neighborhoods)
     computed = neighborhoods.compute_variables(
         ['myneighborhood.aggregate(myzone.my_variable, intermediates=[myfaz,myfazdistr], function=sum)'],
         dataset_pool=pool)
     expected = array([177, 24])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in aggregate_sum_two_levels")
 def test_interaction_set_disaggregate2(self):
     # Same as test_interaction_set_disaggregate (without the 10.0 scale) but
     # on test_location_x_test_agent: locations index rows, agents columns.
     expr = "test_location.disaggregate(myfaz.my_variable)"
     storage = StorageFactory().get_storage("dict_storage")
     storage.write_table(table_name="test_agents",
                         table_data={"id": array([1, 2, 3]),
                                     "income": array([1, 20, 500])})
     storage.write_table(table_name="test_locations",
                         table_data={"id": array([1, 2, 3, 4]),
                                     "id2": array([1, 2, 1, 2])})
     storage.write_table(table_name="faz",
                         table_data={"my_variable": array([4, 8]),
                                     "id2": array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name="test_locations",
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name="faz",
                     id_name="id2", dataset_name="myfaz")
     pool = DatasetPool(package_order=["opus_core"], storage=storage)
     pool._add_dataset("test_location", locations)
     pool._add_dataset("myfaz", fazes)
     interaction = pool.get_dataset("test_location_x_test_agent")
     computed = interaction.compute_variables(expr, dataset_pool=pool)
     # For test_location alone the result would be [4, 8, 4, 8]; each
     # location's value is repeated across the agent dimension.
     expected = array([[4, 4, 4], [8, 8, 8], [4, 4, 4], [8, 8, 8]])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6), "Error in interaction_set_disaggregate2")
 def test_aggregate_two_datasets(self):
     # Aggregating an expression that mixes attributes from two different
     # datasets is illegal; the parser should raise ValueError.
     expr = 'myfazdistr.aggregate(myzone.my_variable + myfaz.id2, intermediates=[myfaz])'
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'my_variable': array([4, 8, 2, 1, 40, 23, 78]),
                                     'id0': arange(7) + 1,
                                     'id1': array([1, 3, 1, 2, 3, 2, 1])})
     storage.write_table(table_name='fazes',
                         table_data={'id1': array([1, 2, 3]),
                                     'id2': array([1, 2, 1])})
     storage.write_table(table_name='fazdistr',
                         table_data={'id2': array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones', id_name="id0", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='fazes', id_name="id1", dataset_name="myfaz")
     fazdistrs = Dataset(in_storage=storage, in_table_name='fazdistr', id_name="id2", dataset_name="myfazdistr")
     pool = DatasetPool()
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     pool._add_dataset('myfazdistr', fazdistrs)
     self.assertRaises(ValueError, fazdistrs.compute_variables, [expr], dataset_pool=pool)
 def test_disaggregate_one_level(self):
     # Disaggregate fazdistr values down to zones through one intermediate
     # dataset (myfaz).
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'id0': arange(7) + 1,
                                     'id1': array([1, 3, 1, 2, 3, 2, 1])})
     storage.write_table(table_name='fazes',
                         table_data={'id1': array([1, 2, 3]),
                                     'id2': array([1, 2, 1])})
     storage.write_table(table_name='fazdistr',
                         table_data={'my_variable': array([40, 50]),
                                     'id2': array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones', id_name="id0", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='fazes', id_name="id1", dataset_name="myfaz")
     fazdistrs = Dataset(in_storage=storage, in_table_name='fazdistr', id_name="id2", dataset_name="myfazdistr")
     pool = DatasetPool()
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     pool._add_dataset('myfazdistr', fazdistrs)
     computed = zones.compute_variables(
         ["myzone.disaggregate(myfazdistr.my_variable, intermediates=[myfaz])"],
         dataset_pool=pool)
     # Zone id1 -> faz id2 maps zones [1,3,1,2,3,2,1] to districts [1,1,1,2,1,2,1].
     expected = array([40, 40, 40, 50, 40, 50, 40])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6), "Error in disaggregate_one_level")
 def test_disaggregate_and_multiply(self):
     # Multiply the results of two different disaggregations.  This exercises
     # using one dataset name (myfaz) both as an intermediate and as the
     # dataset being disaggregated.
     expr = "myzone.disaggregate(myfaz.fazsqft) * myzone.disaggregate(myfazdistr.my_variable, intermediates=[myfaz])"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'id0': arange(7) + 1,
                                     'id1': array([1, 3, 1, 2, 3, 2, 1])})
     storage.write_table(table_name='fazes',
                         table_data={'id1': array([1, 2, 3]),
                                     'id2': array([1, 2, 1]),
                                     'fazsqft': array([10, 50, 100])})
     storage.write_table(table_name='fazdistrs',
                         table_data={'my_variable': array([40, 50]),
                                     'id2': array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones', id_name="id0", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='fazes', id_name="id1", dataset_name="myfaz")
     fazdistrs = Dataset(in_storage=storage, in_table_name='fazdistrs', id_name="id2", dataset_name="myfazdistr")
     pool = DatasetPool()
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     pool._add_dataset('myfazdistr', fazdistrs)
     computed = zones.compute_variables([expr], dataset_pool=pool)
     # fazsqft per zone is [10,100,10,50,100,50,10]; the district variable per
     # zone is [40,40,40,50,40,50,40]; element-wise product below.
     expected = array([400, 4000, 400, 2500, 4000, 2500, 400])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6), "Error in disaggregate_and_multiply")
# Exemplo n.º 55
 def test_interaction_set_aggregate(self):
     # Aggregate on an interaction-set component: within the
     # test_agent_x_test_location interaction set, test_location (standing in
     # for a zone) sums gridcell.my_variable over its gridcells.
     expr = "test_location.aggregate(gridcell.my_variable)"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'location_id': array([1, 2])})
     storage.write_table(table_name='gridcells',
                         table_data={'my_variable': array([4, 8, 0.5, 1]),
                                     'grid_id': array([1, 2, 3, 4]),
                                     'location_id': array([1, 2, 1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="location_id", dataset_name="test_location")
     gridcells = Dataset(in_storage=storage, in_table_name='gridcells',
                         id_name="grid_id", dataset_name='gridcell')
     pool = DatasetPool(package_order=['opus_core'], storage=storage)
     pool._add_dataset('test_location', locations)
     pool._add_dataset('gridcell', gridcells)
     interaction = pool.get_dataset('test_agent_x_test_location')
     computed = interaction.compute_variables(expr, dataset_pool=pool)
     # For test_location alone the result would be [4.5, 9]; the interaction
     # set repeats that row once per agent.
     expected = array([[4.5, 9], [4.5, 9], [4.5, 9]])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in interaction_set_aggregate")
# Exemplo n.º 56
 def test_interaction_set_disaggregate_and_multiply2(self):
     # Same as test_interaction_set_disaggregate_and_multiply but on
     # test_location_x_test_agent: locations index rows, agents columns.
     expr = "test_location.disaggregate(myfaz.my_variable) * test_agent.income"
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='test_agents',
                         table_data={'id': array([1, 2, 3]),
                                     'income': array([1, 20, 500])})
     storage.write_table(table_name='test_locations',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     locations = Dataset(in_storage=storage, in_table_name='test_locations',
                         id_name="id", dataset_name="test_location")
     fazes = Dataset(in_storage=storage, in_table_name='faz',
                     id_name="id2", dataset_name="myfaz")
     pool = DatasetPool(package_order=['opus_core'], storage=storage)
     pool._add_dataset('test_location', locations)
     pool._add_dataset('myfaz', fazes)
     interaction = pool.get_dataset('test_location_x_test_agent')
     computed = interaction.compute_variables(expr, dataset_pool=pool)
     # For test_location alone the result would be [4, 8, 4, 8]; each row is
     # the location's value times the incomes (1, 20, 500).
     expected = array([[4, 80, 2000],
                       [8, 160, 4000],
                       [4, 80, 2000],
                       [8, 160, 4000]])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in interaction_set_disaggregate_and_multiply2")
 def test_disaggregate_two_levels(self):
     # Disaggregate fazdistr values down to gridcells through two
     # intermediates (myzone and myfaz).
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='gridcells',
                         table_data={'id': arange(9) + 1,
                                     'id0': array([7, 6, 1, 3, 4, 4, 5, 2, 5])})
     storage.write_table(table_name='zones',
                         table_data={'id0': arange(7) + 1,
                                     'id1': array([1, 3, 1, 2, 3, 2, 1])})
     storage.write_table(table_name='fazes',
                         table_data={'id1': array([1, 2, 3]),
                                     'id2': array([1, 2, 1])})
     storage.write_table(table_name='fazdistrs',
                         table_data={'my_variable': array([40, 50]),
                                     'id2': array([1, 2])})
     gridcells = Dataset(in_storage=storage, in_table_name='gridcells',
                         id_name="id0", dataset_name="mygridcell")
     zones = Dataset(in_storage=storage, in_table_name='zones',
                     id_name="id0", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='fazes',
                     id_name="id1", dataset_name="myfaz")
     fazdistrs = Dataset(in_storage=storage, in_table_name='fazdistrs',
                         id_name="id2", dataset_name="myfazdistr")
     pool = DatasetPool()
     pool._add_dataset('mygridcell', gridcells)
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     pool._add_dataset('myfazdistr', fazdistrs)
     computed = gridcells.compute_variables(
         ["mygridcell.disaggregate(myfazdistr.my_variable, intermediates=[myfaz,myzone])"],
         dataset_pool=pool)
     expected = array([40, 50, 40, 40, 50, 50, 40, 40, 40])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6),
                  "Error in disaggregate_two_levels")
 def test_disaggregate(self):
     # Direct (one-step) disaggregate of a scaled faz expression onto zones.
     storage = StorageFactory().get_storage('dict_storage')
     storage.write_table(table_name='zones',
                         table_data={'id': array([1, 2, 3, 4]),
                                     'id2': array([1, 2, 1, 2])})
     storage.write_table(table_name='faz',
                         table_data={'my_variable': array([4, 8]),
                                     'id2': array([1, 2])})
     zones = Dataset(in_storage=storage, in_table_name='zones', id_name="id", dataset_name="myzone")
     fazes = Dataset(in_storage=storage, in_table_name='faz', id_name="id2", dataset_name="myfaz")
     pool = DatasetPool()
     pool._add_dataset('myzone', zones)
     pool._add_dataset('myfaz', fazes)
     computed = zones.compute_variables(["myzone.disaggregate(10.0*myfaz.my_variable)"], dataset_pool=pool)
     # Zones map to fazes [1, 2, 1, 2], whose scaled values are [40, 80].
     expected = array([40, 80, 40, 80])
     self.assert_(ma.allclose(computed, expected, rtol=1e-6), "Error in disaggregate")