def test_compute_a_variable(self):
    """Merge two cached years and compute a library variable on the result."""
    # Both years share the same ids; only attr1 differs per year.
    data_by_year = {
        1000: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([10, 20, 30])}},
        1001: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([40, 50, 60])}},
    }
    CreateTestAttributeCache().create_attribute_cache_with_data(
        self.temp_dir, data_by_year)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True,
                         package_order=['opus_core'],
                         in_storage=attribute_cache)
    merged_view = MultipleYearDatasetView(
        name_of_dataset_to_merge='test',
        in_table_name='tests',
        years_to_merge=[1000, 1001],
        attribute_cache=attribute_cache,
    )
    # Succeeds iff the merged view can resolve and compute the variable.
    merged_view.compute_variables(['opus_core.test.attr1_times_2'])
def test_simple_lag_variable(self):
    """attr1_lag1 computed at year 1001 should return the year-1000 values.

    Fix: ``self.assert_`` is a long-deprecated alias of ``assertTrue``
    (removed in recent Python versions); use ``assertTrue`` instead.
    """
    test_data = {
        1000: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([10, 20, 30])}},
        1001: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([111, 222, 333])}},
    }
    cache_creator = CreateTestAttributeCache()
    cache_creator.create_attribute_cache_with_data(self._temp_dir, test_data)
    SimulationState().set_current_time(1001)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True, package_order=['opus_core'],
                         in_storage=attribute_cache)
    ds = Dataset(in_storage=attribute_cache, in_table_name='tests',
                 id_name=['id'], dataset_name='tests')
    ds.compute_variables(['opus_core.tests.attr1'])
    # Current-year values come from the 1001 cache.
    self.assertTrue(ma.allequal(ds.get_attribute('attr1'),
                                array([111, 222, 333])))
    ds.compute_variables(['opus_core.tests.attr1_lag1'])
    # Lagged values come from the 1000 cache.
    self.assertTrue(ma.allequal(ds.get_attribute('attr1_lag1'),
                                array([10, 20, 30])))
def test_with_different_ids_and_different_attributes_each_year(self):
    """Merging years whose primary-attribute sets differ raises AttributeError."""
    years = [1000, 1001]
    # 1000 has attr1/attr2; 1001 has attr1/attr4 — the attribute sets differ.
    data_by_year = {
        1000: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([10, 20, 30]),
                         'attr2': array([100, 200, 300])}},
        1001: {'tests': {'id': array([4, 5, 6]),
                         'attr1': array([11, 21, 31]),
                         'attr4': array([14, 24, 34])}},
    }
    CreateTestAttributeCache().create_attribute_cache_with_data(
        self.temp_dir, data_by_year)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True, package_order=['opus_core'],
                         in_storage=attribute_cache)
    # Constructing the merged view must fail because the years disagree
    # on which primary attributes exist.
    self.assertRaises(AttributeError, MultipleYearDatasetView,
                      name_of_dataset_to_merge='test',
                      in_table_name='tests',
                      attribute_cache=attribute_cache,
                      years_to_merge=years)
def test_compute_a_variable(self):
    """Computing a variable through a two-year merged view should succeed."""
    yearly_tables = {
        1000: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([10, 20, 30])}},
        1001: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([40, 50, 60])}},
    }
    creator = CreateTestAttributeCache()
    creator.create_attribute_cache_with_data(self.temp_dir, yearly_tables)
    cache = AttributeCache()
    SessionConfiguration(new_instance=True,
                         package_order=['opus_core'],
                         in_storage=cache)
    view = MultipleYearDatasetView(name_of_dataset_to_merge='test',
                                   in_table_name='tests',
                                   years_to_merge=[1000, 1001],
                                   attribute_cache=cache)
    # No assertion needed: an exception here fails the test.
    view.compute_variables(['opus_core.test.attr1_times_2'])
def setUp(self):
    """Build a database config and a temp attribute cache for SQL export tests."""
    self.db_config = TestDatabaseConfiguration(protocol=self.protocol)
    self.db_config_node = self.db_config._database_configuration_node()
    self.db_server = DatabaseServer(self.db_config)
    self.test_db = 'OpusDatabaseTestDatabase'
    self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
    self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
    self.year = 1000
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
    self.test_data = {
        self.year: {
            'table_a': {
                'tablea_id': array([1, 2, 3]),
                'tablea_id_name': array(['1', '2', '3']),
                'value1': array([1.0, 2.001, 3], dtype='float'),
                # sqlite has trouble with bool columns, so store as int.
                'value2': array([True, False, False], dtype='i'),
            },
            'table_b': {
                'tableb_id': array([1, 2, 3]),
                'tableb_id_name': array(['one', 'two', 'three']),
                'value3': array([1.0, 2.001, 3], dtype='float'),
            },
        },
    }
    creator = CreateTestAttributeCache()
    creator.create_attribute_cache_with_data(self.temp_dir, self.test_data)
def setUp(self):
    """Prepare database server handles and seed a temporary attribute cache."""
    self.db_config = TestDatabaseConfiguration(protocol=self.protocol)
    self.db_config_node = self.db_config._database_configuration_node()
    self.db_server = DatabaseServer(self.db_config)
    self.test_db = 'OpusDatabaseTestDatabase'
    self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_sql"
    self.export_to_cache_opus_path = "opus_core.tools.do_export_sql_to_cache"
    self.year = 1000
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
    table_a = {
        'tablea_id': array([1, 2, 3]),
        'tablea_id_name': array(['1', '2', '3']),
        'value1': array([1.0, 2.001, 3], dtype='float'),
        # Stored as int because sqlite handles bool poorly.
        'value2': array([True, False, False], dtype='i'),
    }
    table_b = {
        'tableb_id': array([1, 2, 3]),
        'tableb_id_name': array(['one', 'two', 'three']),
        'value3': array([1.0, 2.001, 3], dtype='float'),
    }
    self.test_data = {self.year: {'table_a': table_a, 'table_b': table_b}}
    CreateTestAttributeCache().create_attribute_cache_with_data(
        self.temp_dir, self.test_data)
def _create_data_with_different_ids_each_year(self, years_to_merge):
    """Return merged dataset for this set of years."""
    # Three cached years with partially overlapping id sets (3 recurs).
    data_by_year = {
        1000: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([10, 20, 30]),
                         'attr2': array([100, 200, 300]),
                         'attr3': array([1000, 2000, 3000])}},
        1001: {'tests': {'id': array([3, 4, 5]),
                         'attr1': array([11, 21, 31]),
                         'attr2': array([111, 211, 311]),
                         'attr3': array([1111, 2111, 3111])}},
        1002: {'tests': {'id': array([3, 6, 7]),
                         'attr1': array([12, 22, 32]),
                         'attr2': array([122, 222, 322]),
                         'attr3': array([1222, 2222, 3222])}},
    }
    CreateTestAttributeCache().create_attribute_cache_with_data(
        self.temp_dir, data_by_year)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True,
                         package_order=['opus_core'],
                         in_storage=attribute_cache)
    return MultipleYearDatasetView(
        name_of_dataset_to_merge='test',
        in_table_name='tests',
        years_to_merge=years_to_merge,
        attribute_cache=attribute_cache,
    )
def _create_data_with_different_ids_each_year(self, years_to_merge):
    """Return merged dataset for this set of years."""
    def year_table(ids, a1, a2, a3):
        # Build one year's 'tests' table from parallel value lists.
        return {'tests': {'id': array(ids),
                          'attr1': array(a1),
                          'attr2': array(a2),
                          'attr3': array(a3)}}

    test_data = {
        1000: year_table([1, 2, 3], [10, 20, 30],
                         [100, 200, 300], [1000, 2000, 3000]),
        1001: year_table([3, 4, 5], [11, 21, 31],
                         [111, 211, 311], [1111, 2111, 3111]),
        1002: year_table([3, 6, 7], [12, 22, 32],
                         [122, 222, 322], [1222, 2222, 3222]),
    }
    cache_creator = CreateTestAttributeCache()
    cache_creator.create_attribute_cache_with_data(self.temp_dir, test_data)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True,
                         package_order=['opus_core'],
                         in_storage=attribute_cache)
    merged = MultipleYearDatasetView(name_of_dataset_to_merge='test',
                                     in_table_name='tests',
                                     years_to_merge=years_to_merge,
                                     attribute_cache=attribute_cache)
    return merged
def test_simple_lag_variable2(self):
    """Lagging across years with non-identical ids: id 5 is new in 1001, so
    its lagged value falls back to the current-year value (555).

    Fix: ``self.assert_`` is a long-deprecated alias of ``assertTrue``
    (removed in recent Python versions); use ``assertTrue`` instead.
    """
    test_data = {
        1000: {"tests": {"id": array([1, 2, 3, 4]),
                         "attr1": array([10, 20, 30, 40])}},
        1001: {"tests": {"id": array([1, 2, 3, 5]),
                         "attr1": array([111, 222, 333, 555])}},
    }
    cache_creator = CreateTestAttributeCache()
    cache_creator.create_attribute_cache_with_data(self._temp_dir, test_data)
    SimulationState().set_current_time(1001)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True, package_order=["opus_core"],
                         in_storage=attribute_cache)
    ds = Dataset(in_storage=attribute_cache, in_table_name="tests",
                 id_name=["id"], dataset_name="tests")
    ds.compute_variables(["opus_core.tests.attr1"])
    self.assertTrue(ma.allequal(ds.get_attribute("attr1"),
                                array([111, 222, 333, 555])))
    ds.compute_variables(["opus_core.tests.attr1_lag1"])
    # Ids 1-3 take their 1000 values; id 5 (absent in 1000) keeps 555.
    self.assertTrue(ma.allequal(ds.get_attribute("attr1_lag1"),
                                array([10, 20, 30, 555])))
def test_simple_lag_variable2(self):
    """Lag variable with a changed id set: new id 5 has no 1000 value, so its
    lag falls back to the current-year value.

    Fix: replaced the deprecated ``self.assert_`` alias with ``assertTrue``.
    """
    test_data = {
        1000: {
            'tests': {
                'id': array([1, 2, 3, 4]),
                'attr1': array([10, 20, 30, 40]),
            },
        },
        1001: {
            'tests': {
                'id': array([1, 2, 3, 5]),
                'attr1': array([111, 222, 333, 555]),
            },
        },
    }
    cache_creator = CreateTestAttributeCache()
    cache_creator.create_attribute_cache_with_data(self._temp_dir, test_data)
    SimulationState().set_current_time(1001)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True, package_order=['opus_core'],
                         in_storage=attribute_cache)
    ds = Dataset(in_storage=attribute_cache, in_table_name='tests',
                 id_name=['id'], dataset_name='tests')
    ds.compute_variables(['opus_core.tests.attr1'])
    self.assertTrue(
        ma.allequal(ds.get_attribute('attr1'), array([111, 222, 333, 555])))
    ds.compute_variables(['opus_core.tests.attr1_lag1'])
    self.assertTrue(
        ma.allequal(ds.get_attribute('attr1_lag1'), array([10, 20, 30, 555])))
def setUp(self):
    """Seed a temporary attribute cache with two tables for format-export tests."""
    # Tool module paths are parameterized by the subclass-supplied format.
    self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_%s" % self.format
    self.export_to_cache_opus_path = "opus_core.tools.do_export_%s_to_cache" % self.format
    self.year = 1000
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
    self.test_data = {
        self.year: {
            'table_a': {
                'id': array([1, 2, 3]),
                'id_name': array(['1', '2', '3']),
                'value1': array([1.0, 2.001, 3], dtype='float'),
                'value2': array([True, False, False]),
            },
            'table_b': {
                'id': array([1, 2, 3]),
                'id_name': array(['one', 'two', 'three']),
                'value1': array([1.0, 2.001, 3], dtype='float'),
            },
        },
    }
    creator = CreateTestAttributeCache()
    creator.create_attribute_cache_with_data(self.temp_dir, self.test_data)
def test_with_different_ids_and_different_attributes_each_year(self):
    """Constructing a merged view over years with mismatched attributes fails."""
    merge_years = [1000, 1001]
    # Year 1000 carries attr1/attr2 while 1001 carries attr1/attr4.
    test_data = {
        1000: {'tests': {'id': array([1, 2, 3]),
                         'attr1': array([10, 20, 30]),
                         'attr2': array([100, 200, 300])}},
        1001: {'tests': {'id': array([4, 5, 6]),
                         'attr1': array([11, 21, 31]),
                         'attr4': array([14, 24, 34])}},
    }
    cache_creator = CreateTestAttributeCache()
    cache_creator.create_attribute_cache_with_data(self.temp_dir, test_data)
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True, package_order=['opus_core'],
                         in_storage=attribute_cache)
    # Expect AttributeError: the years disagree on primary attributes.
    self.assertRaises(
        AttributeError,
        MultipleYearDatasetView,
        name_of_dataset_to_merge='test',
        in_table_name='tests',
        attribute_cache=attribute_cache,
        years_to_merge=merge_years,
    )
def setUp(self):
    """Create a one-year attribute cache in a temp dir for export round-trips."""
    fmt = self.format  # export format supplied by the concrete test class
    self.export_from_cache_opus_path = "opus_core.tools.do_export_cache_to_%s" % fmt
    self.export_to_cache_opus_path = "opus_core.tools.do_export_%s_to_cache" % fmt
    self.year = 1000
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp')
    table_a = {
        'id': array([1, 2, 3]),
        'id_name': array(['1', '2', '3']),
        'value1': array([1.0, 2.001, 3], dtype='float'),
        'value2': array([True, False, False]),
    }
    table_b = {
        'id': array([1, 2, 3]),
        'id_name': array(['one', 'two', 'three']),
        'value1': array([1.0, 2.001, 3], dtype='float'),
    }
    self.test_data = {self.year: {'table_a': table_a, 'table_b': table_b}}
    CreateTestAttributeCache().create_attribute_cache_with_data(
        self.temp_dir, self.test_data)