def helper_test_float_type(expected):
    """Verify every field of the output dbf table is a FLOAT field of
    dbf type 'F' with width 18 and ``expected`` decimal places.

    NOTE(review): ``self`` is not a parameter here — presumably this is a
    nested helper closing over ``self`` from an enclosing test method;
    confirm against the caller.
    """
    db = _dbf_class(self.storage._get_file_path_for_table(self.out_table_name))
    # fieldInfo() yields (name, type, length, decimalcount) per field.
    field_infos = [f.fieldInfo() for f in db.header.fields]
    for field_name, field_code, width, decimals in field_infos:
        self.assertEqual('FLOAT', field_name)
        self.assertEqual('F', field_code)
        self.assertEqual(18, width)
        self.assertEqual(expected, decimals)
    db.close()
def test(self):
    """Round-trip: write two int32 attributes into an AttributeCache,
    export them to a dbf file, then verify the dbf contents match.
    """
    # Set up a test cache.
    storage = AttributeCache(cache_directory=self._temp_dir)
    SimulationState().set_current_time(2000)
    table_name = 'foo'
    values = {
        'attribute1': array([1, 2, 3], dtype=int32),
        'attribute2': array([4, 5, 6], dtype=int32),
        }
    storage.write_table(table_name, values)

    table_dir = os.path.join(self._temp_dir, '2000', table_name)
    # assertTrue replaces the deprecated assert_ alias.
    self.assertTrue(os.path.exists(table_dir))
    actual = set(os.listdir(table_dir))
    expected = set([
        'attribute1.%(endian)si4' % replacements,
        'attribute2.%(endian)si4' % replacements,
        ])
    self.assertEqual(expected, actual)

    exporter = ExportCacheToDbfTableCommand(
        cache_directory=self._temp_dir,
        year='2000',
        table_name=table_name,
        dbf_directory=self._temp_dir,
        decimalcount=4,
        )
    exporter.execute()

    out_storage = dbf_storage(self._temp_dir)
    db = _dbf_class(out_storage._get_file_path_for_table(table_name))
    length = max([len(values[key]) for key in values.keys()])
    i = 0
    # Map dbf field name -> dbf type code ('f_type' avoids shadowing
    # the builtin 'type').
    field_type = {}
    for name, f_type in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = f_type
    for rec in db:
        for key in values.keys():
            # Bug fix: 'is' compared string identity (an interning
            # accident); '==' compares the type code's value.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(values[key][i], rec[key], 4)
            else:
                self.assertEqual(values[key][i], rec[key])
        i = i + 1
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(length, i,
                     msg="More values expected than the dbf file contains")
    db.close()
def helper_test_float_type(expected):
    """Verify every field of the output dbf table is a FLOAT field of
    dbf type 'F' with width 18 and ``expected`` decimal places.

    NOTE(review): ``self`` is not a parameter here — presumably this is a
    nested helper closing over ``self`` from an enclosing test method;
    confirm against the caller.
    """
    path = self.storage._get_file_path_for_table(self.out_table_name)
    db = _dbf_class(path)
    for field in db.header.fields:
        # fieldInfo() yields (name, type, length, decimalcount).
        field_name, field_code, width, decimals = field.fieldInfo()
        self.assertEqual('FLOAT', field_name)
        self.assertEqual('F', field_code)
        self.assertEqual(18, width)
        self.assertEqual(expected, decimals)
    db.close()
def write_table(self, table_name, table_data, mode=Storage.OVERWRITE):
    """Write ``table_data`` (dict of column name -> numpy array) to a new
    dbf file named after ``table_name`` in this storage's directory.

    Only Storage.OVERWRITE is supported; any other mode raises ValueError.
    """
    # TODO: implement Storage.APPEND for dbfstore
    if mode != Storage.OVERWRITE:
        # Bug fix: raising a plain string is a TypeError on any modern
        # Python (string exceptions were removed); raise a real exception.
        raise ValueError(
            'dbf_storage does not support anything except Storage.OVERWRITE')
    if not os.path.exists(self._directory):
        os.makedirs(self._directory)
    dbf_file_name = self._get_file_path_for_table(table_name)
    db = _dbf_class(dbf_file_name, new=True)
    number_of_rows, column_names = self._get_column_size_and_names(table_data)
    short_names = self._make_unique_names_list(column_names)
    for key in column_names:
        # 'field_type' avoids shadowing the builtin 'type'.
        field_type = self.__NUMPY_TYPES_TO_DBFPY_TYPES[
            table_data[key].dtype.char]
        if field_type == 'L':
            # Logical fields take no width/decimal specification.
            db.addField((short_names[key], field_type))
        else:
            if field_type == 'N':
                digits = (18,)  # TODO: Calculate actual digits needed.
            elif field_type == 'F':
                # TODO: Calculate actual decimal places needed.
                digits = (18, self._digits_to_right_of_decimal)
            elif field_type == 'C':
                digits = (table_data[key].dtype.itemsize,)
            db.addField((short_names[key], field_type) + digits)
    for i in range(number_of_rows):
        rec = db.newRecord()
        for key in column_names:
            if str(table_data[key].dtype.char) == '?':  # bool8
                # Store a plain Python bool rather than a numpy bool8.
                rec[short_names[key]] = bool(table_data[key][i])
            else:
                rec[short_names[key]] = table_data[key][i]
        rec.store()
    db.close()
    self._short_names = short_names
def test(self):
    """Round-trip: write two int32 attributes into an AttributeCache,
    export them to a dbf file, then verify the dbf contents match.
    """
    # Set up a test cache.
    storage = AttributeCache(cache_directory=self._temp_dir)
    SimulationState().set_current_time(2000)
    table_name = 'foo'
    values = {
        'attribute1': array([1, 2, 3], dtype=int32),
        'attribute2': array([4, 5, 6], dtype=int32),
        }
    storage.write_table(table_name, values)

    table_dir = os.path.join(self._temp_dir, '2000', table_name)
    # assertTrue replaces the deprecated assert_ alias.
    self.assertTrue(os.path.exists(table_dir))
    actual = set(os.listdir(table_dir))
    expected = set(['attribute1.%(endian)si4' % replacements,
                    'attribute2.%(endian)si4' % replacements])
    self.assertEqual(expected, actual)

    exporter = ExportCacheToDbfTableCommand(
        cache_directory=self._temp_dir,
        year='2000',
        table_name=table_name,
        dbf_directory=self._temp_dir,
        decimalcount=4,
        )
    exporter.execute()

    out_storage = dbf_storage(self._temp_dir)
    db = _dbf_class(out_storage._get_file_path_for_table(table_name))
    length = max([len(values[key]) for key in values.keys()])
    i = 0
    # Map dbf field name -> dbf type code ('f_type' avoids shadowing
    # the builtin 'type').
    field_type = {}
    for name, f_type in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = f_type
    for rec in db:
        for key in values.keys():
            # Bug fix: 'is' compared string identity (an interning
            # accident); '==' compares the type code's value.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(values[key][i], rec[key], 4)
            else:
                self.assertEqual(values[key][i], rec[key])
        i = i + 1
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(length, i,
                     msg="More values expected than the dbf file contains")
    db.close()
def write_table(self, table_name, table_data, mode=Storage.OVERWRITE):
    """Write ``table_data`` (dict of column name -> numpy array) to a new
    dbf file named after ``table_name`` in this storage's directory.

    Only Storage.OVERWRITE is supported; any other mode raises ValueError.
    """
    # TODO: implement Storage.APPEND for dbfstore
    if mode != Storage.OVERWRITE:
        # Bug fix: raising a plain string is a TypeError on any modern
        # Python (string exceptions were removed); raise a real exception.
        raise ValueError(
            'dbf_storage does not support anything except Storage.OVERWRITE')
    if not os.path.exists(self._directory):
        os.makedirs(self._directory)
    dbf_file_name = self._get_file_path_for_table(table_name)
    db = _dbf_class(dbf_file_name, new=True)
    number_of_rows, column_names = self._get_column_size_and_names(table_data)
    short_names = self._make_unique_names_list(column_names)
    for key in column_names:
        # 'field_type' avoids shadowing the builtin 'type'.
        field_type = self.__NUMPY_TYPES_TO_DBFPY_TYPES[
            table_data[key].dtype.char]
        if field_type == 'L':
            # Logical fields take no width/decimal specification.
            db.addField((short_names[key], field_type))
        else:
            if field_type == 'N':
                digits = (18,)  # TODO: Calculate actual digits needed.
            elif field_type == 'F':
                # TODO: Calculate actual decimal places needed.
                digits = (18, self._digits_to_right_of_decimal)
            elif field_type == 'C':
                digits = (table_data[key].dtype.itemsize,)
            db.addField((short_names[key], field_type) + digits)
    for i in range(number_of_rows):
        rec = db.newRecord()
        for key in column_names:
            if str(table_data[key].dtype.char) == '?':  # bool8
                # Store a plain Python bool rather than a numpy bool8.
                rec[short_names[key]] = bool(table_data[key][i])
            else:
                rec[short_names[key]] = table_data[key][i]
        rec.store()
    db.close()
    self._short_names = short_names
def helper_test_dbf_file(self, values, expected=None):
    """Check that the dbf file for ``out_table_name`` contains exactly
    the rows given in ``expected`` (defaulting to ``values``).
    """
    # Bug fix: compare to None with 'is', not '=='.
    if expected is None:
        expected = values
    db = _dbf_class(self.storage._get_file_path_for_table(self.out_table_name))
    length = max([len(values[key]) for key in values.keys()])
    i = 0
    # Map dbf field name -> dbf type code ('f_type' avoids shadowing
    # the builtin 'type').
    field_type = {}
    for name, f_type in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = f_type
    for rec in db:
        for key in expected.keys():
            # Bug fix: 'is' compared string identity (an interning
            # accident); '==' compares the type code's value.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(expected[key][i], rec[key])
            else:
                self.assertEqual(expected[key][i], rec[key])
        i = i + 1
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(length, i,
                     msg="More values expected than the dbf file contains")
    db.close()
def helper_test_dbf_file(self, values, expected=None):
    """Check that the dbf file for ``out_table_name`` contains exactly
    the rows given in ``expected`` (defaulting to ``values``).
    """
    # Bug fix: compare to None with 'is', not '=='.
    if expected is None:
        expected = values
    db = _dbf_class(
        self.storage._get_file_path_for_table(self.out_table_name))
    length = max([len(values[key]) for key in values.keys()])
    i = 0
    # Map dbf field name -> dbf type code ('f_type' avoids shadowing
    # the builtin 'type').
    field_type = {}
    for name, f_type in [
            field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = f_type
    for rec in db:
        for key in expected.keys():
            # Bug fix: 'is' compared string identity (an interning
            # accident); '==' compares the type code's value.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(expected[key][i], rec[key])
            else:
                self.assertEqual(expected[key][i], rec[key])
        i = i + 1
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(length, i,
                     msg="More values expected than the dbf file contains")
    db.close()