def test_large_lookup(self):
    # Verify that extra lookup space is allocated when many blank
    # lookups are added to the file.
    src_path = tests.get_data_path(('FF', 'ancillary', 'qrparm.mask'))
    with self.temp_filename() as temp_path:
        shutil.copyfile(src_path, temp_path)
        ffv = FieldsFileVariant(temp_path, FieldsFileVariant.UPDATE_MODE)
        real_field = ffv.fields[0]
        expected_data = real_field.get_data()
        int_hdrs = real_field.int_headers.copy()
        real_hdrs = real_field.real_headers.copy()
        int_hdrs[:] = -99  # The 'invalid' signature
        real_hdrs[:] = 0.0
        # Determine how many lookups fill one file 'sector'.
        per_sector = ffv._WORDS_PER_SECTOR / real_field.num_values()
        # Assemble a fields list with many "blank" entries and one
        # "real" field at the end.
        n_blanks = 2 * int(np.ceil(per_sector))
        blanks = [Field(int_headers=int_hdrs,
                        real_headers=real_hdrs,
                        data_provider=None)
                  for _ in range(n_blanks)]
        ffv.fields = blanks + [real_field]
        ffv.close()

        # Re-open and check everything round-tripped.
        ffv = FieldsFileVariant(temp_path)
        self.assertEqual(len(ffv.fields), n_blanks + 1)
        # The final ("real") field must still hold the original data.
        final = ffv.fields[-1]
        self.assertArrayEqual(final.get_data(), expected_data)
def test_copy(self):
    # Copying every attribute into a new file should reproduce the
    # original with only minimal, known differences.
    src_path = tests.get_data_path(('FF', 'ancillary', 'qrparm.mask'))
    source = FieldsFileVariant(src_path, FieldsFileVariant.READ_MODE)
    with self.temp_filename() as temp_path:
        target = FieldsFileVariant(temp_path,
                                   FieldsFileVariant.CREATE_MODE)
        target.fixed_length_header = source.fixed_length_header
        for name, kind in FieldsFileVariant._COMPONENTS:
            setattr(target, name, getattr(source, name))
        target.fields = source.fields
        target.close()

        # Compare the two files word-by-word at the binary level.
        src_words = np.fromfile(src_path, dtype='>i8', count=-1)
        dest_words = np.fromfile(temp_path, dtype='>i8', count=-1)
        diffs = np.where(src_words != dest_words)[0]
        # Only the known, acceptable differences are permitted.
        self.assertArrayEqual(
            diffs,
            [110, 111, 125, 126, 130, 135, 140, 142, 144, 160])
        # All but the last difference arise from writing IMDI instead
        # of 1 to mark an unused dimension length.
        self.assertArrayEqual(dest_words[diffs[:-1]], [IMDI] * 9)
        # The last difference is the DATA component length, which
        # changed because the final field was padded.
        self.assertEqual(dest_words[160], 956416)
def test_copy(self):
    # A component-by-component copy into a fresh file should match
    # the source at the binary level, apart from a few known deltas.
    src_path = tests.get_data_path(('FF', 'ancillary', 'qrparm.mask'))
    original = FieldsFileVariant(src_path, FieldsFileVariant.READ_MODE)
    with self.temp_filename() as temp_path:
        duplicate = FieldsFileVariant(temp_path,
                                      FieldsFileVariant.CREATE_MODE)
        duplicate.fixed_length_header = original.fixed_length_header
        for component_name, component_kind in \
                FieldsFileVariant._COMPONENTS:
            setattr(duplicate, component_name,
                    getattr(original, component_name))
        duplicate.fields = original.fields
        duplicate.close()

        # Binary comparison of the two files.
        before = np.fromfile(src_path, dtype='>i8', count=-1)
        after = np.fromfile(temp_path, dtype='>i8', count=-1)
        mismatches = np.where(before != after)[0]
        # Only the expected set of differing words is acceptable.
        self.assertArrayEqual(
            mismatches,
            [110, 111, 125, 126, 130, 135, 140, 142, 144, 160])
        # Every difference except the last comes from the use of IMDI
        # instead of 1 to mark an unused dimension length.
        self.assertArrayEqual(after[mismatches[:-1]], [IMDI] * 9)
        # The final difference is the DATA component length, altered
        # because the last field was padded.
        self.assertEqual(after[160], 956416)
def test_large_lookup(self):
    # Ensure the lookup table grows to accommodate a large number of
    # blank lookups.
    src_path = tests.get_data_path(('FF', 'ancillary', 'qrparm.mask'))
    with self.temp_filename() as temp_path:
        shutil.copyfile(src_path, temp_path)
        ffv = FieldsFileVariant(temp_path, FieldsFileVariant.UPDATE_MODE)
        keeper = ffv.fields[0]
        keeper_data = keeper.get_data()
        ints = keeper.int_headers.copy()
        reals = keeper.real_headers.copy()
        ints[:] = -99  # The 'invalid' signature
        reals[:] = 0.0
        # Number of lookups required to fill a single file 'sector'.
        sector_fill = ffv._WORDS_PER_SECTOR / keeper.num_values()
        blank_count = 2 * int(np.ceil(sector_fill))
        # Many "blank" fields followed by the one "real" field.
        replacement_fields = []
        for _ in range(blank_count):
            replacement_fields.append(Field(int_headers=ints,
                                            real_headers=reals,
                                            data_provider=None))
        replacement_fields.append(keeper)
        ffv.fields = replacement_fields
        ffv.close()

        # Reload and confirm every lookup survived the write.
        ffv = FieldsFileVariant(temp_path)
        self.assertEqual(len(ffv.fields), blank_count + 1)
        # The last ("real") field should still carry its data.
        last_field = ffv.fields[-1]
        self.assertArrayEqual(last_field.get_data(), keeper_data)
def test_create(self):
    # Check we can create a new file from scratch, with the correct
    # cross-referencing automatically applied to the headers to
    # enable it to load again.
    with self.temp_filename() as temp_path:
        ffv = FieldsFileVariant(temp_path, FieldsFileVariant.CREATE_MODE)
        ffv.fixed_length_header = FixedLengthHeader([-1] * 256)
        ffv.fixed_length_header.data_set_format_version = 20
        ffv.fixed_length_header.sub_model = 1
        ffv.fixed_length_header.dataset_type = 3
        constants = IMDI * np.ones(46, dtype=int)
        constants[5] = 4
        constants[6] = 5
        ffv.integer_constants = constants
        ints = IMDI * np.ones(45, dtype=int)
        ints[17] = 4  # LBROW
        ints[18] = 5  # LBNPT
        ints[20] = 0  # LBPACK
        ints[21] = 2  # LBREL
        ints[38] = 1  # LBUSER(1)
        # Use an explicit list rather than a bare `range` so the real
        # headers are a concrete sequence on Python 3 as well as
        # Python 2, matching the companion test_create variant.
        reals = list(range(19))
        src_data = np.arange(20, dtype='f4').reshape((4, 5))
        ffv.fields = [Field2(ints, reals, src_data)]
        ffv.close()

        ffv = FieldsFileVariant(temp_path)
        # Fill with -1 instead of IMDI so we can detect where IMDI
        # values are being automatically set.
        expected = -np.ones(256, dtype=int)
        expected[0] = 20
        expected[1] = 1
        expected[4] = 3
        expected[99:101] = (257, 46)  # Integer constants
        expected[104:106] = IMDI
        expected[109:112] = IMDI
        expected[114:117] = IMDI
        expected[119:122] = IMDI
        expected[124:127] = IMDI
        expected[129:131] = IMDI
        expected[134:136] = IMDI
        expected[139:145] = IMDI
        expected[149:152] = (303, 64, 1)  # 303 = 256 + 46 + 1
        expected[159:161] = (2049, 2048)
        # Compare using lists because we get more helpful error messages!
        self.assertEqual(list(ffv.fixed_length_header.raw),
                         list(expected))
        self.assertArrayEqual(ffv.integer_constants, constants)
        self.assertIsNone(ffv.real_constants)
        self.assertEqual(len(ffv.fields), 1)
        for field in ffv.fields:
            data = field.get_data()
            self.assertArrayEqual(data, src_data)
def test_create(self):
    # Building a file from scratch should automatically cross-reference
    # the headers so that the result loads again correctly.
    with self.temp_filename() as temp_path:
        ffv = FieldsFileVariant(temp_path, FieldsFileVariant.CREATE_MODE)
        ffv.fixed_length_header = FixedLengthHeader([-1] * 256)
        ffv.fixed_length_header.data_set_format_version = 20
        ffv.fixed_length_header.sub_model = 1
        ffv.fixed_length_header.dataset_type = 3
        int_consts = IMDI * np.ones(46, dtype=int)
        int_consts[5] = 4
        int_consts[6] = 5
        ffv.integer_constants = int_consts
        lookup_ints = IMDI * np.ones(45, dtype=int)
        lookup_ints[17] = 4  # LBROW
        lookup_ints[18] = 5  # LBNPT
        lookup_ints[20] = 0  # LBPACK
        lookup_ints[21] = 2  # LBREL
        lookup_ints[38] = 1  # LBUSER(1)
        lookup_reals = list(range(19))
        source_data = np.arange(20, dtype='f4').reshape((4, 5))
        ffv.fields = [Field2(lookup_ints, lookup_reals, source_data)]
        ffv.close()

        ffv = FieldsFileVariant(temp_path)
        # Fill with -1 (not IMDI) so automatically-assigned IMDI
        # values stand out.
        expected_flh = -np.ones(256, dtype=int)
        expected_flh[0] = 20
        expected_flh[1] = 1
        expected_flh[4] = 3
        expected_flh[99:101] = (257, 46)  # Integer constants
        imdi_spans = [(104, 106), (109, 112), (114, 117), (119, 122),
                      (124, 127), (129, 131), (134, 136), (139, 145)]
        for start, stop in imdi_spans:
            expected_flh[start:stop] = IMDI
        expected_flh[149:152] = (303, 64, 1)  # 303 = 256 + 46 + 1
        expected_flh[159:161] = (2049, 2048)
        # Lists give far more helpful failure messages than arrays.
        self.assertEqual(list(ffv.fixed_length_header.raw),
                         list(expected_flh))
        self.assertArrayEqual(ffv.integer_constants, int_consts)
        self.assertIsNone(ffv.real_constants)
        self.assertEqual(len(ffv.fields), 1)
        for field in ffv.fields:
            self.assertArrayEqual(field.get_data(), source_data)
def test_fail_save_with_packing(self):
    # Saving data flagged as packed should raise an error.
    src_path = tests.get_data_path(('FF', 'n48_multi_field'))
    with self.temp_filename() as temp_path:
        # Copy the source file and open the copy for update.
        shutil.copyfile(src_path, temp_path)
        ffv = FieldsFileVariant(temp_path, FieldsFileVariant.UPDATE_MODE)
        # Keep only the first field.
        only_field = ffv.fields[0]
        ffv.fields = [only_field]
        # Realise the field data as a concrete array.
        only_field.set_data(only_field.get_data())
        # Request a packed format on save, which is unsupported.
        only_field.lbpack = 1
        msg = 'Cannot save.*lbpack=1.*packing not supported'
        with self.assertRaisesRegexp(ValueError, msg):
            ffv.close()
def test_save_packed_mixed(self):
    # Exercise every save option, showing a file can be "partially"
    # unpacked.
    src_path = tests.get_data_path(('FF', 'n48_multi_field'))
    with self.temp_filename() as temp_path:
        # Copy the source file and open the copy for update.
        shutil.copyfile(src_path, temp_path)
        ffv = FieldsFileVariant(temp_path, FieldsFileVariant.UPDATE_MODE)
        # Keep only the first three fields.
        ffv.fields = ffv.fields[:3]
        # Each of these should be WGDOS packed on disk.
        self.assertTrue(all(fld.lbpack == 1 for fld in ffv.fields))

        # Adjust the fields so each one covers a different saving
        # 'style'.
        # Field#0 : store packed as unpacked.
        expect_0 = ffv.fields[0].get_data()
        ffv.fields[0].lbpack = 2000
        # Field#1 : pass-through packed as packed.
        expect_1 = ffv.fields[1].get_data()
        # Field#2 : save array as unpacked.
        shape = (ffv.fields[2].lbrow, ffv.fields[2].lbnpt)
        expect_2 = np.arange(np.prod(shape)).reshape(shape)
        ffv.fields[2].set_data(expect_2)
        ffv.fields[2].lbpack = 3000
        ffv.close()

        # Reload and verify each field round-tripped as intended.
        ffv = FieldsFileVariant(temp_path)
        self.assertEqual(len(ffv.fields), 3)
        # Field#0.
        self.assertEqual(ffv.fields[0].lbpack, 2000)
        self.assertArrayAllClose(ffv.fields[0].get_data(), expect_0)
        # Field#1.
        self.assertEqual(ffv.fields[1].lbpack, 1)
        self.assertArrayAllClose(ffv.fields[1].get_data(), expect_1)
        # Field#2.
        self.assertEqual(ffv.fields[2].lbpack, 3000)
        self.assertArrayAllClose(ffv.fields[2].get_data(), expect_2)