def test_gi_state(self): # offsets 346640-347663, 348176-348943, 349456-349711 test_file1 = os.path.join(INPUT_GI_PATH, 'gi_477-2015-006-0-0.mdd') mdd.procall([test_file1]) file_state = self.get_file_state('node14p1.dat') # first full record starts 32 bytes later, 346640+32=346672, then 250 bytes valid, then no more valid records expected_file_state_1 = {StateKey.UNPROCESSED_DATA: [[0, 346672], [346922, 349712]], StateKey.FILE_SIZE: 349712, StateKey.OUTPUT_INDEX: 1} if file_state != expected_file_state_1: print "file state try 1: '%s'" % file_state self.fail("Expected file state 1 does not match") test_file2 = os.path.join(INPUT_GI_PATH, 'gi_477-2014-355-0-0.mdd') mdd.procall([test_file2]) file_state = self.get_file_state('node14p1.dat') expected_file_state_2 = {StateKey.UNPROCESSED_DATA: [[0, 296406], [298500, 311147], [311328, 330181], [330514, 346672], [346922, 349712]], StateKey.FILE_SIZE: 349712, StateKey.OUTPUT_INDEX: 2} if file_state != expected_file_state_2: print "file state try 2: '%s'" % file_state self.fail("Expected file state 2 does not match")
def test_verify_mdd(self): # offsets 346640-347663, 348176-348943, 349456-349711 test_file1 = os.path.join(INPUT_GI_PATH, 'gi_477-2015-006-0-0.mdd') mdd.procall([test_file1]) fid = open(test_file1) mdd_data = fid.read() fid.close() fid = open(os.path.join(OUTPUT_PATH, 'node14p1.dat')) node_data = fid.read() fid.close() # manually inspected file to locate start and end offset of data block # start/end offset 346640 - 347663 if node_data[346640:347664] != mdd_data[240:1264]: print "First data block does not match" self.fail('First data block does not match') # start/end offset 348176 - 348943 if node_data[348176:348944] != mdd_data[1311:2079]: print "Second data block does not match" self.fail('Second data block does not match') # start/end offset 349456 - 349711 if node_data[349456:349712] != mdd_data[2126:2382]: print "Third data block does not match" self.fail('Third data block does not match')
def test_gi_state(self): # offsets 346640-347663, 348176-348943, 349456-349711 test_file1 = os.path.join(INPUT_GI_PATH, 'gi_477-2015-006-0-0.mdd') mdd.procall([test_file1]) file_state = self.get_file_state('node14p1.dat') # first full record starts 32 bytes later, 346640+32=346672, then 250 bytes valid, then no more valid records expected_file_state_1 = { StateKey.UNPROCESSED_DATA: [[0, 346672], [346922, 349712]], StateKey.FILE_SIZE: 349712, StateKey.OUTPUT_INDEX: 1 } if file_state != expected_file_state_1: print "file state try 1: '%s'" % file_state self.fail("Expected file state 1 does not match") test_file2 = os.path.join(INPUT_GI_PATH, 'gi_477-2014-355-0-0.mdd') mdd.procall([test_file2]) file_state = self.get_file_state('node14p1.dat') expected_file_state_2 = { StateKey.UNPROCESSED_DATA: [[0, 296406], [298500, 311147], [311328, 330181], [330514, 346672], [346922, 349712]], StateKey.FILE_SIZE: 349712, StateKey.OUTPUT_INDEX: 2 } if file_state != expected_file_state_2: print "file state try 2: '%s'" % file_state self.fail("Expected file state 2 does not match")
def test_state(self): """ Parse two files, check that the state saved in the pickle file matches the expected, then parse another file and check that the state updated correctly. """ # blocks [0 3583] [3840 4058] test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd') # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059] test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd') # parse the two .mdd files into the node and instrument group files mdd.procall([test_file1, test_file2]) file_state = self.get_file_state('node58p1.dat') # there is an unprocessed '/n' in between records expected_file_state = { StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 4060, StateKey.OUTPUT_INDEX: 1 } if file_state != expected_file_state: print file_state self.fail("Expected file state 1 does not match") # blocks [0 2047] [2304 4095] [4096 7451] test_file3 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-6-0.mdd') # parse another .mdd file adding on to the node file, and making # another sequence of instrument group files mdd.procall([test_file3]) file_state = self.get_file_state('node58p1.dat') expected_file_state = { StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 7452, StateKey.OUTPUT_INDEX: 2 } if file_state != expected_file_state: print "file state: '%s'" % file_state self.fail("Expected file state 2 does not match") data_orig = self.read_full_file('node58p1.dat') # read the data from all generated files into one data string data_out = self.read_full_file('node58p1_0.status_1236801.dat') data_out += self.read_full_file('node58p1_0.wa_wfp_1236820.dat') data_out += self.read_full_file('node58p1_0.wc_wfp_1236820.dat') data_out += self.read_full_file('node58p1_0.we_wfp_1236820.dat') data_out += self.read_full_file('node58p1_1.status_1236801.dat') data_out += self.read_full_file('node58p1_1.wa_wfp_1236822.dat') data_out += self.read_full_file('node58p1_1.wc_wfp_1236822.dat') data_out += 
self.read_full_file('node58p1_1.we_wfp_1236822.dat') # confirm data in the node file matches those output in the instrument groups if not TestSioUnpack.compare_sio_matches(data_orig, data_out): self.fail("Failed sio block compare")
def test_duplicate(self):
    """
    Regression test: re-parsing must not introduce duplicate blocks in
    the output.
    """
    test_file = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-225-1-0.mdd')
    mdd.procall([test_file])
    self.compare_node58()
def test_full_hypm(self):
    """
    Process every hypm .mdd file at once and verify the node58 output.
    """
    test_files = glob.glob(INPUT_HYPM_PATH + '/*.mdd')
    mdd.procall(test_files)
    self.compare_node58()
def test_recent_format(self):
    """
    Test that the recent .mdd format can also be parsed, checking each
    affected node file.
    """
    test_files = glob.glob(INPUT_GI_PATH + '/unit_*.mdd')
    mdd.procall(test_files)
    self.compare_node14()
    self.compare_node16()
    self.compare_node17()
def test_state(self): """ Parse two files, check that the state saved in the pickle file matches the expected, then parse another file and check that the state updated correctly. """ # blocks [0 3583] [3840 4058] test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd') # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059] test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd') # parse the two .mdd files into the node and instrument group files mdd.procall([test_file1, test_file2]) file_state = self.get_file_state('node58p1.dat') # there is an unprocessed '/n' in between records expected_file_state = {StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 4060, StateKey.OUTPUT_INDEX: 1} if file_state != expected_file_state: print file_state self.fail("Expected file state 1 does not match") # blocks [0 2047] [2304 4095] [4096 7451] test_file3 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-6-0.mdd') # parse another .mdd file adding on to the node file, and making # another sequence of instrument group files mdd.procall([test_file3]) file_state = self.get_file_state('node58p1.dat') expected_file_state = {StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 7452, StateKey.OUTPUT_INDEX: 2} if file_state != expected_file_state: print "file state: '%s'" % file_state self.fail("Expected file state 2 does not match") data_orig = self.read_full_file('node58p1.dat') # read the data from all generated files into one data string data_out = self.read_full_file('node58p1_0.status.dat') data_out += self.read_full_file('node58p1_0.wa_wfp.dat') data_out += self.read_full_file('node58p1_0.wc_wfp.dat') data_out += self.read_full_file('node58p1_0.we_wfp.dat') data_out += self.read_full_file('node58p1_1.status.dat') data_out += self.read_full_file('node58p1_1.wa_wfp.dat') data_out += self.read_full_file('node58p1_1.wc_wfp.dat') data_out += self.read_full_file('node58p1_1.we_wfp.dat') # confirm data in the node file matches 
those output in the instrument groups if not TestSioUnpack.compare_sio_matches(data_orig, data_out): self.fail("Failed sio block compare")
def test_large_hypm(self):
    """
    Test with a larger set of hypm files, processed as two batches, and
    verify each resulting sequence of instrument group files.
    """
    test_files_225 = glob.glob(INPUT_HYPM_PATH + '/unit_364-2013-225*.mdd')
    mdd.procall(test_files_225)
    # compare the node58p1 data and that in the 1st sequence of
    # instrument group files
    data_out = self.compare_node58()

    # test with a second set of files
    test_files_237 = glob.glob(INPUT_HYPM_PATH + '/unit_364-2013-237*.mdd')
    mdd.procall(test_files_237)
    # compare the node58p1 data and that in the 2nd sequence of
    # instrument group files
    self.compare_node58(1, data_out)
def test_old_format_for_tags(self):
    """
    Same as test for tags to see if there are header tags in the data
    for the older deployments.
    """
    test_files = glob.glob(INPUT_HYPM_PATH + '/unit_*.mdd')
    test_files.extend(glob.glob(INPUT_FLMB_PATH + '/unit_*.mdd'))
    mdd.procall(test_files)

    # neither node file should contain header tags
    data = self.read_full_file('node58p1.dat')
    if not self.check_for_tags(data):
        self.fail("Found header tag in data file")
    data = self.read_full_file('node59p1.dat')
    if not self.check_for_tags(data):
        self.fail("Found header tag in data file")
def test_hypm_flmb(self):
    """
    Test with data in two different locations at the same time.
    """
    # test with two different locations at the same time
    test_files = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-218*.mdd')
    test_files_225 = glob.glob(INPUT_HYPM_PATH + '/unit_364-2013-225*.mdd')
    test_files.extend(test_files_225)
    mdd.procall(test_files)

    # this one can take a while to process all the files, sleep for a
    # bit to make sure it is done before checking the output files
    time.sleep(3)

    self.compare_node58()
    self.compare_node59()
def test_no_tags(self):
    """
    Test that the generated data files do not contain the header tags.
    """
    test_files = glob.glob(INPUT_GI_PATH + '/gi_*.mdd')
    mdd.procall(test_files)

    # check each node output file for stray header tags
    for node_file in ('node16p1.dat', 'node17p1.dat', 'node14p1.dat'):
        data = self.read_full_file(node_file)
        if not self.check_for_tags(data):
            self.fail("Found header tag in data file")
def test_sects(self):
    """
    Test that a processing done in the getmdd script succeeds, since we
    don't have enough config to run the script.
    """
    test_files = glob.glob(INPUT_HYPM_PATH + '/unit_*.mdd') + \
        glob.glob(INPUT_FLMB_PATH + '/unit_*.mdd')
    sects = mdd.procall(test_files)
    TestSioUnpack.latest(sects)
def test_sects(self):
    """
    Test that a processing done in the getmdd script succeeds, since we
    don't have enough config to run the script.
    """
    test_files = glob.glob(INPUT_HYPM_PATH + '/*.mdd') + \
        glob.glob(INPUT_FLMB_PATH + '/*.mdd')
    sects = mdd.procall(test_files)
    TestSioUnpack.latest(sects)
def test_simple(self):
    """
    Run a simple test which parses two .mdd files into a node file and
    its individual instrument group files.  Confirm the data types in
    each individual file only contain the allowed IDs.
    """
    # blocks [0 3583] [3840 4058]
    test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd')
    # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059]
    test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd')

    # parse the two test files into the node and instrument group files
    mdd.procall([test_file1, test_file2])

    data_orig = self.read_full_file('node58p1.dat')

    # read the data from all generated files into one data string
    data_out = self.read_full_file('node58p1_0.status_1236801.dat')
    # confirm this file only has the allowed instrument IDs
    self.check_sio_type(data_out, ['PS', 'CS'])

    data_out_wa = self.read_full_file('node58p1_0.wa_wfp_1236820.dat')
    # confirm this file only has the allowed instrument IDs
    self.check_sio_type(data_out_wa, ['WA'])
    data_out += data_out_wa

    data_out_wc = self.read_full_file('node58p1_0.wc_wfp_1236820.dat')
    # confirm this file only has the allowed instrument IDs
    self.check_sio_type(data_out_wc, ['WC'])
    data_out += data_out_wc

    data_out_we = self.read_full_file('node58p1_0.we_wfp_1236820.dat')
    # confirm this file only has the allowed instrument IDs
    # (bug fix: this previously re-checked data_out_wc instead of the
    # WE data, so the WE file was never actually validated)
    self.check_sio_type(data_out_we, ['WE'])
    data_out += data_out_we

    # confirm that all data blocks from the node data file made it
    # into the instrument group files
    if not TestSioUnpack.compare_sio_matches(data_orig, data_out):
        self.fail("Failed sio block compare")
def test_simple(self):
    """
    Run a simple test which parses two .mdd files into a node file and
    its individual instrument group files.  Confirm the data types in
    each individual file only contain the allowed IDs.
    """
    # blocks [0 3583] [3840 4058]
    test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd')
    # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059]
    test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd')

    # parse the two test files into the node and instrument group files
    mdd.procall([test_file1, test_file2])

    data_orig = self.read_full_file('node58p1.dat')

    # read the data from all generated files into one data string
    data_out = self.read_full_file('node58p1_0.status.dat')
    # confirm this file only has the allowed instrument IDs
    self.check_sio_type(data_out, ['PS', 'CS'])

    data_out_wa = self.read_full_file('node58p1_0.wa_wfp.dat')
    # confirm this file only has the allowed instrument IDs
    self.check_sio_type(data_out_wa, ['WA'])
    data_out += data_out_wa

    data_out_wc = self.read_full_file('node58p1_0.wc_wfp.dat')
    # confirm this file only has the allowed instrument IDs
    self.check_sio_type(data_out_wc, ['WC'])
    data_out += data_out_wc

    data_out_we = self.read_full_file('node58p1_0.we_wfp.dat')
    # confirm this file only has the allowed instrument IDs
    # (bug fix: this previously re-checked data_out_wc instead of the
    # WE data, so the WE file was never actually validated)
    self.check_sio_type(data_out_we, ['WE'])
    data_out += data_out_we

    # confirm that all data blocks from the node data file made it
    # into the instrument group files
    if not TestSioUnpack.compare_sio_matches(data_orig, data_out):
        self.fail("Failed sio block compare")
def test_large_flmb(self):
    """
    Test with a larger set of flmb files, confirming that the
    instrument files generated only contain the allowed instrument IDs.
    """
    test_files_218 = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-218*.mdd')
    mdd.procall(test_files_218)

    data_orig = self.read_full_file('node59p1.dat')

    # read each instrument group file, confirming the instrument IDs it
    # is allowed to contain, and accumulate the data for comparison
    data_out = self.read_full_file('node59p1_0.status.dat')
    self.check_sio_type(data_out, ['CS', 'PS'])

    data_adcps = self.read_full_file('node59p1_0.adcps.dat')
    self.check_sio_type(data_adcps, ['AD'])
    data_out += data_adcps

    data_ctdmo = self.read_full_file('node59p1_0.ctdmo.dat')
    self.check_sio_type(data_ctdmo, ['CT', 'CO'])
    data_out += data_ctdmo

    data_dosta = self.read_full_file('node59p1_0.dosta.dat')
    self.check_sio_type(data_dosta, ['DO'])
    data_out += data_dosta

    data_flort = self.read_full_file('node59p1_0.flort.dat')
    self.check_sio_type(data_flort, ['FL'])
    data_out += data_flort

    data_phsen = self.read_full_file('node59p1_0.phsen.dat')
    self.check_sio_type(data_phsen, ['PH'])
    data_out += data_phsen

    if not TestSioUnpack.compare_sio_matches(data_orig, data_out):
        self.fail("Failed sio block compare")

    # second batch of files, producing sequence 1
    test_files = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-205*.mdd')
    test_files_217 = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-217*.mdd')
    test_files_219 = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-219*.mdd')
    test_files.extend(test_files_217)
    test_files.extend(test_files_219)
    mdd.procall(test_files)
    data_out = self.compare_node59(1, data_out)

    # third batch of files, producing sequence 2
    test_files = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-233*.mdd')
    test_files_231 = glob.glob(INPUT_FLMB_PATH + '/unit_363-2013-231*.mdd')
    test_files.extend(test_files_231)
    mdd.procall(test_files)
    self.compare_node59(2, data_out)
def test_update_file_state(self): """ Test the missing update file state cases """ # blocks [0 4012], based on unit_362-2013-202-2-0.mdd test_file1 = os.path.join(INPUT_HYPM_PATH, 'first.mdd') # parse the first .mdd files into the node and instrument group files mdd.procall([test_file1]) file_state = self.get_file_state('node60p1.dat') expected_file_state_1 = {StateKey.UNPROCESSED_DATA: [], StateKey.FILE_SIZE: 4012, StateKey.OUTPUT_INDEX: 1} if file_state != expected_file_state_1: print "file state try 1: '%s'" % file_state self.fail("Expected file state 1 does not match") test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_362-2013-202-2-0.mdd') # parse the first .mdd files into the node and instrument group files mdd.procall([test_file2]) file_state = self.get_file_state('node60p1.dat') expected_file_state_2 = {StateKey.UNPROCESSED_DATA: [[4736, 8192]], StateKey.FILE_SIZE: 8192, StateKey.OUTPUT_INDEX: 2} if file_state != expected_file_state_2: print "file state try 2: '%s'" % file_state self.fail("Expected file state 2 does not match") # start second test, switch to node58 # blocks [0 3583] [3840 4058] test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd') mdd.procall([test_file1]) file_state = self.get_file_state('node58p1.dat') expected_file_state = {StateKey.UNPROCESSED_DATA: [[3189, 3945]], StateKey.FILE_SIZE: 4059, StateKey.OUTPUT_INDEX: 1} if file_state != expected_file_state: print file_state self.fail("Expected file state 3 does not match") # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059] test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd') # parse the two .mdd files into the node and instrument group files mdd.procall([test_file2]) file_state = self.get_file_state('node58p1.dat') # there is an unprocessed '/n' in between records expected_file_state = {StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 4060, StateKey.OUTPUT_INDEX: 2} if file_state != expected_file_state: print file_state 
self.fail("Expected file state 4 does not match")
def test_update_file_state(self): """ Test the missing update file state cases """ # blocks [0 4012], based on unit_362-2013-202-2-0.mdd test_file1 = os.path.join(INPUT_HYPM_PATH, 'first.mdd') # parse the first .mdd files into the node and instrument group files mdd.procall([test_file1]) file_state = self.get_file_state('node60p1.dat') expected_file_state_1 = { StateKey.UNPROCESSED_DATA: [], StateKey.FILE_SIZE: 4012, StateKey.OUTPUT_INDEX: 1 } if file_state != expected_file_state_1: print "file state try 1: '%s'" % file_state self.fail("Expected file state 1 does not match") test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_362-2013-202-2-0.mdd') # parse the first .mdd files into the node and instrument group files mdd.procall([test_file2]) file_state = self.get_file_state('node60p1.dat') expected_file_state_2 = { StateKey.UNPROCESSED_DATA: [[4736, 8192]], StateKey.FILE_SIZE: 8192, StateKey.OUTPUT_INDEX: 2 } if file_state != expected_file_state_2: print "file state try 2: '%s'" % file_state self.fail("Expected file state 2 does not match") # start second test, switch to node58 # blocks [0 3583] [3840 4058] test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd') mdd.procall([test_file1]) file_state = self.get_file_state('node58p1.dat') expected_file_state = { StateKey.UNPROCESSED_DATA: [[3189, 3945]], StateKey.FILE_SIZE: 4059, StateKey.OUTPUT_INDEX: 1 } if file_state != expected_file_state: print file_state self.fail("Expected file state 3 does not match") # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059] test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd') # parse the two .mdd files into the node and instrument group files mdd.procall([test_file2]) file_state = self.get_file_state('node58p1.dat') # there is an unprocessed '/n' in between records expected_file_state = { StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 4060, StateKey.OUTPUT_INDEX: 2 } if file_state != expected_file_state: print 
file_state self.fail("Expected file state 4 does not match")
def test_empty_sequence(self): """ Test to ensure empty sequence files are not created if no new data is found """ # blocks [0 3583] [3840 4058] test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd') # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059] test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd') # parse the two .mdd files into the node and instrument group files mdd.procall([test_file1, test_file2]) file_state = self.get_file_state('node58p1.dat') # there is an unprocessed '/n' in between records expected_file_state_1 = {StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 4060, StateKey.OUTPUT_INDEX: 1} if file_state != expected_file_state_1: print "file state try 1: '%s'" % file_state self.fail("Expected file state 1 does not match") # try to parse again with the same files mdd.procall([test_file1, test_file2]) file_state = self.get_file_state('node58p1.dat') if file_state != expected_file_state_1: print "file state try 2: '%s'" % file_state self.fail("Expected file state 2 does not match") # blocks [0 2047] [2304 4095] [4096 7451] test_file3 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-6-0.mdd') # parse another .mdd file adding on to the node file, and making # another sequence of instrument group files mdd.procall([test_file3]) file_state = self.get_file_state('node58p1.dat') expected_file_state_2 = {StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 7452, StateKey.OUTPUT_INDEX: 2} if file_state != expected_file_state_2: print "file state try 3: '%s'" % file_state self.fail("Expected file state 3 does not match") # parse the same file a second time mdd.procall([test_file3]) # the state should stay the same as before file_state = self.get_file_state('node58p1.dat') if file_state != expected_file_state_2: print "file state try 4: '%s'" % file_state self.fail("Expected file state 3 does not match") # try the first ones again, should still stay the same mdd.procall([test_file1, 
test_file2]) # the state should stay the same as before file_state = self.get_file_state('node58p1.dat') if file_state != expected_file_state_2: print "file state try 5: '%s'" % file_state self.fail("Expected file state 3 does not match")
def test_empty_sequence(self): """ Test to ensure empty sequence files are not created if no new data is found """ # blocks [0 3583] [3840 4058] test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd') # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059] test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd') # parse the two .mdd files into the node and instrument group files mdd.procall([test_file1, test_file2]) file_state = self.get_file_state('node58p1.dat') # there is an unprocessed '/n' in between records expected_file_state_1 = { StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 4060, StateKey.OUTPUT_INDEX: 1 } if file_state != expected_file_state_1: print "file state try 1: '%s'" % file_state self.fail("Expected file state 1 does not match") # try to parse again with the same files mdd.procall([test_file1, test_file2]) file_state = self.get_file_state('node58p1.dat') if file_state != expected_file_state_1: print "file state try 2: '%s'" % file_state self.fail("Expected file state 2 does not match") # blocks [0 2047] [2304 4095] [4096 7451] test_file3 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-6-0.mdd') # parse another .mdd file adding on to the node file, and making # another sequence of instrument group files mdd.procall([test_file3]) file_state = self.get_file_state('node58p1.dat') expected_file_state_2 = { StateKey.UNPROCESSED_DATA: [[4059, 4060]], StateKey.FILE_SIZE: 7452, StateKey.OUTPUT_INDEX: 2 } if file_state != expected_file_state_2: print "file state try 3: '%s'" % file_state self.fail("Expected file state 3 does not match") # parse the same file a second time mdd.procall([test_file3]) # the state should stay the same as before file_state = self.get_file_state('node58p1.dat') if file_state != expected_file_state_2: print "file state try 4: '%s'" % file_state self.fail("Expected file state 3 does not match") # try the first ones again, should still stay the same 
mdd.procall([test_file1, test_file2]) # the state should stay the same as before file_state = self.get_file_state('node58p1.dat') if file_state != expected_file_state_2: print "file state try 5: '%s'" % file_state self.fail("Expected file state 3 does not match")