def test_save_and_load(self):
    """
    All .wcon files in the tests folder are loaded, saved, then loaded
    again, and compared with the original loaded file.

    This is one of the best and most comprehensive tests in this suite.
    """
    print("BIG TEST: Test load and save and load and save")
    files_to_test = glob.glob('../../../tests/*.wcon')

    for JSON_path in files_to_test:
        for pretty in [True, False]:
            print("LOADING FOR TEST: " + JSON_path +
                  " (PRETTY = " + str(pretty) + ")")

            w_loaded = WCONWorms.load_from_file(
                JSON_path, validate_against_schema=False)

            # Save these worm tracks to a file, then load that file
            test_path = 'test.wcon'
            test_path2 = 'test2.wcon'
            try:
                w_loaded.save_to_file(test_path, pretty_print=pretty)
                w_from_saved = WCONWorms.load_from_file(
                    test_path, validate_against_schema=False)
                self.assertEqual(w_loaded, w_from_saved)

                # Then load and save AGAIN and do a file comparison to
                # make sure it's the same.  This will test that we order
                # the keys (even though this isn't in the WCON standard
                # it's nice for human readability, i.e. to have "units"
                # before "data", "id" first in a data segment, etc.)
                w_loaded_again = WCONWorms.load_from_file(
                    test_path, validate_against_schema=False)
                self.assertEqual(w_loaded, w_loaded_again)
                self.assertEqual(w_loaded, w_from_saved)
                self.assertEqual(w_loaded_again, w_from_saved)

                w_loaded_again.save_to_file(test_path2,
                                            pretty_print=pretty)

                # As described in the above comment: check that running
                # load/save twice should generate an IDENTICAL file.
                self.assertTrue(filecmp.cmp(test_path, test_path2))
            finally:
                # BUGFIX: the temp files were previously removed only on
                # success; a failing assertion leaked 'test.wcon' /
                # 'test2.wcon' into the working directory, which could
                # confuse subsequent runs.
                for path in (test_path, test_path2):
                    if os.path.exists(path):
                        os.remove(path)
def test_empty_aspect_size(self):
    """
    Worms with a segment that is empty should still parse without issue,
    and should survive a save/load round-trip unchanged.
    """
    WCON_string = \
        """
        {
            "units":{"t":"s", "x":"mm", "y":"mm"},
            "data":[{ "id":2, "t":1.4,
                      "x":[125.11, 126.14, 117.12],
                      "y":[23.3, 22.23, 21135.08] },
                    { "id":1, "t":1.4,
                      "x":[1215.11, 1216.14, 1217.12],
                      "y":[234.89, 265.23, 235.08] },
                    { "id":2, "t":1.5,
                      "x":[1215.11, 1216.14, 1217.12],
                      "y":[234.89, 265.23, 235.08] },
                    { "id":1, "t":[1.3,1.5],
                      "x":[[],[1215.11, 1216.14, 1217.12]],
                      "y":[[],[234.89, 265.23, 235.08]] }
            ]
        }
        """
    w = WCONWorms.load(StringIO(WCON_string))

    # Round-trip through a file; the reloaded worm must compare equal.
    test_path = 'test_empty_aspect.wcon'
    try:
        w.save_to_file(test_path, pretty_print=True)
        w_from_saved = WCONWorms.load_from_file(test_path)
        self.assertEqual(w, w_from_saved)
    finally:
        # BUGFIX: remove the temp file even when the assertion fails, so
        # a failing run does not leave 'test_empty_aspect.wcon' behind.
        if os.path.exists(test_path):
            os.remove(test_path)
def test_chunks(self):
    """
    Test load_from_file with two or more chunks.

    Three chunk files reference each other via their "files" objects;
    loading any one of them should yield the same worm as manually
    merging all three with the + operator.
    """
    # Define our chunks
    chunks = []
    chunks.append(
        ('{"files":{"current":"0.wcon", "prev":null,'
         '"next":["1.wcon", "2.wcon"]},'
         '"units":{"t":"s","x":"mm","y":"mm"},'
         '"data":[{"id":"3", "t":[1.3], '
         '"x":[[3,4]], "y":[[5.4,3]]}]}'))
    chunks.append(('{"units":{"t":"s","x":"mm","y":"mm"},'
                   '"files":{"current":"1.wcon", "prev":["0.wcon"],'
                   '"next":["2.wcon"]},'
                   '"data":[{"id":"3", "t":[1.4], '
                   '"x":[[5,1]], "y":[[15.4,3]]}]}'))
    chunks.append(
        ('{"units":{"t":"s","x":"mm","y":"mm"},'
         '"files":{"current":"2.wcon", "prev":["1.wcon", "0.wcon"],'
         '"next":null},'
         '"data":[{"id":"3", "t":[1.5], '
         '"x":[[8,4.2]], "y":[[35.4,3]]}]}'))

    # Create filenames for our chunks (one pass: name and write each
    # chunk together instead of two separate enumerate loops)
    chunk_filenames = ['test_chunk_' + str(i) + '.wcon'
                       for i in range(len(chunks))]
    try:
        # Save these chunks as files
        for chunk_filename, chunk in zip(chunk_filenames, chunks):
            with open(chunk_filename, 'w') as outfile:
                outfile.write(chunk)

        # First load one of them
        worm_combined_manually = WCONWorms.load(StringIO(chunks[0]))

        # Then merge the others sequentially to the first one
        for chunk in chunks[1:]:
            worm_chunk = WCONWorms.load(StringIO(chunk))
            worm_combined_manually += worm_chunk

        # Validate that the chunks together are __eq__ to the files
        # that find each other through their "files" object
        for chunk_filename in chunk_filenames:
            # Worm from files that found each other through
            # the "files" object
            worm_from_files = WCONWorms.load_from_file(chunk_filename)
            self.assertEqual(worm_from_files, worm_combined_manually)
    finally:
        # BUGFIX: delete the chunk files even when an assertion fails;
        # previously a failure leaked test_chunk_*.wcon to disk.
        for chunk_filename in chunk_filenames:
            if os.path.exists(chunk_filename):
                os.remove(chunk_filename)
def test_merge(self):
    """
    Merging two worms with the + operator: identical data merges
    cleanly, conflicting data raises, and dropping the conflicting
    row makes the merge succeed with the new value winning.
    """
    JSON_path = '../../../tests/minimax.wcon'
    first = WCONWorms.load_from_file(JSON_path)
    second = WCONWorms.load_from_file(JSON_path)

    # This should work, since the data is the same
    merged = first + second

    # Modifying w2's data in just one spot is enough to make the data
    # clash and the merge should fail
    first._data[1].loc[1.3, (1, 'x', 0)] = 4000
    with self.assertRaises(AssertionError):
        merged = first + second

    # But if we drop that entire row in w3, it should accomodate the new
    # figure
    second._data[1].drop(1.3, axis=0, inplace=True)
    merged = first + second
    self.assertEqual(merged.data.loc[1.3, (1, 'x', 0)], 4000)
def test_offset_example_files(self):
    """
    All four .wcon test files starting with 'offset_' should represent
    the same spine points.
    """
    offset_files = ['offset_and_centroid', 'offset_no_centroid_yes',
                    'offset_none', 'offset_only']
    worms = []
    for f in offset_files:
        worms.append(
            WCONWorms.load_from_file('../../../tests/%s.wcon' % f))

    # Assume neither commutativity nor transitivity, nor even identity
    # i.e. test all worms against all others
    for left in worms:
        for right in worms:
            self.assertEqual(left, right)
def test_equality_operator(self):
    """
    __eq__ should treat a worm and its canonical-units version as
    equal, while a unit-blind data comparison should not; changing the
    units afterwards must break equality.
    """
    JSON_path = '../../../tests/minimax.wcon'
    w2 = WCONWorms.load_from_file(JSON_path)
    w2.units['y'] = MeasurementUnit.create('m')

    # BUGFIX: removed unused local `w2data = w2.data.copy()` — it was
    # assigned and never read.
    w2c = w2.to_canon

    # A change to_canon should change the data in one but equality
    # should remain
    self.assertFalse(WCONWorms.is_data_equal(w2, w2c,
                                             convert_units=False))
    self.assertEqual(w2, w2)
    self.assertEqual(w2c, w2c)
    self.assertEqual(w2, w2c)

    # Change the units for w2 (not really what we should do but just
    # force a difference now, with w2c)
    w2.units['y'] = MeasurementUnit.create('mm')
    self.assertNotEqual(w2, w2c)
def test_load_from_file(self):
    """Test that .load_from_file works identically to .load"""
    worm_file_text3 = (
        '{"units":{"t":"s","x":"mm","y":"mm"},"data":[{"id":3, "t":1.3, '
        '"x":[3,4], "y":[5.4,3]}]}')

    # STREAM: parse directly from an in-memory buffer
    from_stream = WCONWorms.load(StringIO(worm_file_text3))

    # FILE: write the same text to disk, load it back, then clean up
    path = "test.wcon"
    with open(path, 'w') as fh:
        fh.write(worm_file_text3)
    from_file = WCONWorms.load_from_file(path)
    os.remove(path)

    # COMPARISON: both paths must yield equal worms
    self.assertEqual(from_file, from_stream)
def test_load_from_file(self):
    """Test that .load_from_file works identically to .load"""
    text = (
        '{"units":{"t":"s","x":"mm","y":"mm"},"data":[{"id":3, "t":1.3, '
        '"x":[3,4], "y":[5.4,3]}]}')

    # Load the same WCON text once via a stream and once via a file.
    stream_worm = WCONWorms.load(StringIO(text))

    with open("test.wcon", 'w') as outfile:
        outfile.write(text)
    file_worm = WCONWorms.load_from_file("test.wcon")
    os.remove("test.wcon")

    # The two load paths must agree.
    self.assertEqual(file_worm, stream_worm)
# Smoke-test script: exercises load, save, and merge (+) through the plain
# (non-wrapper) Python API, writing its results out as .wcon files.
import sys
sys.path.append('../../Python')

import wcon
from wcon import WCONWorms
from wcon import MeasurementUnit

# Load a known example file and save it back out pretty-printed.
worm = WCONWorms.load_from_file('../../../tests/minimax.wcon')
worm.save_to_file('pythonNonWrapperTest.wcon', pretty_print=True)

# Merge with a compatible file in both operand orders and save each result.
# NOTE(review): presumably the two outputs should be equivalent (merge
# commutativity) — confirm by comparing the two saved files.
mergeable = WCONWorms.load_from_file('extra-test-data/minimax-mergeable.wcon')
merged = worm + mergeable
merged.save_to_file('pythonMerged.wcon', pretty_print=True)
merged2 = mergeable + worm
merged2.save_to_file('pythonMerged2.wcon', pretty_print=True)

# Load a minimal file and print whatever its metadata attribute holds.
nometa = WCONWorms.load_from_file('../../../tests/minimal.wcon')
print(nometa.metadata)
# NOTE(review): this chunk begins mid-function — these two returns are
# presumably the tail of a timing helper that prefers time.monotonic()
# when available and falls back to time.time(); the `def` and the `if`
# condition are outside this view. TODO confirm against the full file.
        return time.monotonic()
    else:
        return time.time()


if __name__ == '__main__':
    # def test_big_file():
    # Manual benchmark: load a (large) WCON file named on the command
    # line, save it, and reload it, printing wall-clock timings for
    # each step.
    print("BIG TEST: Test load and save and load and save")
    files_to_test = [sys.argv[1]]

    for JSON_path in files_to_test:
        for pretty in [True]:  # , False]:
            print("LOADING FOR TEST: " + JSON_path +
                  " (PRETTY = " + str(pretty) + ")")

            start_time = timing_function()
            w1 = WCONWorms.load_from_file(JSON_path,
                                          validate_against_schema=False)
            print("Time to load w1: " +
                  str(timing_function() - start_time))

            # Save these worm tracks to a file, then load that file
            test_path = 'test.wcon'
            start_time = timing_function()
            w1.save_to_file(test_path, pretty_print=pretty)
            print("Time to save w1: " +
                  str(timing_function() - start_time))

            start_time = timing_function()
            w2 = WCONWorms.load_from_file(test_path,
                                          validate_against_schema=False)
            print("Time to load w2: " +
                  str(timing_function() - start_time))

            # x1 = w1.data.loc[:, idx[0, 'x', 0]].fillna(0)
            # x2 = w2.data.loc[:, idx[0, 'x', 0]].fillna(0)
# NOTE(review): this chunk begins mid-function — this `else` is presumably
# the fallback branch of a timing helper whose `def` and `if` are outside
# this view; it returns the wall-clock time. TODO confirm.
    else:
        return time.time()


if __name__ == '__main__':
    # def test_big_file():
    # Manual benchmark: load a (large) WCON file named on the command
    # line, save it, and reload it, printing wall-clock timings for
    # each step.
    print("BIG TEST: Test load and save and load and save")
    files_to_test = [sys.argv[1]]

    for JSON_path in files_to_test:
        for pretty in [True]:  # , False]:
            print("LOADING FOR TEST: " + JSON_path +
                  " (PRETTY = " + str(pretty) + ")")

            start_time = timing_function()
            w1 = WCONWorms.load_from_file(JSON_path,
                                          validate_against_schema=False)
            print("Time to load w1: " +
                  str(timing_function() - start_time))

            # Save these worm tracks to a file, then load that file
            test_path = 'test.wcon'
            start_time = timing_function()
            w1.save_to_file(test_path, pretty_print=pretty)
            print("Time to save w1: " +
                  str(timing_function() - start_time))

            start_time = timing_function()
            w2 = WCONWorms.load_from_file(test_path,
                                          validate_against_schema=False)
            print("Time to load w2: " +
                  str(timing_function() - start_time))

            # x1 = w1.data.loc[:, idx[0, 'x', 0]].fillna(0)
            # x2 = w2.data.loc[:, idx[0, 'x', 0]].fillna(0)
# -*- coding: utf-8 -*- import sys sys.path.append('..') from wcon import WCONWorms, MeasurementUnit file_name = 'asic-1 (ok415) on food L_2010_07_08__11_46_40___7___5.wcon' w = WCONWorms.load_from_file(file_name)
# Scratch/example script: round-trips the minimax example through
# save/load and builds a MeasurementUnit.
from six import StringIO

import json

from wcon import WCONWorms, MeasurementUnit

# NOTE(review): `pd` is used below but `import pandas as pd` is not
# visible in this chunk — presumably it appears earlier in the file;
# confirm.
pd.set_option('display.expand_frame_repr', False)

# Suppress RuntimeWarning warnings in Spider because it's a known bug
# http://stackoverflow.com/questions/30519487/
# warnings.simplefilter(action = "ignore", category = RuntimeWarning)

if __name__ == '__main__':
    # Load, save pretty-printed, and reload the minimax example.
    JSON_path = '../../tests/minimax.wcon'
    w2 = WCONWorms.load_from_file(JSON_path)
    w2.save_to_file('example_saved_file.WCON', pretty_print=True)
    w3 = WCONWorms.load_from_file('example_saved_file.WCON')

    u = MeasurementUnit.create('cm')

    # io=StringIO()
    # json.dump([None], io)
    # io.getvalue()

# '__main__2' never equals __name__, so this block is deliberately
# disabled scratch code kept around for reference.
if __name__ == '__main__2':
    worm_file_text2 = (('{"units":{"t":"s","x":"m","y":"m"},'
                        '"data":[{"id":3, "t":1.3, '
                        '"x":[3,4], "y":[5.4,3]}]}'))
# Scratch/example script: round-trips the minimax example through
# save/load and builds a MeasurementUnit.
import json

from wcon import WCONWorms, MeasurementUnit

# NOTE(review): `pd` is used below but `import pandas as pd` is not
# visible in this chunk — presumably it appears earlier in the file;
# confirm.
pd.set_option('display.expand_frame_repr', False)

# Suppress RuntimeWarning warnings in Spider because it's a known bug
# http://stackoverflow.com/questions/30519487/
# warnings.simplefilter(action = "ignore", category = RuntimeWarning)

if __name__ == '__main__':
    # Load, save pretty-printed, and reload the minimax example.
    JSON_path = '../../tests/minimax.wcon'
    w2 = WCONWorms.load_from_file(JSON_path)
    w2.save_to_file('example_saved_file.WCON', pretty_print=True)
    w3 = WCONWorms.load_from_file('example_saved_file.WCON')

    u = MeasurementUnit.create('cm')

    # io=StringIO()
    # json.dump([None], io)
    # io.getvalue()

# '__main__2' never equals __name__, so this block is deliberately
# disabled scratch code kept around for reference.
if __name__ == '__main__2':
    worm_file_text2 = (('{"units":{"t":"s","x":"m","y":"m"},'
                        '"data":[{"id":3, "t":1.3, '
                        '"x":[3,4], "y":[5.4,3]}]}'))