def test_save_invalid_file(self):
    """ Confirms GDFIOError is raised when save() is handed a non-file object

    :return:
    """
    # An integer is neither an open stream nor a path, so save() must reject it
    with self.assertRaises(easygdf.GDFIOError):
        easygdf.save(100, [])
def test_header_write(self):
    """ Round-trips a full GDF header through save()/load() and compares

    :return:
    """
    # Truncate "now" to whole seconds (GDF stores the timestamp as an int)
    stamp = datetime.datetime.fromtimestamp(
        int(datetime.datetime.timestamp(datetime.datetime.now())),
        tz=datetime.timezone.utc)
    header = {
        "creation_time": stamp,
        "creator": "easygdf",
        "destination": "hardgdf",
        "gdf_version": (1, 1),
        "creator_version": (3, 4),
        "destination_version": (5, 6),
        "dummy": (7, 8),
    }

    # Write the header with an empty block list, then read the file back
    path = os.path.join(tempfile.gettempdir(), "save_header.gdf")
    with open(path, "wb") as f:
        easygdf.save(f, [], **header)
    with open(path, "rb") as f:
        result = easygdf.load(f)

    # Once the blocks entry is removed, the loaded dict must equal the header
    result.pop("blocks")
    self.assertEqual(header, result)
def trim_initial_distribution():
    """Trim the initial-distribution GDF file down to n_particles and save it."""
    # Read the original initial distribution
    with open(initial_distribution_file, "rb") as f:
        data = easygdf.load(f)

    # Keep only array-valued blocks, truncated to n_particles and rounded
    # to 4 significant figures
    data["blocks"] = [
        {"name": blk["name"],
         "param": round_sigfigs(blk["param"][:n_particles], 4)}
        for blk in data["blocks"]
        if isinstance(blk["param"], np.ndarray)
    ]

    # Write the trimmed file back out
    with open(os.path.join(data_files_path, "initial_distribution.gdf"), "wb") as f:
        easygdf.save(f, **data)

    # Build and display the flattened {name: param} structure we expect,
    # with the remaining header entries merged in
    flattened = {blk["name"]: blk["param"] for blk in data["blocks"]}
    data.pop("blocks")
    flattened.update(data)
    print(flattened)
def test_int_array_overflow(self):
    """ Confirms error is thrown if an integer larger than GDFs max size is
    passed, array version

    :return:
    """
    # (filename, offending array) pairs: the first value does not fit in
    # int32, the second does not fit in uint32
    cases = [
        ("save_int_array_overflow_1.gdf",
         np.array([0x80000000, 0, 0, 0], dtype=np.int64)),
        ("save_int_array_overflow_2.gdf",
         np.array([0x100000000, 0, 0, 0], dtype=np.uint64)),
    ]
    for fname, arr in cases:
        with open(os.path.join(tempfile.gettempdir(), fname), "wb") as f:
            with self.assertRaises(ValueError):
                easygdf.save(f, [
                    {"name": "ID", "value": arr, "children": []},
                ])
def trim_screens_tout():
    """Trim the screens/touts GDF file to n_blocks groups of n_particles each."""
    # Read the full screen/tout output file
    with open(screen_tout_file, "rb") as f:
        data = easygdf.load(f)

    # Keep at most n_blocks screens ("position") and n_blocks touts ("time");
    # every other block is auxiliary and is kept unconditionally
    kept = []
    counts = {"position": 0, "time": 0}
    for blk in data["blocks"]:
        name = blk["name"]
        if name in counts:
            if counts[name] < n_blocks:
                kept.append(blk)
                counts[name] += 1
        else:
            kept.append(blk)

    # Truncate each child array to n_particles, rounded to 4 sig figs
    particle_blocks = []
    for blk in kept:
        children = [
            {"name": c["name"],
             "param": round_sigfigs(c["param"][:n_particles], 4),
             "children": []}
            for c in blk["children"]
        ]
        particle_blocks.append(
            {"name": blk["name"], "param": blk["param"], "children": children})
    data["blocks"] = particle_blocks

    # Save the trimmed down blocks
    with open(os.path.join(data_files_path, "screens_touts.gdf"), "wb") as f:
        easygdf.save(f, **data)

    # Reshape the blocks into the {"screens": [...], "touts": [...]} layout
    screens_touts = {"screens": [], "touts": []}
    conv = {"time": "touts", "position": "screens"}
    for blk in particle_blocks:
        if blk["name"] in conv:
            # Screens/touts become dicts of their children plus the group param
            group = {c["name"]: c["param"] for c in blk["children"]}
            group[blk["name"]] = blk["param"]
            screens_touts[conv[blk["name"]]].append(group)
        else:
            # Auxiliary blocks sit at the root of the structure
            screens_touts[blk["name"]] = blk["param"]

    # Display the converted structure
    print(screens_touts)
def test_save_block_bad(self):
    """ Try throwing random stuff into the blocks section

    :return:
    """
    # Use a filename unique to this test.  The original name,
    # "save_has_bad_key.gdf", was copy-pasted from test_save_block_has_bad_key
    # and shared by three tests, risking temp-file collisions.
    testfile = os.path.join(tempfile.gettempdir(), "save_block_bad.gdf")
    with open(testfile, "wb") as f:
        # A bare string is not a list of block dicts; save() must reject it
        with self.assertRaises(TypeError):
            easygdf.save(f, "some blocks")
def test_save_block_has_bad_key(self):
    """ Confirms exception is thrown when block is given with an incorrect key

    :return:
    """
    target = os.path.join(tempfile.gettempdir(), "save_has_bad_key.gdf")
    with open(target, "wb") as f:
        # "Bad Key" is not a recognized block key, so save() must raise
        with self.assertRaises(ValueError):
            easygdf.save(f, [{"Bad Key": None}])
def test_save_wrong_file_mode(self):
    """ Confirms exception is thrown when file open in wrong mode

    :return:
    """
    target = os.path.join(tempfile.gettempdir(), "save_wrong_file_mode.gdf")
    # GDF is binary; a text-mode ("w") handle must be rejected by save()
    with open(target, "w") as f:
        with self.assertRaises(easygdf.GDFIOError):
            easygdf.save(f, [])
def test_save_all_dtypes(self):
    """ Saves each possible valid datatype and confirms it round-trips
    through load()

    :return:
    """
    # Make a place to hold blocks which we will write
    ref_blocks = []

    # One array block per GDF-compatible numpy dtype.  NOTE: the "float_"
    # alias was removed in NumPy 2.0 (it would now raise TypeError inside
    # np.linspace); it was an exact synonym for "float64", which is already
    # in this list, so coverage is unchanged.
    dtypes = [
        "int8", "int16", "int32", "int64", "uint8", "uint16", "uint32",
        "uint64", "float32", "float64"
    ]
    for dtype in dtypes:
        ref_blocks.append({
            "name": "array_" + dtype,
            "value": np.linspace(0, 5, 6, dtype=dtype),
        })

    # Add each supported scalar type
    ref_blocks.append({
        "name": "single_str",
        "value": "deadbeef",
    })
    ref_blocks.append({
        "name": "single_int",
        "value": 1729,
    })
    ref_blocks.append({
        "name": "single_float",
        "value": 3.14,
    })
    ref_blocks.append({
        "name": "single_none",
        "value": None,
    })

    # Save everything as a GDF file
    testfile = os.path.join(tempfile.gettempdir(), "save_all_dtypes.gdf")
    with open(testfile, "wb") as f:
        easygdf.save(f, ref_blocks)

    # Read it back in
    with open(testfile, "rb") as f:
        data = easygdf.load(f)

    # Go through the blocks and make sure they match
    for test, ref in zip(data["blocks"], ref_blocks):
        self.assertEqual(test["name"], ref["name"])
        if isinstance(ref["value"], np.ndarray):
            np.testing.assert_equal(test["value"], ref["value"])
        else:
            self.assertEqual(test["value"], ref["value"])
def test_save_bad_numpy_dtype(self):
    """ Confirm exception when writing numpy array with dtype incompatible with GDF

    :return:
    """
    # Save the file and check the exception.  Use np.complex128 rather than
    # np.complex: the latter alias was deprecated in NumPy 1.20 and removed
    # in 1.24, so the original code raised AttributeError before easygdf's
    # TypeError could be exercised.
    testfile = os.path.join(tempfile.gettempdir(), "save_bad_numpy_dtype.gdf")
    with open(testfile, "wb") as f:
        with self.assertRaises(TypeError):
            easygdf.save(
                f, [{
                    "value": np.linspace(0, 5, 6, dtype=np.complex128)
                }])
def test_save_recursion_limit(self):
    """ Try hitting the recursion limit (16 levels by default)

    :return:
    """
    # Build a chain of nested children 17 levels deep — one past the limit
    nested = [{}]
    for _ in range(17):
        nested = [{"children": nested}]

    # Saving it should trip the recursion guard
    target = os.path.join(tempfile.gettempdir(), "save_recursion_limit.gdf")
    with open(target, "wb") as f:
        with self.assertRaises(RecursionError):
            easygdf.save(f, nested)
def test_uniform_interface(self):
    """ This test confirms that the output of the load function can be dumped
    directly into the input of the save function of the library with no
    modifications.  This is one of my requirements for the library and would
    like a check on it.

    :return: None
    """
    # Load one of the bundled test problems
    with pkg_resources.resource_stream("easygdf.tests", "data/test.gdf") as f:
        all_data = easygdf.load(f)

    # Feeding the loaded dict straight back into save() must succeed
    out_path = os.path.join(tempfile.gettempdir(), "easygdf_interface_test.gdf")
    with open(out_path, "wb") as f:
        easygdf.save(f, **all_data)
def test_save_groups(self):
    """ Try some blocks that have children

    :return:
    """
    # Build a four-level chain of nested groups, innermost first
    ref = [{"name": "D", "value": "another string", "children": []}]
    for name, value in (("C", 1.2), ("B", "string"), ("A", 0)):
        ref = [{"name": name, "value": value, "children": ref}]

    # Write it and read it back
    target = os.path.join(tempfile.gettempdir(), "save_groups.gdf")
    with open(target, "wb") as f:
        easygdf.save(f, ref)
    with open(target, "rb") as f:
        loaded = easygdf.load(f)["blocks"]

    # Check that they match
    self.assertEqual(loaded, ref)
def test_integer_casting(self):
    """ Confirms that integer 64 bit arrays get cast to 32 bits and saved.
    This is related to github issue 5 because GDF appears to not support
    64 bit integers.

    :return:
    """
    # (filename, input dtype, expected dtype after the round trip)
    cases = [
        ("save_initial_distribution_test_integer_casting_1.gdf",
         np.int64, "int32"),
        ("save_initial_distribution_test_integer_casting_2.gdf",
         np.uint64, "uint32"),
    ]
    for fname, in_dtype, out_dtype in cases:
        test_file = os.path.join(tempfile.gettempdir(), fname)
        with open(test_file, "wb") as f:
            easygdf.save(f, [
                {"name": "ID",
                 "value": np.zeros(32, dtype=in_dtype),
                 "children": []},
            ])
        # The saved array should come back narrowed to 32 bits
        self.assertEqual(
            easygdf.load_initial_distribution(test_file)["ID"].dtype,
            np.dtype(out_dtype))
def test_save_block_bad_dtype(self):
    """ Confirms exception is thrown when block is given with incorrect
    datatype for key

    :return:
    """
    # Use a filename unique to this test.  The original name,
    # "save_has_bad_key.gdf", was copy-pasted from another test and shared
    # across three tests, risking temp-file collisions.
    testfile = os.path.join(tempfile.gettempdir(), "save_block_bad_dtype.gdf")

    # Each block carries one key whose value has the wrong type for that key
    bad_blocks = [
        {"name": None},
        {"children": "my children"},
        {"value": {"a": "dictionary"}},
    ]
    for block in bad_blocks:
        with open(testfile, "wb") as f:
            with self.assertRaises(TypeError):
                easygdf.save(f, [block])
def test_int_single_overflow(self):
    """ Confirms error is thrown if an integer larger than GDFs max size is
    passed.  Test this for both positive and negative values.  The positive
    integer should be cast to uint32 and the negative to int32.

    :return:
    """
    def save_single(fname, value):
        # Write one scalar integer block to a file in the temp directory
        with open(os.path.join(tempfile.gettempdir(), fname), "wb") as f:
            easygdf.save(f, [
                {"name": "ID", "value": value, "children": []},
            ])

    # Too negative for int32 -> must overflow
    with self.assertRaises(ValueError):
        save_single("save_int_single_overflow_1.gdf", -0x80000000)

    # Bigger than max int32 but within uint32 -> must NOT overflow
    save_single("save_int_single_overflow_2.gdf", 0x80000000)

    # Too big even for uint32 -> must overflow
    with self.assertRaises(ValueError):
        save_single("save_int_single_overflow_3.gdf", 0x100000000)
import easygdf
import numpy as np

# Write an example file exercising several value types: an array, a string,
# and a group (a block that carries children)
blocks = [
    {"name": "an array", "value": np.array([0, 1, 2, 3])},
    {"name": "a string", "value": "Hello world!"},
    {
        "name": "a group",
        "value": 3.14,
        "children": [{"name": "child", "value": 1.0}],
    },
]
easygdf.save("minimal.gdf", blocks)

# Read the file back and summarize every top-level block
d = easygdf.load("minimal.gdf")
for b in d["blocks"]:
    print("name='{0}'; value='{1}'; n_children={2}".format(
        b["name"], b["value"], len(b["children"])))
def test_save_str_URI(self):
    """ Saves same data as stored in a known existing binary file and compares
    the two, but uses a string filename instead of an open stream.

    :return:
    """
    # Header matching the one stored in the reference file
    fh = {
        'creation_time': datetime.datetime(2020, 11, 25, 17, 34, 24,
                                           tzinfo=datetime.timezone.utc),
        'creator': 'ASCI2GDF',
        'destination': '',
        'gdf_version': (1, 1),
        'creator_version': (1, 0),
        'destination_version': (0, 0),
        'dummy': (0, 0),
    }

    # Block data matching the reference file
    blocks = [
        {"name": "X", "value": np.linspace(0, 5, 6), "children": []},
        {"name": "Y", "value": np.linspace(5, 0, 6), "children": []},
    ]

    # Save via a string path (not an open stream) into the temp directory
    test_file = os.path.join(tempfile.gettempdir(), "easygdf_test_URI.gdf")
    easygdf.save(test_file, blocks, **fh)

    # Read back both our output and the bundled reference file
    with open(test_file, "rb") as f:
        test_data = bytearray(f.read())
    with pkg_resources.resource_stream("easygdf.tests", "data/test.gdf") as f:
        reference_data = bytearray(f.read())

    # Zero out the tails of the reference's string buffers that easygdf
    # does not write (it pads with zeros) so the byte comparison is fair
    for idx in (*range(31, 35), *range(56, 62), *range(128, 134)):
        reference_data[idx] = 0x00

    # Check the files against each other
    self.assertEqual(test_data, reference_data)