def create_symbol_tables(self):
    """Round-trip a populated SymbolTableHeader through a temp file.

    Serializes the table, re-reads the 16-byte fixed object header, then the
    variable-length symbol table, and verifies every symbol survived intact.
    """
    st = SymbolTableHeader()
    st.add('root')
    st.add('time')
    st.add('time.long_name:universal time')
    st.add('time.reference:UTC')
    st.add('time.units:minutes since 2000-01-01 00:00')
    st.add('ncells')
    st.add('ncells.long_name:sequential cell count')
    st.add('lon')
    st.add('lon.long_name:Cell longitude')
    st.add('lon.units:degrees_east')
    st.add('lat')
    st.add('lat.long_name:Cell latitude')
    st.add('lat.units:degrees_north')
    st.add('U')
    st.add('U.long_name:eastward water velocity')
    st.add('V')
    st.add('V.long_name:northward water velocity')
    with TemporaryFile('w+b') as f:
        st.serialize().fwrite(f.fileno())
        f.seek(0)
        # First 16 bytes are the fixed-size object header.
        sb = StringBuffer.from_file(f.fileno(), 16)
        header = MFSObjectHeader.deserialize(sb)
        symbol_buf = StringBuffer.from_file(f.fileno(), header.total_size)
        header.deserialize_table(symbol_buf)
        # Verify EVERY symbol, not one random index: a random spot-check is
        # non-reproducible and can silently miss a corrupted entry.
        self.assertEqual(len(st.symbols), len(header.symbols))
        for i in xrange(len(st.symbols)):
            self.assertEqual(st.symbols[i].symbol, header.symbols[i].symbol)
def create_symbol_tables(self):
    """Serialize a SymbolTableHeader to a temp file and read it back.

    Checks that the fixed header deserializes first, then the symbol table,
    and that all symbols match the originals.
    """
    st = SymbolTableHeader()
    st.add('root')
    st.add('time')
    st.add('time.long_name:universal time')
    st.add('time.reference:UTC')
    st.add('time.units:minutes since 2000-01-01 00:00')
    st.add('ncells')
    st.add('ncells.long_name:sequential cell count')
    st.add('lon')
    st.add('lon.long_name:Cell longitude')
    st.add('lon.units:degrees_east')
    st.add('lat')
    st.add('lat.long_name:Cell latitude')
    st.add('lat.units:degrees_north')
    st.add('U')
    st.add('U.long_name:eastward water velocity')
    st.add('V')
    st.add('V.long_name:northward water velocity')
    with TemporaryFile('w+b') as f:
        st.serialize().fwrite(f.fileno())
        f.seek(0)
        # Fixed object header occupies the first 16 bytes.
        sb = StringBuffer.from_file(f.fileno(), 16)
        header = MFSObjectHeader.deserialize(sb)
        symbol_buf = StringBuffer.from_file(f.fileno(), header.total_size)
        header.deserialize_table(symbol_buf)
        # Deterministically compare all entries instead of a single
        # randomly-chosen one, so a failure is always reproducible.
        self.assertEqual(len(st.symbols), len(header.symbols))
        for i in xrange(len(st.symbols)):
            self.assertEqual(st.symbols[i].symbol, header.symbols[i].symbol)
def create_dataspace(self):
    """Round-trip a 3-D DataspaceHeader through a temporary file."""
    cube = DataspaceHeader((40, 40, 40))
    encoded = cube.serialize()
    with TemporaryFile('w+b') as tmp:
        encoded.fwrite(tmp.fileno())
        tmp.seek(0)
        # The fixed-size portion of the object header is 16 bytes.
        fixed = StringBuffer.from_file(tmp.fileno(), 16)
        decoded = MFSObjectHeader.deserialize(fixed)
        body = StringBuffer.from_file(tmp.fileno(), decoded.total_size)
        decoded.deserialize_dataspaces(body)
        # All three axes were created with extent 40.
        for axis in xrange(3):
            self.assertEquals(decoded.dataspaces[axis].dim_size, 40)
def create_dataspace(self):
    """Serialize a (40, 40, 40) dataspace and verify each dimension survives."""
    source = DataspaceHeader((40, 40, 40))
    wire = source.serialize()
    with TemporaryFile('w+b') as handle:
        wire.fwrite(handle.fileno())
        handle.seek(0)
        head_buf = StringBuffer.from_file(handle.fileno(), 16)
        restored = MFSObjectHeader.deserialize(head_buf)
        tail_buf = StringBuffer.from_file(handle.fileno(), restored.total_size)
        restored.deserialize_dataspaces(tail_buf)
        # Every dimension of the original cube had size 40.
        for d in xrange(3):
            self.assertEquals(restored.dataspaces[d].dim_size, 40)
def test_large_hash(self):
    """StringBuffer.hash() must agree with an independent SHA-1 computation."""
    with open('/dev/urandom', 'r+b') as rnd:
        # 12 KiB of random bytes exercises the multi-block hashing path.
        view = StringBuffer.from_file(rnd.fileno(), 4096 * 3)
        internal_digest = view.hash()
        from hashlib import sha1
        reference = StringBuffer(sha1(view.raw_read()).digest())
        self.assertEquals(reference.raw_read(), internal_digest.raw_read())
def test_from_file(self):
    """StringBuffer.from_file returns the bytes previously written to the fd."""
    with TemporaryFile('w+b') as tmp:
        payload = 'hello world'
        tmp.write(payload)
        tmp.seek(0)
        # Requests one byte beyond the payload — presumably to cover a
        # trailing NUL / short-read path; read() should still yield the text.
        loaded = StringBuffer.from_file(tmp.fileno(), len(payload) + 1)
        self.assertEquals(loaded.read(), payload)
def test_dataspace_header(self):
    """Deserializing a DataspaceHeader restores header fields and dim sizes."""
    with TemporaryFile('w+b') as tmp:
        original = DataspaceHeader((2, 10))
        wire = original.serialize()
        wire.fwrite(tmp.fileno())
        tmp.seek(0)
        head_buf = StringBuffer.from_file(tmp.fileno(), 16)
        restored = MFSObjectHeader.deserialize(head_buf)
        # The factory must hand back the concrete subclass.
        self.assertIsInstance(restored, DataspaceHeader)
        self.assertEquals(restored.ver, 0)
        self.assertEquals(restored.dims, 2)
        self.assertEquals(restored.flags, 0)
        self.assertEquals(restored.total_size, 16)
        tail_buf = StringBuffer.from_file(tmp.fileno(), restored.total_size)
        restored.deserialize_dataspaces(tail_buf)
        self.assertEquals(restored.dataspaces[0].dim_size, 2)
        self.assertEquals(restored.dataspaces[1].dim_size, 10)
def test_dataspace_header(self):
    """A (2, 10) dataspace round-trips: version, dims, flags, sizes all match."""
    with TemporaryFile('w+b') as handle:
        ds = DataspaceHeader((2, 10))
        serialized = ds.serialize()
        serialized.fwrite(handle.fileno())
        handle.seek(0)
        fixed = StringBuffer.from_file(handle.fileno(), 16)
        decoded = MFSObjectHeader.deserialize(fixed)
        # Generic deserialize should dispatch to the dataspace subclass.
        self.assertIsInstance(decoded, DataspaceHeader)
        self.assertEquals(decoded.ver, 0)
        self.assertEquals(decoded.dims, 2)
        self.assertEquals(decoded.flags, 0)
        self.assertEquals(decoded.total_size, 16)
        variable = StringBuffer.from_file(handle.fileno(), decoded.total_size)
        decoded.deserialize_dataspaces(variable)
        self.assertEquals(decoded.dataspaces[0].dim_size, 2)
        self.assertEquals(decoded.dataspaces[1].dim_size, 10)
def test_merkle_node(self):
    """A MerkleNodeHeader with five children survives a file round-trip."""
    text = 'sample text'
    digest = sha1(text).digest()
    child = MerkleNode(3, 0, 0, digest)
    node = MerkleNodeHeader()
    # Attach the same child five times; only the stored SHAs matter here.
    for _ in xrange(5):
        node.add_child(child)
    with TemporaryFile('w+b') as tmp:
        node.serialize().fwrite(tmp.fileno())
        tmp.seek(0)
        fixed = StringBuffer.from_file(tmp.fileno(), 16)
        decoded = MFSObjectHeader.deserialize(fixed)
        body = StringBuffer.from_file(tmp.fileno(), decoded.total_size)
        decoded.deserialize_children(body)
        # The last child's SHA must survive serialization unchanged.
        self.assertEquals(node.objects[-1].sha, decoded.objects[-1].sha)
def test_symbol_table_write(self):
    """Symbols — including the empty string — round-trip through a temp file."""
    table = SymbolTableHeader()
    table.add('root')
    table.add('')  # Null symbol
    table.add('time')
    table.add('time.units:seconds since 1900-01-01')
    table.add('lat')
    table.add('lat.units:degrees_north')
    table.add('lon.units:degrees_east')
    with TemporaryFile('w+b') as tmp:
        encoded = table.serialize()
        encoded.fwrite(tmp.fileno())
        tmp.seek(0)
        fixed = StringBuffer.from_file(tmp.fileno(), 16)
        decoded = MFSObjectHeader.deserialize(fixed)
        symbols = StringBuffer.from_file(tmp.fileno(), decoded.total_size)
        decoded.deserialize_table(symbols)
        # Each stored symbol must equal its original, position by position.
        for idx in xrange(len(table.symbols)):
            self.assertEquals(table.symbols[idx].symbol,
                              decoded.symbols[idx].symbol)