def test_incorrect_version(self):
    """A file carrying an invalid ``cuds_version`` must be rejected."""
    # Tamper with the stored version attribute directly via PyTables.
    tampered = tables.open_file(self.existing_filename, mode="a")
    try:
        tampered.root._v_attrs.cuds_version = -1
    finally:
        tampered.close()
    # Re-opening through the CUDS API should now refuse the file.
    with self.assertRaises(ValueError):
        H5CUDS.open(self.existing_filename)
def test_add_get_particle_container(self):
    """Round-trip a Particles container through one file and into a copy.

    Checks that every particle's uid and coordinates survive both the
    initial store and the container-to-container copy.
    """
    filename = os.path.join(self.temp_dir, 'test.cuds')
    filename_copy = os.path.join(self.temp_dir, 'test-copy.cuds')
    with closing(H5CUDS.open(filename, 'w')) as handle:
        # add particle container and add points to it
        handle.add_dataset(Particles(name="test"))
        pc_test = handle.get_dataset("test")
        uids = pc_test.add_particles(self.particles)
        for particle in self.particles:
            uid = particle.uid
            self.assertIn(uid, uids)
            self.assertEqual(
                particle.coordinates,
                pc_test.get_particle(uid).coordinates)
        # the stored container holds exactly the particles we added
        self.assertEqual(
            len(self.particles),
            sum(1 for _ in pc_test.iter_particles()))

        # add the particle container from the first file
        # into the second file
        with closing(H5CUDS.open(filename_copy, 'w')) as handle_copy:
            handle_copy.add_dataset(pc_test)
            pc_copy = handle_copy.get_dataset(pc_test.name)

            # the copy must mirror every particle of the original
            for particle in pc_test.iter_particles():
                particle_copy = pc_copy.get_particle(particle.uid)
                self.assertEqual(particle_copy.uid, particle.uid)
                self.assertEqual(
                    particle_copy.coordinates,
                    particle.coordinates)
def test_valid(self):
    """``valid()`` is True while a handle is open and False once closed."""
    filename = os.path.join(self.temp_dir, 'test.cuds')
    with closing(H5CUDS.open(filename, 'w')) as handle:
        self.assertTrue(handle.valid())
    # closing() has closed the handle; it must now report invalid
    self.assertFalse(handle.valid())
    with closing(H5CUDS.open(filename, 'a')) as handle:
        self.assertTrue(handle.valid())
    self.assertFalse(handle.valid())
def test_open_with_compression_off(self):
    """A file stays usable in write/read/append modes without compression."""
    no_compression = tables.Filters(complevel=0)
    filename = os.path.join(self.temp_dir, 'test.cuds')
    # Same check for every access mode, in write-first order.
    for mode in ('w', 'r', 'a'):
        with closing(
                H5CUDS.open(filename, mode,
                            filters=no_compression)) as handle:
            self.assertTrue(handle.valid())
def update(self):
    """Reload ``self.cuds`` from the dataset named ``self.dataset``.

    Opens the file at ``self.file_path`` read-only for the duration of
    the lookup.  If the dataset is missing, a warning is logged and
    ``self.cuds`` is left unchanged.
    """
    dataset = self.dataset
    with closing(H5CUDS.open(str(self.file_path))) as handle:
        try:
            self.cuds = handle.get_dataset(dataset)
        except ValueError as exception:
            # BaseException.message is Python-2 only (removed in
            # Python 3); str(exception) is portable to both.
            logger.warning(str(exception))
def test_add_get_particle_container_data(self):
    """Data attached to a Particles container survives a save/load cycle."""
    filename = os.path.join(self.temp_dir, 'test.cuds')

    # Build a container whose data block carries a name entry.
    original_pc = Particles(name="test")
    container_data = original_pc.data
    container_data[CUBA.NAME] = 'somename'
    original_pc.data = container_data

    # Persist the container together with its data.
    with closing(H5CUDS.open(filename)) as handle:
        handle.add_dataset(original_pc)

    # A fresh read-only handle must still see the stored data.
    with closing(H5CUDS.open(filename, 'r')) as handle:
        pc = handle.get_dataset('test')
        self.assertIn(CUBA.NAME, pc.data)
        self.assertEqual(pc.data[CUBA.NAME], 'somename')
def initialize(self, filename):
    """ Initialise the CUDS file source. """
    self.file_path = FilePath(filename)
    # Collect the dataset names while the file handle is open.
    with closing(H5CUDS.open(filename)) as handle:
        dataset_names = [
            dataset.name for dataset in handle.iter_datasets()]
    if not dataset_names:
        logger.warning('No datasets found in: %s', self.file_path)
    self.datasets = dataset_names
    self.initialized = True
def test_add_get_mesh(self):
    """Round-trip a Mesh through one file and a copy, then re-read it."""
    # add mesh and add points to it
    filename = os.path.join(self.temp_dir, 'test.cuds')
    filename_copy = os.path.join(self.temp_dir, 'test-copy.cuds')
    with closing(H5CUDS.open(filename)) as handle:
        handle.add_dataset(Mesh(name="test"))
        m_test = handle.get_dataset("test")
        for p in self.points:
            # add_points returns a list of uids, one per point added
            uid = m_test.add_points([p])
            self.assertEqual(p.uid, uid[0])
            self.assertEqual(
                p.coordinates, m_test.get_point(uid[0]).coordinates)
        num_points = sum(1 for _ in m_test.iter_points())
        self.assertEqual(num_points, len(self.points))

        # add the mesh from the first file into the second file
        with closing(H5CUDS.open(filename_copy)) as handle_copy:
            handle_copy.add_dataset(m_test)
            m_copy = handle_copy.get_dataset(m_test.name)
            for p in m_test.iter_points():
                p1 = m_copy.get_point(p.uid)
                self.assertEqual(p1.uid, p.uid)
                self.assertEqual(p1.coordinates, p.coordinates)

    # the handles are closed now: further access through the stale
    # proxies must fail
    with self.assertRaises(Exception):
        m_test.delete(self.points[0].uid)
    with self.assertRaises(Exception):
        handle.get_dataset('test')

    # reopen file (in read only mode)
    with closing(H5CUDS.open(filename, 'r')) as handle:
        m_test = handle.get_dataset('test')
        for p in self.points:
            p1 = m_test.get_point(p.uid)
            self.assertEqual(p1.uid, p.uid)
            self.assertEqual(p1.coordinates, p.coordinates)
def test_closed_file_not_usable(self):
    """Handles and dataset proxies raise once the file is closed."""
    filename = os.path.join(self.temp_dir, 'test.cuds')
    with closing(H5CUDS.open(filename)) as handle:
        # populate the file with one dataset of each container kind
        handle.add_dataset(Mesh(name="test_1"))
        handle.add_dataset(Particles(name="test_2"))
        lattice = make_cubic_lattice("test_3", 1.0, (2, 3, 4))
        handle.add_dataset(lattice)
        test_h1 = handle.get_dataset("test_1")
        test_h2 = handle.get_dataset("test_2")
        test_h3 = handle.get_dataset("test_3")
    # the file is closed here: every further access must fail
    with self.assertRaises(Exception):
        handle.get_dataset('test_h1')
    with self.assertRaises(Exception):
        test_h1.name = 'foo'
    with self.assertRaises(Exception):
        test_h2.name = 'foo'
    with self.assertRaises(Exception):
        test_h3.name = 'foo'
def __enter__(self):
    """Open the backing file, add a ``test`` Particles dataset, return it."""
    opened = H5CUDS.open(self._filename)
    # keep the handle so __exit__ (elsewhere) can close it
    self._file = opened
    opened.add_dataset(Particles("test"))
    return opened.get_dataset("test")
def test_open_with_read_only_mode(self):
    """A freshly written file can be reopened read-only and stays valid."""
    filename = os.path.join(self.temp_dir, 'test.cuds')
    # write first so the read-only open has something to find
    for mode in ('w', 'r'):
        with closing(H5CUDS.open(filename, mode)) as handle:
            self.assertTrue(handle.valid())
def engine_factory(self):
    """Create a file-backed engine, track it for cleanup, and return it."""
    path = os.path.join(self.temp_dir, 'test.cuds')
    engine = H5CUDS.open(path)
    # remembered so tearDown can close every engine created here
    self.engines.append(engine)
    return engine
def add_temperature(lattice):
    # Assign each node a temperature equal to the product of its
    # (1-based) index components, then push the updates back.
    new_nodes = []
    for node in lattice.iter(item_type=CUBA.NODE):
        index = numpy.array(node.index) + 1.0
        node.data[CUBA.TEMPERATURE] = numpy.prod(index)
        new_nodes.append(node)
    lattice.update(new_nodes)


# add some scalar data (i.e. temperature)
add_temperature(hexagonal)
add_temperature(orthorhombic)

# save the data into cuds.
with closing(H5CUDS.open('lattices.cuds', 'w')) as handle:
    handle.add_dataset(hexagonal)
    handle.add_dataset(orthorhombic)


@mayavi2.standalone
def view():
    from mayavi import mlab
    from mayavi.modules.glyph import Glyph
    from simphony_mayavi.sources.api import CUDSFileSource

    mayavi.new_scene()

    # Mayavi Source
    src = CUDSFileSource()
    src.initialize('lattices.cuds')
def setUp(self):
    # Fresh temp dir + file-backed handle per test; cleanup() is
    # registered via addCleanup so it runs even if setUp later fails.
    self.temp_dir = tempfile.mkdtemp()
    self.filename = os.path.join(self.temp_dir, 'test_file.cuds')
    self.addCleanup(self.cleanup)
    self.handle = H5CUDS.open(self.filename)
    # delegate the shared fixture setup to the check-mixin
    CheckManipulatingBonds.setUp(self)
                      for index, element in enumerate(edges))
edge_uids = mesh.add(edge_iter)

# add faces
face_uids = mesh.add((Face(points=[uids[index] for index in element],
                           data=DataContainer(TEMPERATURE=index + 30))
                      for index, element in enumerate(faces)))

# add cells
cell_uids = mesh.add((Cell(points=[uids[index] for index in element],
                           data=DataContainer(TEMPERATURE=index + 40))
                      for index, element in enumerate(cells)))

# save the data into cuds.
with closing(H5CUDS.open('example.cuds', 'w')) as handle:
    handle.add_dataset(mesh)
    handle.add_dataset(particles)
    handle.add_dataset(hexagonal)
    handle.add_dataset(orthorhombic)
    handle.add_dataset(body_centered)


# Now view the data.
@mayavi2.standalone
def view():
    mayavi.new_scene()  # noqa


if __name__ == '__main__':
    view()
def setUp(self):
    # Fresh temp dir + file-backed handle per test; cleanup() is
    # registered via addCleanup so it runs even if setUp later fails.
    self.temp_dir = tempfile.mkdtemp()
    self.filename = os.path.join(self.temp_dir, 'test_file.cuds')
    self.addCleanup(self.cleanup)
    self.handle = H5CUDS.open(self.filename)
    # delegate the shared fixture setup to the check-mixin
    CheckAddingParticles.setUp(self)
def setUp(self):
    """Create a temp dir containing an already-initialised CUDS file."""
    self.temp_dir = tempfile.mkdtemp()
    self.existing_filename = os.path.join(self.temp_dir, 'test.cuds')
    # Open once through the CUDS API so the file exists with a valid
    # layout, then release it immediately.
    fresh = H5CUDS.open(self.existing_filename)
    fresh.close()