def test_calc_minimize_input(self):
    # Ensure defaults match control
    atoms = Atoms("Fe8", positions=np.zeros((8, 3)), cell=np.eye(3))
    self.minimize_control_job.structure = atoms
    self.minimize_control_job.input.control.calc_minimize()
    self.minimize_control_job._interactive_lammps_input()
    self.minimize_job.structure = atoms
    self.minimize_job.calc_minimize()
    self.minimize_job._interactive_lammps_input()
    self.assertEqual(
        self.minimize_control_job._interactive_library._command,
        self.minimize_job._interactive_library._command
    )

    # Ensure that pressure inputs are being parsed OK
    self.minimize_job.calc_minimize(pressure=0)
    self.minimize_job._interactive_lammps_input()
    self.assertTrue(
        "fix ensemble all box/relax iso 0.0"
        in self.minimize_job._interactive_library._command
    )

    self.minimize_job.calc_minimize(pressure=[0.0, 0.0, 0.0])
    self.minimize_job._interactive_lammps_input()
    self.assertTrue(
        "fix ensemble all box/relax x 0.0 y 0.0 z 0.0 couple none"
        in self.minimize_job._interactive_library._command
    )

    self.minimize_job.calc_minimize(pressure=[1, 2, None, 0., 0., None])
    self.minimize_job._interactive_lammps_input()
    self.assertTrue(
        "fix ensemble all box/relax x 10000.0 y 20000.0 xy 0.0 xz 0.0 couple none"
        in self.minimize_job._interactive_library._command
    )
def get_magnetic_structure(job):
    basis = Atoms().from_hdf(job["input"])
    magmons = basis.get_initial_magnetic_moments()
    if all(magmons == None):
        return {"magnetic_structure": "non magnetic"}
    else:
        abs_sum_mag = sum(np.abs(magmons))
        sum_mag = sum(magmons)
        if abs_sum_mag == 0 and sum_mag == 0:
            return {"magnetic_structure": "non magnetic"}
        elif abs_sum_mag == np.abs(sum_mag):
            return {"magnetic_structure": "ferro-magnetic"}
        elif abs_sum_mag > 0 and sum_mag == 0:
            return {"magnetic_structure": "para-magnetic"}
        else:
            return {"magnetic_structure": "unknown"}
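# Illustration (added, not part of the original source): the classification
# above depends only on the initial magnetic moments, so the decision rule can
# be exercised with plain numpy arrays. `_classify` is a hypothetical helper
# that mirrors the logic of get_magnetic_structure; the moment values are made
# up for demonstration.
import numpy as np

def _classify(moments):
    abs_sum_mag = np.abs(moments).sum()
    sum_mag = moments.sum()
    if abs_sum_mag == 0 and sum_mag == 0:
        return "non magnetic"
    if abs_sum_mag == np.abs(sum_mag):
        return "ferro-magnetic"
    if abs_sum_mag > 0 and sum_mag == 0:
        return "para-magnetic"
    return "unknown"

assert _classify(np.array([0.0, 0.0, 0.0, 0.0])) == "non magnetic"
assert _classify(np.array([2.0, 2.0, 2.0, 2.0])) == "ferro-magnetic"   # all moments aligned
assert _classify(np.array([2.0, -2.0, 2.0, -2.0])) == "para-magnetic"  # moments cancel exactly
assert _classify(np.array([2.0, -1.0, 2.0, -1.0])) == "unknown"        # partial cancellation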
def from_hdf(self, hdf=None, group_name=None):
    super().from_hdf(hdf=hdf, group_name=group_name)
    self._structure_from_hdf()
    self._backwards_compatible_input_from_hdf()
    with self.project_hdf5.open("output/structures") as hdf5_output:
        structure_names = hdf5_output.list_groups()
    for group in structure_names:
        with self.project_hdf5.open("output/structures/" + group) as hdf5_output:
            self._lst_of_struct.append(Atoms().from_hdf(hdf5_output))
def test_average(self):
    a_0 = 2.855312531
    atoms = Atoms("Fe2", positions=[3 * [0], 3 * [0.5 * a_0]], cell=a_0 * np.eye(3))
    self.job.structure = atoms
    self.job.potential = 'Fe_C_Becquart_eam'
    file_directory = os.path.join(
        self.execution_path, "..", "static", "lammps_test_files"
    )
    self.job.collect_dump_file(cwd=file_directory, file_name="dump_average.out")
    self.job.collect_output_log(cwd=file_directory, file_name="log_average.lammps")
def _build_water(self, y0_shift=0.):
    density = 1.0e-24  # g/A^3
    n_mols = 27
    mol_mass_water = 18.015  # g/mol
    # Determining the supercell size
    mass = mol_mass_water * n_mols / units.mol  # g
    vol_h2o = mass / density  # in A^3
    a = vol_h2o ** (1.0 / 3.0)  # A
    # Constructing the unit cell
    n = int(round(n_mols ** (1.0 / 3.0)))
    dx = 0.7
    r_O = [0, 0, 0]
    r_H1 = [dx, dx, 0]
    r_H2 = [-dx, dx, 0]
    unit_cell = (a / n) * np.eye(3)
    unit_cell[0][1] += y0_shift
    water = Atoms(
        elements=["H", "H", "O"],
        positions=[r_H1, r_H2, r_O],
        cell=unit_cell,
        pbc=True,
    )
    water.set_repeat([n, n, n])
    return water
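# Quick numerical check (added for illustration, plain Python): for 27 water
# molecules at 1 g/cm^3 (= 1.0e-24 g/A^3) the box size used above works out to
#   mass = 18.015 * 27 / 6.022e23 ~ 8.08e-22 g
#   vol  = mass / 1.0e-24         ~ 808 A^3
#   a    = vol ** (1/3)           ~ 9.3 A
# so each of the 3 x 3 x 3 unit cells has an edge of roughly 3.1 A.
mass = 18.015 * 27 / 6.02214076e23  # g
vol_h2o = mass / 1.0e-24            # A^3
print(round(vol_h2o ** (1.0 / 3.0) / 3, 2))  # -> ~3.1 (A per unit cell)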
def _strain_axes(structure: Atoms, axes: List[str], volume_strain: float):
    """
    Strain box along given axes to achieve given *volumetric* strain.

    Returns a copy.
    """
    axes = np.array([a in axes for a in ("x", "y", "z")])
    num_axes = sum(axes)
    # distribute the volumetric strain evenly over the selected axes:
    # (1 + strain_per_axis) ** num_axes == 1 + volume_strain
    strains = axes * ((1 + volume_strain) ** (1.0 / num_axes) - 1)
    return structure.apply_strain(strains, return_box=True)
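# Sanity check of the per-axis strain formula (added for illustration, no
# pyiron required): distributing a 10% volumetric strain over two axes gives a
# per-axis strain of 1.10 ** (1/2) - 1, and applying it to both axes recovers
# the full 10% volume change.
volume_strain = 0.10
num_axes = 2
eps = (1 + volume_strain) ** (1.0 / num_axes) - 1
assert abs((1 + eps) ** num_axes - (1 + volume_strain)) < 1e-12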
def _get_structure(self, frame=-1, wrap_atoms=True):
    slc = self._get_per_atom_slice(frame)
    if "spins" in self._per_atom_arrays:
        magmoms = self._per_atom_arrays["spins"][slc]
    else:
        # not all structures have spins saved on them
        magmoms = None
    return Atoms(
        symbols=self._per_atom_arrays["symbols"][slc],
        positions=self._per_atom_arrays["positions"][slc],
        cell=self._per_structure_arrays["cell"][frame],
        pbc=self._per_structure_arrays["pbc"][frame],
        magmoms=magmoms,
    )
def setUpClass(cls):
    super().setUpClass()
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    atoms = Atoms("Fe1", positions=np.zeros((1, 3)), cell=np.eye(3))
    job = Gpaw(
        project=ProjectHDFio(project=cls.project, file_name="gpaw"),
        job_name="gpaw",
    )
    job.structure = atoms
    job.encut = 300
    job.set_kpoints([5, 5, 5])
    cls.job = job
def get_initial_structure(self):
    """
    Gets the initial structure from the simulation

    Returns:
        pyiron.atomistics.structure.atoms.Atoms: The initial structure
    """
    try:
        el_list = self.vasprun_dict["atominfo"]["species_list"]
        cell = self.vasprun_dict["init_structure"]["cell"]
        positions = self.vasprun_dict["init_structure"]["positions"]
        if len(positions[positions > 1.01]) > 0:
            basis = Atoms(el_list, positions=positions, cell=cell, pbc=True)
        else:
            basis = Atoms(el_list, scaled_positions=positions, cell=cell, pbc=True)
        if "selective_dynamics" in self.vasprun_dict["init_structure"].keys():
            basis.add_tag(selective_dynamics=[True, True, True])
            for i, val in enumerate(
                self.vasprun_dict["init_structure"]["selective_dynamics"]
            ):
                basis[i].selective_dynamics = val
        return basis
    except KeyError:
        state.logger.warning(
            "The initial structure could not be extracted from vasprun properly"
        )
        return
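# Illustration of the Cartesian-vs-fractional heuristic used above (added, not
# part of the original source): fractional coordinates lie within [0, 1], so
# any value above ~1.01 signals that vasprun stored Cartesian positions.
import numpy as np

frac = np.array([[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]])
cart = np.array([[0.0, 0.0, 0.0], [1.43, 1.43, 1.43]])
print(len(frac[frac > 1.01]) > 0)  # False -> passed as scaled_positions
print(len(cart[cart > 1.01]) > 0)  # True  -> passed as positions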
def test_calc_md_input(self):
    # Ensure that defaults match control defaults
    atoms = Atoms("Fe8", positions=np.zeros((8, 3)), cell=np.eye(3))
    self.md_control_job.structure = atoms
    self.md_control_job.input.control.calc_md()

    self.md_job.structure = atoms
    self.md_job._prism = UnfoldingPrism(atoms.cell)
    self.md_job.calc_md()

    for k in self.job.input.control.keys():
        self.assertEqual(self.md_job.input.control[k],
                         self.md_control_job.input.control[k])

    # Ensure that pressure inputs are being parsed OK
    self.md_control_job.calc_md(temperature=300.0, pressure=0)
    self.assertEqual(self.md_control_job.input.control['fix___ensemble'],
                     "all npt temp 300.0 300.0 0.1 iso 0.0 0.0 1.0")

    self.md_control_job.calc_md(temperature=300.0, pressure=[0.0, 0.0, 0.0])
    self.assertEqual(
        self.md_control_job.input.control['fix___ensemble'],
        "all npt temp 300.0 300.0 0.1 x 0.0 0.0 1.0 y 0.0 0.0 1.0 z 0.0 0.0 1.0"
    )

    cnv = LAMMPS_UNIT_CONVERSIONS[
        self.md_control_job.input.control["units"]]["pressure"]

    self.md_control_job.calc_md(temperature=300.0, pressure=-2.0)
    m = re.match(
        r"all +npt +temp +300.0 +300.0 +0.1 +iso +([-\d.]+) +([-\d.]+) 1.0$",
        self.md_control_job.input.control['fix___ensemble'].strip())
    self.assertTrue(m)
    self.assertTrue(np.isclose(float(m.group(1)), -2.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(2)), -2.0 * cnv))

    self.md_control_job.calc_md(temperature=300.0,
                                pressure=[1, 2, None, 3., 0., None])
    m = re.match(
        r"all +npt +temp +300.0 +300.0 +0.1 +"
        r"x +([\d.]+) +([\d.]+) +1.0 +y +([\d.]+) +([\d.]+) +1.0 +"
        r"xy +([\d.]+) +([\d.]+) +1.0 +xz +([\d.]+) +([\d.]+) +1.0$",
        self.md_control_job.input.control['fix___ensemble'].strip())
    self.assertTrue(m)
    self.assertTrue(np.isclose(float(m.group(1)), 1.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(2)), 1.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(3)), 2.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(4)), 2.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(5)), 3.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(6)), 3.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(7)), 0.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(8)), 0.0 * cnv))
def setUpClass(cls):
    cls.count = 12
    cls.file_location = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(
        os.path.join(cls.file_location, "random_testing_atomistic"))
    cls.job = cls.project.create_job("AtomisticExampleJob",
                                     "job_test_atomistic_run")
    cls.job.input["count"] = cls.count
    cls.job.structure = Atoms(positions=[[0, 0, 0], [1, 1, 1]],
                              elements=["Fe", "Fe"],
                              cell=2 * np.eye(3))
    cls.job.interactive_open()
    cls.job.run()
def setUpClass(cls):
    cls.file_location = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.file_location, "../static/sphinx"))
    cls.sphinx = cls.project.create_job("Sphinx", "job_sphinx")
    cls.sphinx.structure = Atoms(elements=['Fe'] * 2,
                                 scaled_positions=[3 * [0.0], 3 * [0.5]],
                                 cell=2.6 * np.eye(3))
    cls.sphinx.structure.set_initial_magnetic_moments(np.ones(2))
    cls.current_dir = os.path.abspath(os.getcwd())
    cls.sphinx._create_working_directory()
    cls.sphinx.input["VaspPot"] = False
    cls.sphinx.load_default_groups()
    cls.sphinx.write_input()
    cls.sphinx.version = "2.6.1"
    cls.sphinx.server.run_mode.interactive = True
def phonopy_to_atoms(ph_atoms):
    """
    Convert Phonopy Atoms to ASE-like Atoms

    Args:
        ph_atoms: Phonopy Atoms object

    Returns:
        ASE-like Atoms object
    """
    return Atoms(
        symbols=list(ph_atoms.get_chemical_symbols()),
        positions=list(ph_atoms.get_positions()),
        cell=list(ph_atoms.get_cell()),
        pbc=True,
    )
def test_bonds_input(self):
    potential = pd.DataFrame({
        'Name': ['Morse'],
        'Filename': [[]],
        'Model': ['Morse'],
        'Species': [['Al']],
        'Config': [['atom_style bond\n',
                    'bond_style morse\n',
                    'bond_coeff 1 0.1 1.5 2.0\n',
                    'bond_coeff 2 0.1 1.5 2.0']]
    })
    cell = Atoms(elements=4 * ['Al'],
                 positions=[[0., 0., 0.], [0., 2., 2.], [2., 0., 2.], [2., 2., 0.]],
                 cell=4 * np.eye(3))
    self.job.structure = cell.repeat(2)
    self.job.structure.bonds = [[1, 2, 1], [1, 3, 2]]
    self.job.potential = potential
    self.job.calc_static()
    self.job.run(run_mode="manual")
    bond_str = "2 bond types\n"
    self.assertTrue(self.job["structure.inp"][4][-1], bond_str)
def test_write_cube(self):
    cd_obj = VaspVolumetricData()
    file_name = os.path.join(
        self.execution_path,
        "../../static/vasp_test_files/chgcar_samples/CHGCAR_no_spin",
    )
    cd_obj.from_file(filename=file_name)
    data_before = cd_obj.total_data.copy()
    cd_obj.write_cube_file(
        filename=os.path.join(self.execution_path, "chgcar.cube")
    )
    cd_obj.read_cube_file(filename=os.path.join(self.execution_path, "chgcar.cube"))
    data_after = cd_obj.total_data.copy()
    self.assertTrue(np.allclose(data_before, data_after))

    n_x, n_y, n_z = (3, 4, 2)
    random_array = np.random.rand(n_x, n_y, n_z)
    rd_obj = VolumetricData()
    rd_obj.atoms = Atoms("H2O", cell=np.eye(3) * 10, positions=np.eye(3))
    rd_obj.total_data = random_array
    rd_obj.write_vasp_volumetric(
        filename=os.path.join(self.execution_path, "random_CHGCAR")
    )
    cd_obj.from_file(filename=os.path.join(self.execution_path, "random_CHGCAR"))
    self.assertTrue(
        np.allclose(
            cd_obj.total_data * cd_obj.atoms.get_volume(), rd_obj.total_data
        )
    )

    file_name = os.path.join(
        self.execution_path,
        "../../static/vasp_test_files/chgcar_samples/CHGCAR_water",
    )
    cd_obj = VaspVolumetricData()
    cd_obj.from_file(file_name)
    data_before = cd_obj.total_data.copy()
    cd_obj.write_cube_file(
        filename=os.path.join(self.execution_path, "chgcar.cube")
    )
    cd_obj.read_cube_file(filename=os.path.join(self.execution_path, "chgcar.cube"))
    self.assertIsNotNone(cd_obj.atoms)
    data_after = cd_obj.total_data.copy()
    self.assertTrue(np.allclose(data_before, data_after))

    data_before = cd_obj.total_data.copy()
    cd_obj.write_vasp_volumetric(
        filename=os.path.join(self.execution_path, "random_CHGCAR")
    )
    cd_obj.from_file(
        filename=os.path.join(self.execution_path, "random_CHGCAR"), normalize=False
    )
    data_after = cd_obj.total_data.copy()
    self.assertTrue(np.allclose(data_before, data_after))
def from_hdf(self, hdf=None, group_name=None):
    """
    Restore the StructureListMaster object in the HDF5 File

    Args:
        hdf (ProjectHDFio): HDF5 group object - optional
        group_name (str): HDF5 subgroup name - optional
    """
    super(StructureListMaster, self).from_hdf(hdf=hdf, group_name=group_name)
    with self.project_hdf5.open("input/structures") as hdf5_input:
        self._structure_lst = [
            Atoms().from_hdf(hdf5_input, group_name)
            for group_name in sorted(hdf5_input.list_groups())
        ]
def test_manip_contcar(self):
    for f in self.file_list:
        if "CONTCAR_Mg" in f:
            struct = read_atoms(f)
            Mg_indices = struct.select_index("Mg")
            add_pos = np.zeros_like(struct.positions)
            max_Mg = np.argmax(struct.positions[Mg_indices, 2])
            init_z = struct.positions[max_Mg, 2]
            add_pos[np.argsort(vasp_sorter(struct))[max_Mg], 2] += 5.0
            manip_contcar(filename=f, new_filename="manip_file", add_pos=add_pos)
            new_struct = read_atoms("manip_file")
            Mg_indices = new_struct.select_index("Mg")
            max_Mg = np.argmax(new_struct.positions[Mg_indices, 2])
            final_z = new_struct.positions[max_Mg, 2]
            self.assertEqual(round(final_z - init_z, 3), 5.0)
            os.remove("manip_file")
            break
    positions = np.ones((3, 3))
    positions[0] = [5.0, 5.0, 5.0]
    positions[1] = [5.0, 5.7, 5.7]
    positions[2] = [5.0, -5.7, -5.7]
    struct = Atoms(["O", "H", "H"], positions=positions, cell=10.0 * np.eye(3))
    write_poscar(structure=struct, filename="simple_water")
    add_pos = np.zeros_like(positions)
    poscar_order = np.argsort(vasp_sorter(struct))
    add_pos[poscar_order[struct.select_index("O")], 2] += 3
    manip_contcar("simple_water", "simple_water_new", add_pos)
    new_struct = read_atoms("simple_water_new")
    self.assertEqual(new_struct.positions[new_struct.select_index("O"), 2], 8)
    os.remove("simple_water")
    os.remove("simple_water_new")
def from_hdf(self, hdf=None, group_name=None):
    # keep the hdf structure for version peeking in a separate variable, so that
    # the inherited from_hdf() can properly deal with it
    h5 = hdf or self.project_hdf5
    if group_name:
        h5 = h5[group_name]
    if "HDF_VERSION" in h5.list_nodes():
        hdf_version = h5["HDF_VERSION"]
    else:
        # old versions did not set an HDF version
        hdf_version = "0.1.0"
    if hdf_version == "0.1.0":
        super().from_hdf(hdf=hdf, group_name=group_name)
        with self.project_hdf5.open("input") as hdf5_input:
            self.append(Atoms().from_hdf(hdf5_input))
    elif hdf_version == "0.2.0":
        GenericJob.from_hdf(self, hdf=hdf, group_name=group_name)
        hdf = self.project_hdf5["structures"]
        for group in sorted(hdf.list_groups()):
            self.append(Atoms().from_hdf(hdf=hdf, group_name=group))
    else:
        super().from_hdf(hdf=hdf, group_name=group_name)
        self._container.from_hdf(hdf=self.project_hdf5, group_name="structures")
def from_hdf(self, hdf=None, group_name=None):
    """
    Restore the SxUniqDispl object from the HDF5 File

    Args:
        hdf (ProjectHDFio): HDF5 group object - optional
        group_name (str): HDF5 subgroup name - optional
    """
    super(SxUniqDispl, self).from_hdf(hdf=hdf, group_name=group_name)
    with self.project_hdf5.open("input") as hdf5_input:
        self.input.from_hdf(hdf5_input)
    if "output" in self.project_hdf5.list_groups():
        with self.project_hdf5.open("output") as hdf5_output:
            self.structure_lst = [
                Atoms().from_hdf(hdf5_output, group_name)
                for group_name in hdf5_output.list_groups()
            ]
def setUpClass(cls):
    cls.file_location = os.path.dirname(os.path.abspath(__file__))
    poscar_directory = os.path.join(
        cls.file_location, "../static/vasp_test_files/poscar_samples")
    file_list = os.listdir(poscar_directory)
    cls.file_list = [posixpath.join(poscar_directory, f) for f in file_list]
    atom_numbers = np.random.randint(low=1, high=99, size=(1, 3)).flatten()
    cell = 10.0 * np.eye(3)
    pos = 0.5 * np.ones((3, 3)) - 0.5 * np.eye(3)
    cls.structure = Atoms(numbers=atom_numbers, cell=cell, positions=pos, pbc=True)
    cls.structure.repeat([2, 2, 2])
    cls.element_list = cls.structure.get_chemical_elements()
def _get_structure(self, frame=-1, wrap_atoms=True):
    elements = self.get_elements()
    index_map = {e: i for i, e in enumerate(elements)}
    try:
        magmoms = self.get_array("spins", frame)
    except KeyError:
        # not all structures have spins saved on them
        magmoms = None
    symbols = self.get_array("symbols", frame)
    return Atoms(
        species=[Atom(e).element for e in elements],
        indices=[index_map[e] for e in symbols],
        positions=self.get_array("positions", frame),
        cell=self.get_array("cell", frame),
        pbc=self.get_array("pbc", frame),
        magmoms=magmoms,
    )
def test_run(self):
    basis = Atoms(elements=8 * ['Fe'],
                  scaled_positions=np.random.random(24).reshape(-1, 3),
                  cell=2.6 * np.eye(3))
    job = self.project.create_job(
        self.project.job_type.AtomisticExampleJob, "job_single")
    job.server.run_mode.interactive = True
    job.structure = basis
    artint = self.project.create_job('ART', 'job_art')
    artint.ref_job = job
    with self.assertRaises(AssertionError):
        artint.validate_ready_to_run()
    artint.input.art_id = 0
    artint.input.direction = np.ones(3)
    artint.run()
    self.assertEqual(artint.output.forces.shape, (1, 8, 3))
    artint.interactive_close()
    self.assertTrue(artint.status.finished)
def test_cached_speed(self):
    """
    Creating atoms should be faster after the first time, due to caches in
    periodictable/mendeleev.
    """
    pos, cell = generate_fcc_lattice()
    expected_speedup_factor = 15
    n_timing_loop = 5
    t1, t2, t3, t4, t5, t6, t7 = [
        np.array([0.0] * n_timing_loop) for _ in range(7)
    ]
    for i in range(n_timing_loop):
        element.cache_clear()
        PeriodicTable._get_periodic_table_df.cache_clear()
        t1[i] = time.perf_counter()
        Atoms(symbols="Al", positions=pos, cell=cell)
        t2[i] = time.perf_counter()
        Atoms(symbols="Al", positions=pos, cell=cell)
        t3[i] = time.perf_counter()
        Atoms(symbols="Cu", positions=pos, cell=cell)
        t4[i] = time.perf_counter()
        Atoms(symbols="CuAl", positions=[[0., 0., 0.], [0.5, 0.5, 0.5]], cell=cell)
        t5[i] = time.perf_counter()
        Atoms(symbols="MgO", positions=[[0., 0., 0.], [0.5, 0.5, 0.5]], cell=cell)
        t6[i] = time.perf_counter()
        Atoms(symbols="AlMgO",
              positions=[[0., 0., 0.], [0.5, 0.5, 0.5], [0.5, 0.5, 0.]],
              cell=cell)
        t7[i] = time.perf_counter()

    dt21 = np.mean(t2 - t1)
    dt32 = np.mean(t3 - t2)
    # check the simple case of structures with one element type
    self.assertGreater(dt21, dt32, "Atom creation not sped up by caches!")
    self.assertGreater(
        dt21 / dt32, expected_speedup_factor,
        "Atom creation not sped up to the required level by caches!")

    dt43 = np.mean(t4 - t3)
    dt54 = np.mean(t5 - t4)
    # check that the speed-up also holds when creating structures with multiple
    # elements, as long as all the elements have been seen before
    self.assertGreater(
        dt43 / dt54, expected_speedup_factor,
        "Atom creation not sped up to the required level by caches!")

    dt65 = np.mean(t6 - t5)
    dt76 = np.mean(t7 - t6)
    # check that again with three elements
    self.assertGreater(
        dt65 / dt76, expected_speedup_factor,
        "Atom creation not sped up to the required level by caches!")
def test_calc_minimize_input(self):
    # Ensure that defaults match control defaults
    atoms = Atoms("Fe8", positions=np.zeros((8, 3)), cell=np.eye(3))
    self.minimize_control_job.structure = atoms
    self.minimize_control_job.input.control.calc_minimize()

    self.minimize_job.structure = atoms
    self.minimize_job._prism = UnfoldingPrism(atoms.cell)
    self.minimize_job.calc_minimize()

    for k in self.job.input.control.keys():
        self.assertEqual(self.minimize_job.input.control[k],
                         self.minimize_control_job.input.control[k])

    # Ensure that pressure inputs are being parsed OK
    self.minimize_control_job.calc_minimize(pressure=0)
    self.assertEqual(
        self.minimize_control_job.input.control['fix___ensemble'],
        "all box/relax iso 0.0")

    self.minimize_control_job.calc_minimize(pressure=[0.0, 0.0, 0.0])
    self.assertEqual(
        self.minimize_control_job.input.control['fix___ensemble'],
        "all box/relax x 0.0 y 0.0 z 0.0 couple none")

    cnv = LAMMPS_UNIT_CONVERSIONS[
        self.minimize_control_job.input.control["units"]]["pressure"]

    self.minimize_control_job.calc_minimize(pressure=-2.0)
    m = re.match(
        r"all +box/relax +iso +([-\d.]+)$",
        self.minimize_control_job.input.control['fix___ensemble'].strip())
    self.assertTrue(m)
    self.assertTrue(np.isclose(float(m.group(1)), -2.0 * cnv))

    self.minimize_control_job.calc_minimize(pressure=[1, 2, None, 3., 0., None])
    m = re.match(
        r"all +box/relax +x +([\d.]+) +y ([\d.]+) +xy +([\d.]+) +xz +([\d.]+) +couple +none$",
        self.minimize_control_job.input.control['fix___ensemble'].strip())
    self.assertTrue(m)
    self.assertTrue(np.isclose(float(m.group(1)), 1.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(2)), 2.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(3)), 3.0 * cnv))
    self.assertTrue(np.isclose(float(m.group(4)), 0.0 * cnv))
def from_hdf(self, hdf=None, group_name=None):
    """
    Restore the RandSpg object from the HDF5 File

    Args:
        hdf (ProjectHDFio): HDF5 group object - optional
        group_name (str): HDF5 subgroup name - optional
    """
    super(RandSpg, self).from_hdf(hdf=hdf, group_name=group_name)
    with self.project_hdf5.open("input") as hdf5_input:
        self.input.from_hdf(hdf5_input)
    self._lst_of_struct = []
    with self.project_hdf5.open("output/structures") as hdf5_output:
        structure_names = hdf5_output.list_groups()
    for group in structure_names:
        with self.project_hdf5.open("output/structures/" + group) as hdf5_output:
            self._lst_of_struct.append([group, Atoms().from_hdf(hdf5_output)])
def test_validate(self):
    with self.assertRaises(ValueError):
        self.job.validate_ready_to_run()
    a_0 = 2.855312531
    atoms = Atoms("Fe2",
                  positions=[3 * [0], 3 * [0.5 * a_0]],
                  cell=a_0 * np.eye(3),
                  pbc=False)
    self.job.structure = atoms
    # with self.assertRaises(ValueError):
    #     self.job.validate_ready_to_run()
    self.job.potential = self.job.list_potentials()[-1]
    self.job.validate_ready_to_run()
    self.job.structure.positions[0, 0] -= 2.855
    with self.assertRaises(ValueError):
        self.job.validate_ready_to_run()
    self.job.structure.pbc = True
    self.job.validate_ready_to_run()
    self.job.structure.pbc = [True, True, False]
    self.job.validate_ready_to_run()
    self.job.structure.pbc = [False, True, True]
    with self.assertRaises(ValueError):
        self.job.validate_ready_to_run()
def test_get_layers(self):
    a_0 = 4
    struct = CrystalStructure('Al',
                              lattice_constants=a_0,
                              bravais_basis='fcc').repeat(10)
    layers = struct.analyse.get_layers()
    self.assertAlmostEqual(
        np.linalg.norm(layers - np.rint(2 * struct.positions / a_0).astype(int)), 0)
    struct.append(Atoms(elements=['C'], positions=np.random.random((1, 3))))
    self.assertEqual(
        np.linalg.norm(layers - struct.analyse.get_layers(
            id_list=struct.select_index('Al'))), 0)
    self.assertEqual(
        np.linalg.norm(layers - struct.analyse.get_layers(
            id_list=struct.select_index('Al'), wrap_atoms=False)), 0)
    with self.assertRaises(ValueError):
        _ = struct.analyse.get_layers(distance_threshold=0)
    with self.assertRaises(ValueError):
        _ = struct.analyse.get_layers(id_list=[])
def _get_structure(self, frame=-1, wrap_atoms=True):
    if self.structure is None:
        raise AssertionError("Structure not set")
    # fall back to the reference structure when no cells/indices were stored
    cell = None
    indices = None
    if self.output.cells is not None:
        try:
            cell = self.output.cells[frame]
        except IndexError:
            if wrap_atoms:
                raise IndexError("cell at step ", frame, " not found") from None
            cell = None
    if self.output.indices is not None:
        try:
            indices = self.output.indices[frame]
        except IndexError:
            indices = None
    if indices is not None and len(indices) != len(self.structure):
        snapshot = Atoms(
            species=self.structure.species,
            indices=indices,
            positions=np.zeros(indices.shape + (3,)),
            cell=cell,
            pbc=self.structure.pbc,
        )
    else:
        snapshot = self.structure.copy()
        if cell is not None:
            snapshot.cell = cell
        if indices is not None:
            snapshot.indices = indices
    if self.output.positions is not None:
        if wrap_atoms:
            snapshot.positions = self.output.positions[frame]
            snapshot.center_coordinates_in_unit_cell()
        elif len(self.output.unwrapped_positions) > max([frame, 0]):
            snapshot.positions = self.output.unwrapped_positions[frame]
        else:
            snapshot.positions += self.output.total_displacements[frame]
    return snapshot
def from_hdf(self, hdf=None, group_name=None):
    """
    Restore the ParameterMaster from an HDF5 file

    Args:
        hdf (ProjectHDFio): HDF5 group object - optional
        group_name (str): HDF5 subgroup name - optional
    """
    super(MapMaster, self).from_hdf(hdf=hdf, group_name=group_name)
    with self.project_hdf5.open("input") as hdf5_input:
        if "structures" in hdf5_input.list_groups():
            with hdf5_input.open("structures") as hdf5_input_str:
                self.parameter_list = [
                    Atoms().from_hdf(hdf5_input_str, group_name)
                    for group_name in sorted(hdf5_input_str.list_groups())
                ]
        else:
            self.parameter_list = hdf5_input["parameters_list"]
        function_str = hdf5_input["map_function"]
        if function_str == "None":
            self._map_function = None
        else:
            self._map_function = get_function_from_string(function_str)
def setUpClass(cls):
    cls.file_location = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.file_location, "hessian_class"))
    cls.project.remove_jobs_silently(recursive=True)
    cls.job = cls.project.create_job("HessianJob", "job_test_hessian")
    structure = Atoms(
        positions=[[0, 0, 0], [1, 1, 1]],
        elements=["Fe", "Fe"],
        cell=2 * np.eye(3)
    )
    cls.job.set_reference_structure(structure)
    cls.job.structure.apply_strain(0.01)
    cls.job.structure.positions[0, 0] = 0.1
    cls.job.structure.center_coordinates_in_unit_cell()
    cls.job.set_force_constants(force_constants=1)
    cls.job.set_elastic_moduli(bulk_modulus=1, shear_modulus=1)
    cls.job.server.run_mode.interactive = True
    cls.job.run()
    cls.job.structure.positions[0, 1] -= 0.1
    cls.job.run()
    cls.job.structure.positions[0, 1] -= 0.1
    cls.job.run()