def populate_restapi_database(clear_database_before_test):
    """Populates the database with a considerable set of nodes to test the restAPI"""
    # pylint: disable=unused-argument
    from aiida import orm

    struct_forcif = orm.StructureData().store()
    orm.StructureData().store()
    orm.StructureData().store()

    orm.Dict().store()
    orm.Dict().store()

    orm.CifData(ase=struct_forcif.get_ase()).store()
    orm.KpointsData().store()
    orm.FolderData().store()

    orm.CalcFunctionNode().store()
    orm.CalcJobNode().store()
    orm.CalcJobNode().store()

    orm.WorkFunctionNode().store()
    orm.WorkFunctionNode().store()
    orm.WorkChainNode().store()

def test_check_for_export_format_version(self):
    """Test the check for the export format version."""
    # Creating a folder for the import/export files
    export_file_tmp_folder = tempfile.mkdtemp()
    unpack_tmp_folder = tempfile.mkdtemp()
    try:
        struct = orm.StructureData()
        struct.store()

        filename = os.path.join(export_file_tmp_folder, 'export.tar.gz')
        export([struct], outfile=filename, silent=True)

        with tarfile.open(filename, 'r:gz', format=tarfile.PAX_FORMAT) as tar:
            tar.extractall(unpack_tmp_folder)

        with open(os.path.join(unpack_tmp_folder, 'metadata.json'), 'r', encoding='utf8') as fhandle:
            metadata = json.load(fhandle)
        metadata['export_version'] = 0.0

        # Note: the file must be opened in text mode, since `json.dump` writes strings
        with open(os.path.join(unpack_tmp_folder, 'metadata.json'), 'w', encoding='utf8') as fhandle:
            json.dump(metadata, fhandle)

        with tarfile.open(filename, 'w:gz', format=tarfile.PAX_FORMAT) as tar:
            tar.add(unpack_tmp_folder, arcname='')

        self.tearDownClass()
        self.setUpClass()

        with self.assertRaises(exceptions.IncompatibleArchiveVersionError):
            import_data(filename, silent=True)
    finally:
        # Deleting the created temporary folders
        shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
        shutil.rmtree(unpack_tmp_folder, ignore_errors=True)

def test_group_import_existing(self, temp_dir):
    """
    Test what happens when importing a group whose label already exists in the
    database: the imported group should be renamed with a suffix.
    """
    grouplabel = 'node_group_existing'

    # Create another user
    new_email = '[email protected]'
    user = orm.User(email=new_email)
    user.store()

    # Create a structure data node
    sd1 = orm.StructureData()
    sd1.user = user
    sd1.label = 'sd'
    sd1.store()

    # Create a group and add the data inside
    group = orm.Group(label=grouplabel)
    group.store()
    group.add_nodes([sd1])

    # At this point we export the generated data
    filename = os.path.join(temp_dir, 'export1.tar.gz')
    export([group], outfile=filename, silent=True)
    self.clean_db()
    self.insert_data()

    # Creating a group with the same label
    group = orm.Group(label=grouplabel)
    group.store()
    import_data(filename, silent=True)

    # The import should have created a new group with a suffix; I check for this:
    builder = orm.QueryBuilder().append(orm.Group, filters={'label': {'like': grouplabel + '%'}})
    self.assertEqual(builder.count(), 2)

    # Now I check that the group has one member, and that its label is different:
    builder = orm.QueryBuilder()
    builder.append(orm.Group, filters={'label': {'like': grouplabel + '%'}}, tag='g', project='label')
    builder.append(orm.StructureData, with_group='g')
    self.assertEqual(builder.count(), 1)
    # I check that the group label was changed:
    self.assertTrue(builder.all()[0][0] != grouplabel)

    # I import the same archive again; the group should not be imported a second time
    import_data(filename, silent=True)
    builder = orm.QueryBuilder()
    builder.append(orm.Group, filters={'label': {'like': grouplabel + '%'}})
    self.assertEqual(builder.count(), 2)

def test_check_for_export_format_version(aiida_profile, tmp_path):
    """Test the check for the export format version."""
    # Creating a folder for the archive files
    export_file_tmp_folder = tmp_path / 'export_tmp'
    export_file_tmp_folder.mkdir()
    unpack_tmp_folder = tmp_path / 'unpack_tmp'
    unpack_tmp_folder.mkdir()

    aiida_profile.reset_db()

    struct = orm.StructureData()
    struct.store()

    filename = str(export_file_tmp_folder / 'export.aiida')
    export([struct], filename=filename, file_format='tar.gz')

    with tarfile.open(filename, 'r:gz', format=tarfile.PAX_FORMAT) as tar:
        tar.extractall(unpack_tmp_folder)

    with (unpack_tmp_folder / 'metadata.json').open('r', encoding='utf8') as fhandle:
        metadata = json.load(fhandle)

    metadata['export_version'] = 0.0

    # Note: the file must be opened in text mode, since `json.dump` writes strings
    with (unpack_tmp_folder / 'metadata.json').open('w', encoding='utf8') as fhandle:
        json.dump(metadata, fhandle)

    with tarfile.open(filename, 'w:gz', format=tarfile.PAX_FORMAT) as tar:
        tar.add(unpack_tmp_folder, arcname='')

    aiida_profile.reset_db()

    with pytest.raises(exceptions.IncompatibleArchiveVersionError):
        import_data(filename)

def generate_structure() -> orm.StructureData:
    """Generate the structure for the calculation.

    It will create a bcc iron structure in a cubic lattice.

    :return: structure to be used in the calculation
    :rtype: orm.StructureData
    """
    cell = [
        [2.848116, 0.000000, 0.000000],
        [0.000000, 2.848116, 0.000000],
        [0.000000, 0.000000, 2.848116],
    ]
    positions = [
        (0.0000000, 0.0000000, 0.0000000),
        (0.5000000, 0.5000000, 0.5000000),
    ]
    fractional = True
    symbols = ['Fe', 'Fe']
    names = ['Fe1', 'Fe2']

    structure = orm.StructureData(cell=cell)
    for position, symbol, name in zip(positions, symbols, names):
        if fractional:
            # Convert the fractional coordinates to Cartesian ones
            position = np.dot(position, cell).tolist()
        structure.append_atom(position=position, symbols=symbol, name=name)

    return structure

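# A minimal usage sketch for the helper above (assumes a loaded AiiDA profile
# and the same imports as the function): build the bcc Fe structure and
# inspect it. The expected outputs follow from the cell and kinds defined above.
structure = generate_structure()
print(structure.get_formula())   # 'Fe2'
print(structure.cell_lengths)    # [2.848116, 2.848116, 2.848116]
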
def test_pw_wrong_ibrav(fixture_sandbox, generate_calc_job, fixture_code, generate_kpoints_mesh, generate_upf_data):
    """Test that a `PwCalculation` with an incorrect `ibrav` raises."""
    entry_point_name = 'quantumespresso.pw'

    parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutrho': 240.0, 'ecutwfc': 30.0, 'ibrav': 2}}

    # Here we use the wrong order of unit cell vectors on purpose.
    param = 5.43
    cell = [[0, param / 2., param / 2.], [-param / 2., 0, param / 2.], [-param / 2., param / 2., 0]]
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si')
    structure.append_atom(position=(param / 4., param / 4., param / 4.), symbols='Si', name='Si')
    upf = generate_upf_data('Si')
    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': structure,
        'kpoints': generate_kpoints_mesh(2),
        'parameters': orm.Dict(dict=parameters),
        'pseudos': {'Si': upf},
        'metadata': {'options': get_default_options()}
    }

    with pytest.raises(QEInputValidationError):
        generate_calc_job(fixture_sandbox, entry_point_name, inputs)

def scale_structure(structure: orm.StructureData, scale_factor: orm.Float) -> orm.StructureData:
    """Scale the structure with the given volume scaling factor (the cube root is applied to the cell vectors)."""
    ase = structure.get_ase().copy()
    ase.set_cell(ase.get_cell() * float(scale_factor)**(1 / 3), scale_atoms=True)
    return orm.StructureData(ase=ase)

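# Minimal usage sketch (assumes the `generate_structure` helper from above and
# a loaded AiiDA profile): increase the cell volume by 2%. The cube root in
# `scale_structure` turns the volume factor into a length factor.
scaled = scale_structure(generate_structure(), orm.Float(1.02))
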
def create_diamond_fcc(element):
    """Calculation function to create the crystal structure of a given element.

    For simplicity, only Si and Ge are valid elements.

    :param element: The element to create the structure with.
    :return: The structure.
    """
    from aiida import orm
    import numpy as np

    elem_alat = {
        'Si': 5.431,  # Angstrom
        'Ge': 5.658,  # Angstrom
    }

    # Validate input element
    symbol = element.value
    if symbol not in elem_alat:
        raise ValueError('Valid elements are only Si and Ge')

    # Create the cell with the lattice parameter alat corresponding to the element
    alat = elem_alat[symbol]
    cell = np.array([[0., 0.5, 0.5], [0.5, 0., 0.5], [0.5, 0.5, 0.]]) * alat

    # Create a structure data object
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols=symbol)
    structure.append_atom(position=(0.25 * alat, 0.25 * alat, 0.25 * alat), symbols=symbol)

    return structure

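# Minimal usage sketch: the element is wrapped in an orm.Str node because the
# function reads `element.value`, as a calcfunction input would provide.
from aiida import orm
silicon = create_diamond_fcc(orm.Str('Si'))
print(silicon.get_formula())  # 'Si2'
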
def _make_supercell(structure, supercell):
    from itertools import product

    # `supercell` provides two arrays: 'mag_atoms' (kind names whose sites get
    # numbered names) and 'data' (the repetitions along the three lattice vectors)
    mag_atoms = supercell.get_array('mag_atoms')
    sc = tuple(supercell.get_array('data'))
    x, y, z = sc

    # All integer translations within the supercell
    grid = np.array(list(product(range(x), range(y), range(z))))
    cell = structure.cell

    new = orm.StructureData()
    new.set_cell((np.array(cell).T * sc).T)

    kinds = structure.kinds
    sites = structure.sites
    counter = {k.name: 0 for k in kinds}

    for R_cryst in grid:
        for site in sites:
            pos = np.array(site.position) + np.dot(R_cryst, cell)
            name = site.kind_name
            counter[name] += 1
            new.append_atom(
                position=pos,
                symbols=name,
                name=f'{name}{counter[name] if name in mag_atoms else ""}',
            )

    return new

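# Hypothetical usage sketch: judging from the calls above, `supercell` is
# assumed to be an orm.ArrayData carrying the 'data' and 'mag_atoms' arrays,
# and `structure` is any StructureData (e.g. from one of the helpers above).
import numpy as np
from aiida import orm

supercell = orm.ArrayData()
supercell.set_array('data', np.array([2, 2, 2]))    # 2x2x2 repetitions
supercell.set_array('mag_atoms', np.array(['Fe']))  # Fe sites get numbered names
supercell_structure = _make_supercell(structure, supercell)
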
def launch():
    """Launch the relax work chain for a basic silicon crystal structure at a range of scaling factors."""
    print(f'Running {STRUCTURE} with {CODE}')

    pymatgen_structure = Structure.from_file(f'../../../common/data/{STRUCTURE}.cif')
    structure = orm.StructureData(pymatgen=pymatgen_structure)

    parameters_dict = {
        'structure': structure,
        'sub_process_class': 'common_workflows.relax.abinit',
        'generator_inputs': {
            'protocol': 'precise',
            'relax_type': RelaxType.ATOMS,
            'threshold_forces': 0.001,
            'calc_engines': {
                'relax': {
                    'code': CODE,
                    'options': {
                        'withmpi': True,
                        'max_wallclock_seconds': 24 * 60**2,
                        'resources': {
                            'num_machines': 1,
                            'num_mpiprocs_per_machine': 4
                        }
                    }
                }
            },
            **KWARGS
        }
    }
    run(EquationOfStateWorkChain, **parameters_dict)

def test_pw_ibrav_tol(fixture_sandbox, generate_calc_job, fixture_code, generate_kpoints_mesh, generate_upf_data):
    """Test that `IBRAV_TOLERANCE` controls the tolerance when checking cell consistency."""
    entry_point_name = 'quantumespresso.pw'

    parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutrho': 240.0, 'ecutwfc': 30.0, 'ibrav': 2}}

    # The structure needs to be rotated in the same way QE does it for ibrav=2.
    param = 5.43
    eps = 0.1
    cell = [[-param / 2., eps, param / 2.], [-eps, param / 2. + eps, param / 2.], [-param / 2., param / 2., 0]]
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si')
    structure.append_atom(position=(param / 4., param / 4., param / 4.), symbols='Si', name='Si')
    upf = generate_upf_data('Si')
    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': structure,
        'kpoints': generate_kpoints_mesh(2),
        'parameters': orm.Dict(dict=parameters),
        'pseudos': {'Si': upf},
        'metadata': {'options': get_default_options()},
    }

    # Without adjusting the tolerance, the check fails.
    with pytest.raises(QEInputValidationError):
        generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    # After adjusting the tolerance, the input validation no longer fails.
    inputs['settings'] = orm.Dict(dict={'ibrav_cell_tolerance': eps})
    generate_calc_job(fixture_sandbox, entry_point_name, inputs)

def generate_inputs_default():
    """Return only those inputs that the parser will expect to be there."""
    a = 5.43
    structure = orm.StructureData(cell=[[a / 2., a / 2., 0], [a / 2., 0, a / 2.], [0, a / 2., a / 2.]])
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si1')
    structure.append_atom(position=(a / 4., a / 4., a / 4.), symbols='Si', name='Si2')
    structure.store()
    parameters = {
        'CONTROL': {
            'calculation': 'scf'
        },
        'SYSTEM': {
            'ecutrho': 240.0,
            'ecutwfc': 30.0
        }
    }
    kpoints = orm.KpointsData()
    kpoints.set_cell_from_structure(structure)
    kpoints.set_kpoints_mesh_from_density(0.15)
    return AttributeDict({
        'structure': structure,
        'kpoints': kpoints,
        'parameters': orm.Dict(dict=parameters),
        'settings': orm.Dict()
    })

def test_dangling_link_to_existing_db_node(self, temp_dir):
    """A dangling link that references a Node that is not included in the archive should `not` be importable"""
    struct = orm.StructureData()
    struct.store()
    struct_uuid = struct.uuid

    calc = orm.CalculationNode()
    calc.add_incoming(struct, LinkType.INPUT_CALC, 'input')
    calc.store()
    calc.seal()
    calc_uuid = calc.uuid

    filename = os.path.join(temp_dir, 'export.aiida')
    export([struct], filename=filename, file_format='tar.gz')

    unpack = SandboxFolder()
    with tarfile.open(filename, 'r:gz', format=tarfile.PAX_FORMAT) as tar:
        tar.extractall(unpack.abspath)

    with open(unpack.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
        data = json.load(fhandle)
    data['links_uuid'].append({
        'output': calc.uuid,
        'input': struct.uuid,
        'label': 'input',
        'type': LinkType.INPUT_CALC.value
    })

    # Note: the file must be opened in text mode, since `json.dump` writes strings
    with open(unpack.get_abs_path('data.json'), 'w', encoding='utf8') as fhandle:
        json.dump(data, fhandle)

    with tarfile.open(filename, 'w:gz', format=tarfile.PAX_FORMAT) as tar:
        tar.add(unpack.abspath, arcname='')

    # Make sure the CalculationNode is still in the database
    builder = orm.QueryBuilder().append(orm.CalculationNode, project='uuid')
    self.assertEqual(
        builder.count(), 1,
        msg=f'There should be a single CalculationNode, instead {builder.count()} has been found'
    )
    self.assertEqual(builder.all()[0][0], calc_uuid)

    with self.assertRaises(DanglingLinkError):
        import_data(filename)

    # Using the flag `ignore_unknown_nodes` should import it without problems
    import_data(filename, ignore_unknown_nodes=True)
    builder = orm.QueryBuilder().append(orm.StructureData, project='uuid')
    self.assertEqual(
        builder.count(), 1,
        msg=f'There should be a single StructureData, instead {builder.count()} has been found'
    )
    self.assertEqual(builder.all()[0][0], struct_uuid)

def read_structure(structure_file):
    """Read a structure from file with ASE, store it as a StructureData node and return it."""
    structure = orm.StructureData(ase=aseread(structure_file))
    structure.store()
    print(f'Structure {structure.get_formula()} read and stored with pk {structure.pk}.')
    return structure

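# Usage sketch with a hypothetical file name; requires
# `from ase.io import read as aseread` and a loaded AiiDA profile.
structure = read_structure('Si.cif')
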
def silicon_builder(db_test_app):
    """Prepare a mock-ready calculation for silicon"""
    silicon = orm.StructureData()
    r_unit = 2.6954645
    silicon.set_cell(np.array([[1, 1, 0], [1, 0, 1], [0, 1, 1]]) * r_unit)
    silicon.append_atom(symbols=["Si"], position=[0, 0, 0])
    silicon.append_atom(symbols=["Si"], position=[r_unit * 0.5] * 3)
    silicon.label = "Si"
    silicon.description = "A silicon structure"
    param_dict = {
        # Notice that the keywords are grouped into two sub-dictionaries,
        # just like you would do when preparing the inputs by hand
        "CELL": {
            "symmetry_generate": True,
            "snap_to_symmetry": True,
            # Pass a list of strings to set a BLOCK input
            #"cell_constraints": ["0 0 0", "0 0 0"]
        },
        "PARAM": {
            "task": "singlepoint",
            "basis_precision": "medium",
            "fix_occupancy": True,  # Use bool type to make it easy for querying
            "opt_strategy": "memory",
            "num_dump_cycles": 0,
            "write_formatted_density": True
        }
    }
    # We need to create a Dict node that holds the dictionary
    param = orm.Dict(dict=param_dict)
    kpoints = orm.KpointsData()
    # Use a 4x4x4 gamma-centred mesh (zero offset)
    kpoints.set_kpoints_mesh((4, 4, 4), offset=(0, 0, 0))
    c9 = OTFGData(otfg_entry="C9")
    CastepCalculation = CalculationFactory('castep.castep')
    code_path = check_output(['which', 'castep.mock'], universal_newlines=True).strip()
    castep_mock = orm.Code((db_test_app.localhost, code_path), input_plugin_name='castep.castep')

    builder = CastepCalculation.get_builder()
    builder.structure = silicon
    builder.parameters = param
    builder.kpoints = kpoints
    builder.code = castep_mock
    builder.pseudos = {'Si': c9}
    builder.metadata.options.withmpi = False
    builder.metadata.options.resources = {'num_machines': 1, 'tot_num_mpiprocs': 2}
    builder.metadata.options.max_wallclock_seconds = 600
    builder.metadata.label = "Si SINGLEPOINT"
    builder.metadata.description = 'An example CASTEP calculation for silicon'
    return builder

def strain_inputs(configure, strain_kind, strain_parameters, sample):
    import pymatgen
    from aiida import orm

    structure = orm.StructureData()
    structure.set_pymatgen(pymatgen.Structure.from_file(sample('POSCAR')))
    return dict(
        structure=structure,
        strain_kind=orm.Str(strain_kind),
        strain_parameters=orm.Str(strain_parameters),
        strain_strengths=orm.List(list=[-0.2, -0.1, 0., 0.1, 0.2])
    )

def test_nodes_in_group(self, temp_dir):
    """
    This test checks that nodes that belong to a specific group are
    correctly imported and exported.
    """
    from aiida.common.links import LinkType

    # Create another user
    new_email = '[email protected]'
    user = orm.User(email=new_email)
    user.store()

    # Create a structure data node that has a calculation as output
    sd1 = orm.StructureData()
    sd1.user = user
    sd1.label = 'sd1'
    sd1.store()

    jc1 = orm.CalcJobNode()
    jc1.computer = self.computer
    jc1.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    jc1.user = user
    jc1.label = 'jc1'
    jc1.add_incoming(sd1, link_type=LinkType.INPUT_CALC, link_label='link')
    jc1.store()
    jc1.seal()

    # Create a group and add the data inside
    gr1 = orm.Group(label='node_group')
    gr1.store()
    gr1.add_nodes([sd1, jc1])
    gr1_uuid = gr1.uuid

    # At this point we export the generated data
    filename1 = os.path.join(temp_dir, 'export1.tar.gz')
    export([sd1, jc1, gr1], outfile=filename1, silent=True)
    n_uuids = [sd1.uuid, jc1.uuid]
    self.clean_db()
    self.insert_data()
    import_data(filename1, silent=True)

    # Check that the imported nodes are correctly imported and that
    # the user assigned to the nodes is the right one
    for uuid in n_uuids:
        self.assertEqual(orm.load_node(uuid).user.email, new_email)

    # Check that the exported group is imported correctly
    builder = orm.QueryBuilder()
    builder.append(orm.Group, filters={'uuid': {'==': gr1_uuid}})
    self.assertEqual(builder.count(), 1, 'The group was not found.')

def _generate_structure():
    """Return a `StructureData` representing bulk GaAs."""
    from aiida import orm

    param = 2.84
    structure = orm.StructureData(cell=[[-param, 0, param], [0, param, param], [-param, param, 0]])
    structure.append_atom(symbols='Ga', position=[0, 0, 0])
    structure.append_atom(symbols='As', position=[-0.5 * param, 0.5 * param, 0.5 * param])

    return structure

def rescale(structure, scale):
    """Calculation function to rescale a structure

    :param structure: An AiiDA structure to rescale
    :param scale: The scale factor (for the lattice constant)
    :return: The rescaled structure
    """
    from aiida import orm

    ase = structure.get_ase()
    ase.set_cell(ase.get_cell() * float(scale), scale_atoms=True)
    return orm.StructureData(ase=ase)

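# Minimal usage sketch: stretch the lattice constant by 2%, reusing the
# `create_diamond_fcc` helper defined earlier. The scale factor is passed as
# an AiiDA Float node, as a calcfunction would receive it.
from aiida import orm
rescaled = rescale(create_diamond_fcc(orm.Str('Si')), orm.Float(1.02))
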
def test_control_of_licenses(self):
    """Test control of licenses."""
    from aiida.common.folders import SandboxFolder
    from aiida.tools.importexport.dbexport import export_tree

    struct = orm.StructureData()
    struct.source = {'license': 'GPL'}
    struct.store()

    folder = SandboxFolder()
    export_tree([struct], folder=folder, silent=True, allowed_licenses=['GPL'])
    # Folder should contain two files of metadata + nodes/
    self.assertEqual(len(folder.get_content_list()), 3)

    folder = SandboxFolder()
    export_tree([struct], folder=folder, silent=True, forbidden_licenses=['Academic'])
    # Folder should contain two files of metadata + nodes/
    self.assertEqual(len(folder.get_content_list()), 3)

    folder = SandboxFolder()
    with self.assertRaises(LicensingException):
        export_tree([struct], folder=folder, silent=True, allowed_licenses=['CC0'])

    folder = SandboxFolder()
    with self.assertRaises(LicensingException):
        export_tree([struct], folder=folder, silent=True, forbidden_licenses=['GPL'])

    def cc_filter(license_):
        return license_.startswith('CC')

    def gpl_filter(license_):
        return license_ == 'GPL'

    def crashing_filter():
        raise NotImplementedError('not implemented yet')

    folder = SandboxFolder()
    with self.assertRaises(LicensingException):
        export_tree([struct], folder=folder, silent=True, allowed_licenses=cc_filter)

    folder = SandboxFolder()
    with self.assertRaises(LicensingException):
        export_tree([struct], folder=folder, silent=True, forbidden_licenses=gpl_filter)

    folder = SandboxFolder()
    with self.assertRaises(LicensingException):
        export_tree([struct], folder=folder, silent=True, allowed_licenses=crashing_filter)

    folder = SandboxFolder()
    with self.assertRaises(LicensingException):
        export_tree([struct], folder=folder, silent=True, forbidden_licenses=crashing_filter)

def test_control_of_licenses():
    """Test control of licenses."""
    struct = orm.StructureData()
    struct.source = {'license': 'GPL'}
    struct.store()

    folder = SandboxFolder()
    export([struct], file_format='folder', writer_init={'folder': folder}, allowed_licenses=['GPL'])
    # Folder should contain two files of metadata + nodes/
    assert len(folder.get_content_list()) == 3

    folder.erase(create_empty_folder=True)
    assert len(folder.get_content_list()) == 0

    export([struct], file_format='folder', writer_init={'folder': folder}, forbidden_licenses=['Academic'])
    # Folder should contain two files of metadata + nodes/
    assert len(folder.get_content_list()) == 3

    with pytest.raises(LicensingException):
        export([struct], file_format='null', allowed_licenses=['CC0'])

    with pytest.raises(LicensingException):
        export([struct], file_format='null', forbidden_licenses=['GPL'])

    def cc_filter(license_):
        return license_.startswith('CC')

    def gpl_filter(license_):
        return license_ == 'GPL'

    def crashing_filter():
        raise NotImplementedError('not implemented yet')

    with pytest.raises(LicensingException):
        export([struct], file_format='null', allowed_licenses=cc_filter)

    with pytest.raises(LicensingException):
        export([struct], file_format='null', forbidden_licenses=gpl_filter)

    with pytest.raises(LicensingException):
        export([struct], file_format='null', allowed_licenses=crashing_filter)

    with pytest.raises(LicensingException):
        export([struct], file_format='null', forbidden_licenses=crashing_filter)

def test_pw_ibrav(
    fixture_sandbox, generate_calc_job, fixture_code, generate_kpoints_mesh, generate_upf_data, file_regression
):
    """Test a `PwCalculation` where `ibrav` is explicitly specified."""
    entry_point_name = 'quantumespresso.pw'

    parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutrho': 240.0, 'ecutwfc': 30.0, 'ibrav': 2}}

    # The structure needs to be rotated in the same way QE does it for ibrav=2.
    param = 5.43
    cell = [[-param / 2., 0, param / 2.], [0, param / 2., param / 2.], [-param / 2., param / 2., 0]]
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si')
    structure.append_atom(position=(param / 4., param / 4., param / 4.), symbols='Si', name='Si')
    upf = generate_upf_data('Si')
    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': structure,
        'kpoints': generate_kpoints_mesh(2),
        'parameters': orm.Dict(dict=parameters),
        'pseudos': {'Si': upf},
        'metadata': {'options': get_default_options()}
    }

    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    cmdline_params = ['-in', 'aiida.in']
    local_copy_list = [(upf.uuid, upf.filename, './pseudo/Si.upf')]
    retrieve_list = ['aiida.out', './out/aiida.save/data-file-schema.xml', './out/aiida.save/data-file.xml']
    retrieve_temporary_list = [['./out/aiida.save/K*[0-9]/eigenval*.xml', '.', 2]]

    # Check the attributes of the returned `CalcInfo`
    assert isinstance(calc_info, datastructures.CalcInfo)
    assert sorted(calc_info.cmdline_params) == sorted(cmdline_params)
    assert sorted(calc_info.local_copy_list) == sorted(local_copy_list)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)
    assert sorted(calc_info.retrieve_temporary_list) == sorted(retrieve_temporary_list)
    assert sorted(calc_info.remote_symlink_list) == sorted([])

    with fixture_sandbox.open('aiida.in') as handle:
        input_written = handle.read()

    # Checks on the files written to the sandbox folder as raw input
    assert sorted(fixture_sandbox.get_content_list()) == sorted(['aiida.in', 'pseudo', 'out'])
    file_regression.check(input_written, encoding='utf-8', extension='.in')

def _generate_structure():
    """Return a `StructureData` representing bulk O2Sr."""
    from aiida import orm

    structure = orm.StructureData(
        cell=[[-1.7828864010, 1.7828864010, 3.3905324933], [1.7828864010, -1.7828864010, 3.3905324933],
              [1.7828864010, 1.7828864010, -3.3905324933]]
    )
    structure.append_atom(symbols='Sr', position=[0, 0, 0])
    structure.append_atom(symbols='O', position=[1.7828864010, 1.7828864010, 0.7518485043])
    structure.append_atom(symbols='O', position=[0, 0, 2.6386839890])

    return structure

def rescale(structure, scale):
    """
    Calcfunction to rescale a structure by a scaling factor. Uses ase.

    :param structure: An AiiDA structure to rescale
    :param scale: The scale factor
    :return: The rescaled structure
    """
    the_ase = structure.get_ase()
    new_ase = the_ase.copy()
    new_ase.set_cell(the_ase.get_cell() * float(scale), scale_atoms=True)
    new_structure = orm.StructureData(ase=new_ase)
    return new_structure

def test_calc_of_structuredata(aiida_profile, tmp_path, file_format):
    """Simple ex-/import of CalcJobNode with input StructureData"""
    aiida_profile.reset_db()

    struct = orm.StructureData()
    struct.store()

    computer = orm.Computer(
        label='localhost-test',
        description='localhost computer set up by test manager',
        hostname='localhost-test',
        workdir=str(tmp_path / 'workdir'),
        transport_type='local',
        scheduler_type='direct'
    )
    computer.store()
    computer.configure()

    calc = orm.CalcJobNode()
    calc.computer = computer
    calc.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    calc.add_incoming(struct, link_type=LinkType.INPUT_CALC, link_label='link')
    calc.store()
    calc.seal()

    pks = [struct.pk, calc.pk]

    attrs = {}
    for pk in pks:
        node = orm.load_node(pk)
        attrs[node.uuid] = dict()
        for k in node.attributes.keys():
            attrs[node.uuid][k] = node.get_attribute(k)

    filename = str(tmp_path / 'export.aiida')

    export([calc], filename=filename, file_format=file_format)

    aiida_profile.reset_db()

    import_data(filename)

    for uuid in attrs:
        node = orm.load_node(uuid)
        for k in attrs[uuid].keys():
            assert attrs[uuid][k] == node.get_attribute(k)

def structure_init():
    """
    Workfunction to create the silicon structure, taking it from a reference
    CIF file and rescaling it to a reference volume per atom.

    :return: The structure.
    """
    import pymatgen as mg

    structure_file = op.realpath(op.join(op.dirname(__file__), 'data/Si.cif'))
    in_structure = mg.Structure.from_file(structure_file, primitive=False)
    newreduced = in_structure.copy()
    # Rescale to the reference volume per atom (in Angstrom^3)
    newreduced.scale_lattice(float(20.4530) * in_structure.num_sites)
    structure = orm.StructureData(pymatgen_structure=newreduced)
    return structure

def test_calc_of_structuredata(self, temp_dir):
    """Simple ex-/import of CalcJobNode with input StructureData"""
    from aiida.common.links import LinkType

    struct = orm.StructureData()
    struct.store()

    calc = orm.CalcJobNode()
    calc.computer = self.computer
    calc.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    calc.add_incoming(struct, link_type=LinkType.INPUT_CALC, link_label='link')
    calc.store()
    calc.seal()

    pks = [struct.pk, calc.pk]

    attrs = {}
    for pk in pks:
        node = orm.load_node(pk)
        attrs[node.uuid] = dict()
        for k in node.attributes.keys():
            attrs[node.uuid][k] = node.get_attribute(k)

    filename = os.path.join(temp_dir, 'export.aiida')

    export([calc], filename=filename, silent=True)

    self.clean_db()
    self.create_user()

    # NOTE: it is better to load new nodes by uuid, rather than assuming
    # that they will have the first 3 pks. In fact, a recommended policy in
    # databases is that pks always increment, even if you have deleted elements
    import_data(filename, silent=True)

    for uuid in attrs:
        node = orm.load_node(uuid)
        for k in attrs[uuid].keys():
            self.assertEqual(attrs[uuid][k], node.get_attribute(k))

def generate_inputs():
    """Return only those inputs that the parser will expect to be there."""
    alat = 5.4
    ase = crystal(
        'Si',
        [(0, 0, 0)],
        spacegroup=227,
        cellpar=[alat, alat, alat, 90, 90, 90],
        primitive_cell=True,
    )
    structure = orm.StructureData(ase=ase)
    parameters = {
        'CONTROL': {
            'calculation': 'cp',
            'restart_mode': 'from_scratch',
            'wf_collect': False,
            'iprint': 1,
            'isave': 100,
            'dt': 3.0,
            'max_seconds': 25 * 60,
            'nstep': 10,
        },
        'SYSTEM': {
            'ecutwfc': 30.0,
            'ecutrho': 240.0,
            'nr1b': 24,
            'nr2b': 24,
            'nr3b': 24,
        },
        'ELECTRONS': {
            'electron_damping': 1.0e-1,
            'electron_dynamics': 'damp',
            'emass': 400.0,
            'emass_cutoff': 3.0,
        },
        'IONS': {
            'ion_dynamics': 'none'
        },
    }
    return AttributeDict({
        'structure': structure,
        'parameters': orm.Dict(dict=parameters),
    })

def test_symmetry_reduction(self):
    """Test that symmetry reduction in the TCOD exporter collapses symmetry-equivalent sites."""
    from aiida.tools.dbexporters.tcod import export_values
    from ase import Atoms

    a = Atoms('BaTiO3', cell=(4., 4., 4.))
    a.set_scaled_positions((
        (0.0, 0.0, 0.0),
        (0.5, 0.5, 0.5),
        (0.5, 0.5, 0.0),
        (0.5, 0.0, 0.5),
        (0.0, 0.5, 0.5),
    ))

    a.set_chemical_symbols(['Ba', 'Ti', 'O', 'O', 'O'])
    val = export_values(orm.StructureData(ase=a), reduce_symmetry=True, store=True)['0']
    self.assertEqual(val['_atom_site_label'], ['Ba1', 'Ti1', 'O1'])
    self.assertEqual(val['_symmetry_space_group_name_H-M'], 'Pm-3m')
    self.assertEqual(val['_symmetry_space_group_name_Hall'], '-P 4 2 3')

def _apply_single_strain(structure: orm.StructureData, strain_kind: orm.Str, strain_parameters: orm.Str,
                         strength_value: orm.Float) -> orm.StructureData:
    """
    Applies a specific strain (kind, parameters, and value) to the given
    structure, and returns the strained structure.
    """
    strain_classname = 'strain.structure.' + strain_kind.value
    strain_class = get_object_from_string(strain_classname)

    # Renamed from `strain_parameters` to avoid shadowing the function argument
    strain_parametername = 'strain.parameter.' + strain_parameters.value
    strain_parameters_resolved = get_object_from_string(strain_parametername)

    strain_instance = strain_class(**strain_parameters_resolved)

    pmg_structure = structure.get_pymatgen_structure()
    new_pmg_structure = strain_instance.apply(pmg_structure, strength_value.value)

    new_structure_data = orm.StructureData()
    new_structure_data.set_pymatgen(new_pmg_structure)
    return new_structure_data
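
# Hypothetical usage sketch, assuming the strain kind and parameter names can
# be resolved via `get_object_from_string` as the function above expects; the
# 'VanderbiltStrain'/'VanderbiltParameters' names are illustrative only.
strained = _apply_single_strain(
    structure,
    orm.Str('VanderbiltStrain'),
    orm.Str('VanderbiltParameters'),
    orm.Float(0.1),
)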