def scaled_structure(structure, scale):
    """Build a new structure with cell and atomic positions rescaled.

    :param structure: the original StructureData node
    :param scale: multiplicative factor applied to the cell vectors and
        to every site position (converted to float once)
    :return: a new, unstored StructureData node
    """
    factor = float(scale)
    rescaled = StructureData(cell=numpy.array(structure.cell) * factor)
    for old_site in structure.sites:
        kind = structure.get_kind(old_site.kind_name)
        rescaled.append_atom(
            position=numpy.array(old_site.position) * factor,
            symbols=kind.symbol,
            name=old_site.kind_name,
        )
    rescaled.label = 'auxiliary structure for stress tensor'
    rescaled.description = ("created from the original structure with PK=%i, "
                            "lattice constant scaling: %f"
                            % (structure.pk, factor))
    return rescaled
def scaled_structure(structure, scale):
    """Return a rescaled copy of *structure*.

    The cell vectors and all site positions are multiplied by *scale*;
    kind names and symbols are carried over unchanged.

    :param structure: the original StructureData node
    :param scale: multiplicative scaling factor
    :return: a new, unstored StructureData node
    """
    out = StructureData(cell=np.array(structure.cell) * scale)
    for src in structure.sites:
        out.append_atom(
            position=np.array(src.position) * scale,
            symbols=structure.get_kind(src.kind_name).symbol,
            name=src.kind_name,
        )
    out.label = 'created inside stress tensor run'
    out.description = ("auxiliary structure for stress tensor "
                       "created from the original structure with PK=%i, "
                       "lattice constant scaling: %f"
                       % (structure.pk, scale))
    return out
def load_example_structures():
    """Read the example input structures into the database.

    Structures are read from subfolder "example-structures" and stored
    in the group "example-structures" (the module-level ``group_name``).

    :return: group of available structures
    """
    from aiida.orm.group import Group

    try:
        # Reuse the group if a previous run has already created it.
        group = Group.get(name=group_name)
    except NotExistent:
        import glob
        import os
        from ase.io import read
        from aiida.orm.data.structure import StructureData

        stored_nodes = []
        for cif_path in glob.glob(group_name + '/*.cif'):
            label = os.path.splitext(os.path.basename(cif_path))[0]
            node = StructureData(ase=read(cif_path))
            # Monolayers ("ML" in the name) are periodic only in-plane;
            # their surface normal should be oriented along z.
            is_monolayer = "ML" in label
            node.set_pbc([True, True, not is_monolayer])
            node.label = label
            print("Storing {} in database".format(label))
            node.store()
            stored_nodes.append(node)

        group = Group(name=group_name)
        group.store()
        group.description = "Set of atomic structures used by examples for AiiDA plugins of different codes"
        group.add_nodes(stored_nodes)

    return group
from aiida.common.example_helpers import test_and_get_code # noqa from aiida.orm.data.structure import StructureData # noqa from aiida.orm.data.parameter import ParameterData # noqa from aiida.orm.data.base import Str from aiida.work.run import submit import ase.build from aiida_cp2k.workflows import Cp2kGeoOptWorkChain atoms = ase.build.molecule('H2O') atoms.center(vacuum=2.0) structure = StructureData(ase=atoms) structure.label = 'H2O' structure.store() options_dict = { "resources": { "num_machines": 1, "num_mpiprocs_per_machine": 2, }, "max_wallclock_seconds": 3 * 60 * 60, } options = ParameterData(dict=options_dict) params_dict = { 'FORCE_EVAL': { 'DFT': { 'UKS': True, }, }, }
def test_6(self):
    """
    Check that nodes belonging to user A (which is not the default user)
    can be correctly exported, imported, enriched with nodes from the
    default user, re-exported & re-imported, and that in the end all the
    imported nodes belong to the right users.
    """
    import os
    import shutil
    import tempfile

    from aiida.orm import load_node
    from aiida.orm.calculation.job import JobCalculation
    from aiida.orm.data.structure import StructureData
    from aiida.orm.importexport import export
    from aiida.common.datastructures import calc_states
    from aiida.common.links import LinkType
    from aiida.common.utils import get_configured_user_email
    from aiida.orm.user import User

    # Creating a folder for the import/export files
    temp_folder = tempfile.mkdtemp()
    try:
        # Create another user (not the default one)
        new_email = "[email protected]"
        user = User(email=new_email)
        user.force_save()

        # Create a structure data node that has a calculation as output,
        # owned by the new user
        sd1 = StructureData()
        sd1.dbnode.user = user._dbuser
        sd1.label = 'sd1'
        sd1.store()

        jc1 = JobCalculation()
        jc1.set_computer(self.computer)
        jc1.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc1.dbnode.user = user._dbuser
        jc1.label = 'jc1'
        jc1.store()
        jc1.add_link_from(sd1)
        jc1._set_state(calc_states.PARSING)

        # Create some nodes from a different user
        sd2 = StructureData()
        sd2.dbnode.user = user._dbuser
        sd2.label = 'sd2'
        sd2.store()
        sd2.add_link_from(jc1, label='l1', link_type=LinkType.RETURN)

        # Set the jc1 to FINISHED (RETURN links require a finished calc)
        jc1._set_state(calc_states.FINISHED)

        # At this point we export the generated data: exporting sd2 pulls
        # in its provenance (sd1 and jc1) as well.
        filename1 = os.path.join(temp_folder, "export1.tar.gz")
        export([sd2.dbnode], outfile=filename1, silent=True)
        uuids1 = [sd1.uuid, jc1.uuid, sd2.uuid]
        self.clean_db()
        self.insert_data()
        import_data(filename1, silent=True)

        # Check that the imported nodes are correctly imported and that
        # the user assigned to the nodes is the right one
        for uuid in uuids1:
            self.assertEquals(load_node(uuid).get_user().email, new_email)

        # Now we continue to generate more data based on the imported
        # data, this time as the default (configured) user
        sd2_imp = load_node(sd2.uuid)

        jc2 = JobCalculation()
        jc2.set_computer(self.computer)
        jc2.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc2.label = 'jc2'
        jc2.store()
        jc2.add_link_from(sd2_imp, label='l2')
        jc2._set_state(calc_states.PARSING)

        sd3 = StructureData()
        sd3.label = 'sd3'
        sd3.store()
        sd3.add_link_from(jc2, label='l3', link_type=LinkType.RETURN)

        # Set the jc2 to FINISHED
        jc2._set_state(calc_states.FINISHED)

        # Store the UUIDs of the nodes that should be checked
        # if they can be imported correctly.
        uuids2 = [jc2.uuid, sd3.uuid]

        # Re-export (sd3's provenance now spans both users) and re-import
        filename2 = os.path.join(temp_folder, "export2.tar.gz")
        export([sd3.dbnode], outfile=filename2, silent=True)
        self.clean_db()
        self.insert_data()
        import_data(filename2, silent=True)

        # Check that the imported nodes are correctly imported and that
        # the user assigned to the nodes is the right one: the original
        # nodes keep the new user, the later nodes the default user
        for uuid in uuids1:
            self.assertEquals(load_node(uuid).get_user().email, new_email)
        for uuid in uuids2:
            self.assertEquals(load_node(uuid).get_user().email,
                              get_configured_user_email())
    finally:
        # Deleting the created temporary folder
        shutil.rmtree(temp_folder, ignore_errors=True)
def test_5(self):
    """
    Check that nodes belonging to different users are correctly
    exported & imported.
    """
    import os
    import shutil
    import tempfile

    from aiida.orm import load_node
    from aiida.orm.calculation.job import JobCalculation
    from aiida.orm.data.structure import StructureData
    from aiida.orm.importexport import export
    from aiida.common.datastructures import calc_states
    from aiida.common.links import LinkType
    from aiida.orm.user import User
    from aiida.common.utils import get_configured_user_email

    # Creating a folder for the import/export files
    temp_folder = tempfile.mkdtemp()
    try:
        # Create another user (not the default one)
        new_email = "[email protected]"
        user = User(email=new_email)
        user.force_save()

        # Create a structure data node that has a calculation as output,
        # owned by the new user
        sd1 = StructureData()
        sd1.dbnode.user = user._dbuser
        sd1.label = 'sd1'
        sd1.store()

        jc1 = JobCalculation()
        jc1.set_computer(self.computer)
        jc1.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc1.dbnode.user = user._dbuser
        jc1.label = 'jc1'
        jc1.store()
        jc1.add_link_from(sd1)
        jc1._set_state(calc_states.PARSING)

        # Create some nodes from a different user
        # NOTE(review): sd2 is still assigned to the new user here;
        # jc2/sd3 below are the ones created by the default user
        sd2 = StructureData()
        sd2.dbnode.user = user._dbuser
        sd2.label = 'sd2'
        sd2.store()
        sd2.add_link_from(jc1, label='l1', link_type=LinkType.RETURN)

        jc2 = JobCalculation()
        jc2.set_computer(self.computer)
        jc2.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc2.label = 'jc2'
        jc2.store()
        jc2.add_link_from(sd2, label='l2')
        jc2._set_state(calc_states.PARSING)

        sd3 = StructureData()
        sd3.label = 'sd3'
        sd3.store()
        sd3.add_link_from(jc2, label='l3', link_type=LinkType.RETURN)

        # UUIDs grouped by expected owner after re-import
        uuids_u1 = [sd1.uuid, jc1.uuid, sd2.uuid]
        uuids_u2 = [jc2.uuid, sd3.uuid]

        # Export sd3 (its provenance pulls in all five nodes), wipe the
        # database and import the archive back
        filename = os.path.join(temp_folder, "export.tar.gz")
        export([sd3.dbnode], outfile=filename, silent=True)
        self.clean_db()
        import_data(filename, silent=True)

        # Check that the imported nodes are correctly imported and that
        # the user assigned to the nodes is the right one
        for uuid in uuids_u1:
            self.assertEquals(load_node(uuid).get_user().email, new_email)
        for uuid in uuids_u2:
            self.assertEquals(load_node(uuid).get_user().email,
                              get_configured_user_email())
    finally:
        # Deleting the created temporary folder
        shutil.rmtree(temp_folder, ignore_errors=True)
from aiida.common.example_helpers import test_and_get_code # noqa from aiida.orm.data.structure import StructureData # noqa from aiida.orm.data.parameter import ParameterData # noqa from aiida.orm.data.base import Str from aiida.work.run import submit from ase.io import read from aiida_cp2k.workflows import Cp2kRobustGeoOptWorkChain atoms = read( '/home/daniele/Programs/aiida-database/frameworks/test/Cu-MOF-74_h211.cif') structure = StructureData(ase=atoms) structure.label = 'Cu-MOF-74' structure.store() options_dict = { "resources": { "num_machines": 2, }, "max_wallclock_seconds": 1 * 60 * 60, 'prepend_text': '#SBATCH --partition=debug', } options = ParameterData(dict=options_dict) params_dict = { 'MOTION': { 'MD': { 'STEPS': 5, }, 'GEO_OPT': {
from aiida.common.example_helpers import test_and_get_code # noqa from aiida.orm.data.structure import StructureData # noqa from aiida.orm.data.parameter import ParameterData # noqa from aiida.work.run import submit from ase.io import read from aiida_lsmo_workflows.geoopt_charges import Cp2kGeoOptDdecWorkChain atoms = read('Fe-MOF-74.cif') structure = StructureData(ase=atoms) structure.label = 'Fe-MOF-74' structure.store() cp2k_options = { "resources": { "num_machines": 2, }, "max_wallclock_seconds": 1 * 60 * 60, } params_dict = { 'MOTION': { 'MD': { 'STEPS': 5, }, 'GEO_OPT': { 'MAX_ITER': 5, }, 'CELL_OPT': { 'MAX_ITER': 5, },
from aiida.common.example_helpers import test_and_get_code # noqa from aiida.orm.data.structure import StructureData # noqa from aiida.orm.data.parameter import ParameterData # noqa from aiida.orm.data.base import Str from aiida.work.run import submit from ase.io import read from aiida_cp2k.workflows import Cp2kRobustGeoOptWorkChain atoms = read( '/home/daniele/Dropbox (LSMO)/proj44_studycasesCP2K/CELL_OPT/13150N_CoRE-COF_4layers.cif' ) structure = StructureData(ase=atoms) structure.label = '13150Nx4layers' structure.store() options_dict = { "resources": { "num_machines": 2, }, "max_wallclock_seconds": 3 * 60 * 60, } options = ParameterData(dict=options_dict) params_dict = {} parameters = ParameterData(dict=params_dict) code = test_and_get_code('cp2k@fidis', expected_code_type='cp2k') submit( Cp2kRobustGeoOptWorkChain, code=code, structure=structure,