def _import_ase(filename, **kwargs):
    """
    Imports a structure in a number of formats using the ASE routines.
    """
    from os.path import abspath
    from aiida.orm.data.structure import StructureData

    try:
        import ase.io
    except ImportError:
        echo.echo_critical("You have not installed the package ase. \n"
                           "You can install it with: pip install ase")

    store = kwargs.pop('store')
    view_in_ase = kwargs.pop('view')

    echo.echo('importing structure from: \n {}'.format(abspath(filename)))
    filepath = abspath(filename)

    try:
        asecell = ase.io.read(filepath)
        new_structure = StructureData(ase=asecell)

        if store:
            new_structure.store()
        if view_in_ase:
            from ase.visualize import view
            view(new_structure.get_ase())
        echo.echo(' Successfully imported structure {}, '
                  '(PK = {})'.format(new_structure.get_formula(), new_structure.pk))
    except ValueError as err:
        echo.echo_critical(err)
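# A minimal usage sketch (assumptions: `echo` from aiida.cmdline.utils.echo is
# available in the surrounding module, and 'BaTiO3.cif' is only a placeholder
# filename that exists in the working directory):
if __name__ == '__main__':
    _import_ase('BaTiO3.cif', store=True, view=False)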
def sub_create_bands_data(cls, user=None):
    from aiida.orm.data.array.kpoints import KpointsData
    from aiida.orm import JobCalculation
    from aiida.orm.data.structure import StructureData
    from aiida.common.datastructures import calc_states  # needed for the state transitions below
    from aiida.common.links import LinkType
    from aiida.orm.data.array.bands import BandsData
    import numpy

    s = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)))
    s.append_atom(position=(0., 0., 0.),
                  symbols=['Ba', 'Ti'],
                  weights=(1., 0.),
                  name='mytype')
    if user is not None:
        s.dbnode.user = user._dbuser
    s.store()

    c = JobCalculation(computer=cls.computer,
                       resources={
                           'num_machines': 1,
                           'num_mpiprocs_per_machine': 1
                       })
    if user is not None:
        c.dbnode.user = user._dbuser
    c.store()
    c.add_link_from(s, "S1", LinkType.INPUT)
    c._set_state(calc_states.RETRIEVING)

    # define a cell
    alat = 4.
    cell = numpy.array([
        [alat, 0., 0.],
        [0., alat, 0.],
        [0., 0., alat],
    ])

    k = KpointsData()
    k.set_cell(cell)
    k.set_kpoints_path()
    if user is not None:
        k.dbnode.user = user._dbuser
    k.store()

    b = BandsData()
    b.set_kpointsdata(k)
    input_bands = numpy.array(
        [numpy.ones(4) * i for i in range(k.get_kpoints().shape[0])])
    b.set_bands(input_bands, units='eV')
    if user is not None:
        b.dbnode.user = user._dbuser
    b.store()

    b.add_link_from(c, link_type=LinkType.CREATE)

    return b
def create_structure_data():
    from aiida.orm.data.structure import StructureData, Site, Kind
    from aiida.orm.group import Group

    alat = 4.  # angstrom
    cell = [
        [alat, 0., 0.],
        [0., alat, 0.],
        [0., 0., alat],
    ]

    # BaTiO3 cubic structure
    struc = StructureData(cell=cell)
    struc.append_atom(position=(0., 0., 0.), symbols='Ba')
    struc.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols='Ti')
    struc.append_atom(position=(alat / 2., alat / 2., 0.), symbols='O')
    struc.append_atom(position=(alat / 2., 0., alat / 2.), symbols='O')
    struc.append_atom(position=(0., alat / 2., alat / 2.), symbols='O')
    struc.store()

    # Create 2 groups and add the data to one of them
    g_ne = Group(name='non_empty_group')
    g_ne.store()
    g_ne.add_nodes(struc)

    g_e = Group(name='empty_group')
    g_e.store()

    return {
        TestVerdiDataListable.NODE_ID_STR: struc.id,
        TestVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
        TestVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
    }
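# Sketch of consuming the returned identifiers (assumes a loaded AiiDA profile
# and that TestVerdiDataListable is importable from the surrounding test module):
from aiida.orm import load_node

ids = create_structure_data()
structure = load_node(ids[TestVerdiDataListable.NODE_ID_STR])
print(structure.get_formula())  # prints the formula of the cubic BaTiO3 cell built above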
def load_example_structures():
    """ Read input structures into the database

    Structures are read from the subfolder "example-structures"
    and stored in the group "example-structures".

    :return: group of available structures
    """
    from aiida.orm.group import Group
    from aiida.common.exceptions import NotExistent

    # `group_name` is expected to be defined at module level
    # (per the docstring: "example-structures")
    try:
        group = Group.get(name=group_name)

    except NotExistent:
        import glob
        import os
        from ase.io import read
        from aiida.orm.data.structure import StructureData

        paths = glob.glob(group_name + '/*.cif')

        structure_nodes = []
        for path in paths:
            fname = os.path.basename(path)
            name = os.path.splitext(fname)[0]

            structure = StructureData(ase=read(path))
            if "ML" in name:
                # surface normal of monolayers should be oriented along z
                structure.set_pbc([True, True, False])
            else:
                structure.set_pbc([True, True, True])
            structure.label = name
            print("Storing {} in database".format(name))
            structure.store()
            structure_nodes.append(structure)

        group = Group(name=group_name)
        group.store()
        group.description = "Set of atomic structures used by examples for AiiDA plugins of different codes"
        group.add_nodes(structure_nodes)

    return group
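# Usage sketch (assumes a loaded AiiDA profile, group_name = 'example-structures'
# defined at module level, and the CIF files present in that subfolder):
group = load_example_structures()
for structure in group.nodes:
    print("{} (PK = {}), pbc = {}".format(structure.label, structure.pk, structure.pbc))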
def test_subclassing(self):
    from aiida.orm.data.structure import StructureData
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm import Node, Data
    from aiida.orm.querybuilder import QueryBuilder

    s = StructureData()
    s._set_attr('cat', 'miau')
    s.store()

    d = Data()
    d._set_attr('cat', 'miau')
    d.store()

    p = ParameterData(dict=dict(cat='miau'))
    p.store()

    n = Node()
    n._set_attr('cat', 'miau')
    n.store()

    # Now when asking for a node with attr.cat==miau, I want 4 results:
    qb = QueryBuilder().append(Node, filters={'attributes.cat': 'miau'})
    self.assertEqual(qb.count(), 4)

    qb = QueryBuilder().append(Data, filters={'attributes.cat': 'miau'})
    self.assertEqual(qb.count(), 3)

    # If I'm asking for the specific lowest subclass, I want one result
    for cls in (StructureData, ParameterData):
        qb = QueryBuilder().append(cls, filters={'attributes.cat': 'miau'})
        self.assertEqual(qb.count(), 1)

    # Now I disallow subclassing, which should give 1 result for each class
    for cls in (StructureData, ParameterData, Node, Data):
        qb = QueryBuilder().append(cls,
                                   filters={'attributes.cat': 'miau'},
                                   subclassing=False)
        self.assertEqual(qb.count(), 1)
def _import_xyz(filename, **kwargs):
    """
    Imports an XYZ-file.
    """
    from os.path import abspath
    from aiida.orm.data.structure import StructureData

    vacuum_addition = kwargs.pop('vacuum_addition')
    vacuum_factor = kwargs.pop('vacuum_factor')
    pbc = [bool(i) for i in kwargs.pop('pbc')]
    store = kwargs.pop('store')
    view_in_ase = kwargs.pop('view')

    echo.echo('importing XYZ-structure from: \n {}'.format(abspath(filename)))
    filepath = abspath(filename)

    with open(filepath) as fobj:
        xyz_txt = fobj.read()
    new_structure = StructureData()

    # pylint: disable=protected-access
    try:
        new_structure._parse_xyz(xyz_txt)
        new_structure._adjust_default_cell(vacuum_addition=vacuum_addition,
                                           vacuum_factor=vacuum_factor,
                                           pbc=pbc)

        if store:
            new_structure.store()
        if view_in_ase:
            from ase.visualize import view
            view(new_structure.get_ase())
        echo.echo(' Successfully imported structure {}, '
                  '(PK = {})'.format(new_structure.get_formula(), new_structure.pk))
    except ValueError as err:
        echo.echo_critical(err)
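# Example call (a sketch; the keyword names mirror the kwargs popped above,
# `echo` from aiida.cmdline.utils.echo is assumed to be available in the
# surrounding module, and 'water.xyz' is only a placeholder filename):
if __name__ == '__main__':
    _import_xyz('water.xyz',
                vacuum_addition=5.0,
                vacuum_factor=1.0,
                pbc=(0, 0, 0),
                store=True,
                view=False)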
def get_Lagrange_distorted_structure(structure_id, M_Lagrange_eps):
    import numpy as np
    from aiida.orm import load_node
    from aiida.orm.data.structure import StructureData

    s0 = load_node(structure_id)

    one = np.identity(3)
    # Lagrangian strain tensor built from the deformation matrix M:
    # eta = (M^T M - 1) / 2
    deform = (np.dot(M_Lagrange_eps.T, M_Lagrange_eps) - one) / 2.

    #distorted_cell = np.dot((deform + one), s0.cell)
    distorted_cell = np.dot(s0.cell, (deform + one))

    s = StructureData(cell=distorted_cell)

    for site in s0.sites:
        kind_name = site.kind_name
        # fractional coordinates of the site in the original cell ...
        frac_coor = np.squeeze(np.asarray(
            list(np.matrix(s0.cell).T.I * np.matrix(site.position).T)))
        # ... mapped back to Cartesian coordinates in the distorted cell
        distorted_position = np.squeeze(np.asarray(
            list(np.matrix(s.cell).T * np.matrix(frac_coor).T)))
        s.append_atom(position=distorted_position, symbols=kind_name)

    s.store()

    return s
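# Worked example (a sketch; assumes a loaded AiiDA profile and that
# `structure_pk` is the PK of an existing StructureData node -- the variable
# name is hypothetical). A 1% uniaxial deformation along x:
import numpy as np

eps = 0.01
M = np.array([[1. + eps, 0., 0.],
              [0., 1., 0.],
              [0., 0., 1.]])
# distorted = get_Lagrange_distorted_structure(structure_pk, M)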
def create_structure_data(cls, cmd_to_nodeid_map, cmd_to_nodeid_map_for_groups,
                          cmd_to_nodeid_map_for_nuser, group, new_user):
    from aiida.orm.data.structure import StructureData
    from aiida.cmdline.commands.data import _Structure

    s1 = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)))
    s1.append_atom(position=(0., 0., 0.),
                   symbols=['Ba', 'Ti'],
                   weights=(1., 0.),
                   name='mytype')
    s1.store()

    s2 = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)))
    s2.append_atom(position=(0., 0., 0.),
                   symbols=['Ba', 'Ti'],
                   weights=(1., 0.),
                   name='mytype')
    s2.store()

    # Keep track of the created objects
    cmd_to_nodeid_map[_Structure] = [s1.id, s2.id]

    # Add the second StructureData node to the group
    group.add_nodes([s2])
    # Keep track of the id of the node that you added to the group
    cmd_to_nodeid_map_for_groups[_Structure] = s2.id

    # Create a StructureData node belonging to another user
    s3 = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)))
    s3.append_atom(position=(0., 0., 0.),
                   symbols=['Ba', 'Ti'],
                   weights=(1., 0.),
                   name='mytype')
    s3.dbnode.user = new_user._dbuser
    s3.store()
    # Put it into the right map
    cmd_to_nodeid_map_for_nuser[_Structure] = [s3.id]
from aiida.common.example_helpers import test_and_get_code  # noqa
from aiida.orm.data.structure import StructureData  # noqa
from aiida.orm.data.parameter import ParameterData  # noqa
from aiida.orm.data.base import Str
from aiida.work.run import submit
import ase.build

from aiida_cp2k.workflows import Cp2kGeoOptWorkChain

atoms = ase.build.molecule('H2O')
atoms.center(vacuum=2.0)
structure = StructureData(ase=atoms)
structure.label = 'H2O'
structure.store()

options_dict = {
    "resources": {
        "num_machines": 1,
        "num_mpiprocs_per_machine": 2,
    },
    "max_wallclock_seconds": 3 * 60 * 60,
}
options = ParameterData(dict=options_dict)

params_dict = {
    'FORCE_EVAL': {
        'DFT': {
            'UKS': True,
        },
    },
}
def test_6(self):
    """
    This test checks that nodes belonging to user A (which is not the
    default user) can be correctly exported, imported, enriched with nodes
    from the default user, re-exported & re-imported, and that in the end
    all the imported nodes belong to the right users.
    """
    import os
    import shutil
    import tempfile

    from aiida.orm import load_node
    from aiida.orm.calculation.job import JobCalculation
    from aiida.orm.data.structure import StructureData
    from aiida.orm.importexport import export, import_data
    from aiida.common.datastructures import calc_states
    from aiida.common.links import LinkType
    from aiida.common.utils import get_configured_user_email
    from aiida.orm.user import User

    # Creating a folder for the import/export files
    temp_folder = tempfile.mkdtemp()
    try:
        # Create another user
        new_email = "[email protected]"
        user = User(email=new_email)
        user.force_save()

        # Create a structure data node that has a calculation as output
        sd1 = StructureData()
        sd1.dbnode.user = user._dbuser
        sd1.label = 'sd1'
        sd1.store()

        jc1 = JobCalculation()
        jc1.set_computer(self.computer)
        jc1.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc1.dbnode.user = user._dbuser
        jc1.label = 'jc1'
        jc1.store()
        jc1.add_link_from(sd1)
        jc1._set_state(calc_states.PARSING)

        # Create some nodes from a different user
        sd2 = StructureData()
        sd2.dbnode.user = user._dbuser
        sd2.label = 'sd2'
        sd2.store()
        sd2.add_link_from(jc1, label='l1', link_type=LinkType.RETURN)

        # Set the jc1 to FINISHED
        jc1._set_state(calc_states.FINISHED)

        # At this point we export the generated data
        filename1 = os.path.join(temp_folder, "export1.tar.gz")
        export([sd2.dbnode], outfile=filename1, silent=True)
        uuids1 = [sd1.uuid, jc1.uuid, sd2.uuid]
        self.clean_db()
        self.insert_data()
        import_data(filename1, silent=True)

        # Check that the imported nodes are correctly imported and that
        # the user assigned to the nodes is the right one
        for uuid in uuids1:
            self.assertEquals(load_node(uuid).get_user().email, new_email)

        # Now we continue to generate more data based on the imported data
        sd2_imp = load_node(sd2.uuid)

        jc2 = JobCalculation()
        jc2.set_computer(self.computer)
        jc2.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc2.label = 'jc2'
        jc2.store()
        jc2.add_link_from(sd2_imp, label='l2')
        jc2._set_state(calc_states.PARSING)

        sd3 = StructureData()
        sd3.label = 'sd3'
        sd3.store()
        sd3.add_link_from(jc2, label='l3', link_type=LinkType.RETURN)

        # Set the jc2 to FINISHED
        jc2._set_state(calc_states.FINISHED)

        # Store the UUIDs of the nodes that should be checked
        # if they can be imported correctly.
        uuids2 = [jc2.uuid, sd3.uuid]

        filename2 = os.path.join(temp_folder, "export2.tar.gz")
        export([sd3.dbnode], outfile=filename2, silent=True)
        self.clean_db()
        self.insert_data()
        import_data(filename2, silent=True)

        # Check that the imported nodes are correctly imported and that
        # the user assigned to the nodes is the right one
        for uuid in uuids1:
            self.assertEquals(load_node(uuid).get_user().email, new_email)
        for uuid in uuids2:
            self.assertEquals(load_node(uuid).get_user().email,
                              get_configured_user_email())
    finally:
        # Deleting the created temporary folder
        shutil.rmtree(temp_folder, ignore_errors=True)
def test_5(self):
    """
    This test checks that nodes belonging to different users are correctly
    exported & imported.
    """
    import os
    import shutil
    import tempfile

    from aiida.orm import load_node
    from aiida.orm.calculation.job import JobCalculation
    from aiida.orm.data.structure import StructureData
    from aiida.orm.importexport import export, import_data
    from aiida.common.datastructures import calc_states
    from aiida.common.links import LinkType
    from aiida.orm.user import User
    from aiida.common.utils import get_configured_user_email

    # Creating a folder for the import/export files
    temp_folder = tempfile.mkdtemp()
    try:
        # Create another user
        new_email = "[email protected]"
        user = User(email=new_email)
        user.force_save()

        # Create a structure data node that has a calculation as output
        sd1 = StructureData()
        sd1.dbnode.user = user._dbuser
        sd1.label = 'sd1'
        sd1.store()

        jc1 = JobCalculation()
        jc1.set_computer(self.computer)
        jc1.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc1.dbnode.user = user._dbuser
        jc1.label = 'jc1'
        jc1.store()
        jc1.add_link_from(sd1)
        jc1._set_state(calc_states.PARSING)

        # Create some nodes from a different user
        sd2 = StructureData()
        sd2.dbnode.user = user._dbuser
        sd2.label = 'sd2'
        sd2.store()
        sd2.add_link_from(jc1, label='l1', link_type=LinkType.RETURN)

        jc2 = JobCalculation()
        jc2.set_computer(self.computer)
        jc2.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
        jc2.label = 'jc2'
        jc2.store()
        jc2.add_link_from(sd2, label='l2')
        jc2._set_state(calc_states.PARSING)

        sd3 = StructureData()
        sd3.label = 'sd3'
        sd3.store()
        sd3.add_link_from(jc2, label='l3', link_type=LinkType.RETURN)

        uuids_u1 = [sd1.uuid, jc1.uuid, sd2.uuid]
        uuids_u2 = [jc2.uuid, sd3.uuid]

        filename = os.path.join(temp_folder, "export.tar.gz")
        export([sd3.dbnode], outfile=filename, silent=True)
        self.clean_db()
        import_data(filename, silent=True)

        # Check that the imported nodes are correctly imported and that
        # the user assigned to the nodes is the right one
        for uuid in uuids_u1:
            self.assertEquals(load_node(uuid).get_user().email, new_email)
        for uuid in uuids_u2:
            self.assertEquals(load_node(uuid).get_user().email,
                              get_configured_user_email())
    finally:
        # Deleting the created temporary folder
        shutil.rmtree(temp_folder, ignore_errors=True)
def create_structure_bands():
    # The following names are assumed to come from module-level imports in the
    # test file: np (numpy), StructureData, KpointsData, BandsData, Group, the
    # @wf workfunction decorator and TestVerdiDataListable.
    alat = 4.  # angstrom
    cell = [
        [alat, 0., 0.],
        [0., alat, 0.],
        [0., 0., alat],
    ]
    s = StructureData(cell=cell)
    s.append_atom(position=(0., 0., 0.), symbols='Fe')
    s.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols='O')
    s.store()

    @wf
    def connect_structure_bands(structure):
        # `structure` is only used to record the provenance link from the
        # structure to the bands created by this workfunction
        alat = 4.
        cell = np.array([
            [alat, 0., 0.],
            [0., alat, 0.],
            [0., 0., alat],
        ])
        k = KpointsData()
        k.set_cell(cell)
        k.set_kpoints_path([('G', 'M', 2)])

        b = BandsData()
        b.set_kpointsdata(k)
        b.set_bands([[1.0, 2.0], [3.0, 4.0]])

        k.store()
        b.store()

        return b

    b = connect_structure_bands(s)

    # Create 2 groups and add the data to one of them
    g_ne = Group(name='non_empty_group')
    g_ne.store()
    g_ne.add_nodes(b)

    g_e = Group(name='empty_group')
    g_e.store()

    return {
        TestVerdiDataListable.NODE_ID_STR: b.id,
        TestVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
        TestVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
    }
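# Sketch of consuming the returned identifiers (assumes a loaded AiiDA profile
# and the same module-level names as above):
from aiida.orm import load_node

ids = create_structure_bands()
bands = load_node(ids[TestVerdiDataListable.NODE_ID_STR])
print(bands.get_bands())  # the two k-points along G-M, with two bands each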
'''
Launch this script to load and store in the AiiDA database the necessary data
'''
import os
from os import listdir
from os.path import isfile, join

from ase.io import read
from aiida.orm.data.structure import StructureData
from aiida.orm.data.singlefile import SinglefileData
from aiida.orm.group import Group

mypath = os.getcwd()
files = [f for f in listdir(mypath) if isfile(join(mypath, f))]
if 'Pd.cif' not in files or 'Pd_isolated-atom.cif' not in files or 'WIEN2k.txt' not in files:
    raise Exception(
        "Files are missing in the current directory! You need to have: "
        "'Pd.cif', 'Pd_isolated-atom.cif' and 'WIEN2k.txt' (to be taken from the calcDelta package)"
    )

# Store structure of elemental Pd
atoms = read('{}/Pd.cif'.format(mypath))
structure = StructureData(ase=atoms)
n = structure.store()
print 'StructureData of elemental Pd obtained from Pd.cif has pk = {}\n'.format(
    str(n.pk))

# Add structure of the isolated Pd atom to an AiiDA group
g, _ = Group.get_or_create(name='Isolated_atoms')
atoms = read('{}/Pd_isolated-atom.cif'.format(mypath))
structure = StructureData(ase=atoms)
n = structure.store()
g.add_nodes(n)
print "Added structure (pk = {}) obtained from Pd_isolated-atom.cif to AiiDA group 'Isolated_atoms'\n".format(
    n.pk)

# Store SinglefileData with WIEN2k results for the equations of state
f = SinglefileData()
f.set_file('{}/WIEN2k.txt'.format(mypath))