def scaled_structure(structure, scale):

    new_structure = StructureData(cell=numpy.array(structure.cell)*float(scale))

    for site in structure.sites:
        new_structure.append_atom(position=numpy.array(site.position) * float(scale),
                                  symbols=structure.get_kind(site.kind_name).symbol,
                                  name=site.kind_name)
    new_structure.label = 'auxiliary structure for stress tensor'
    new_structure.description = "created from the original structure with PK=%i, "\
                                "lattice constant scaling: %f"%(structure.pk, float(scale))

    return new_structure
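A minimal usage sketch (not part of the scraped snippet), assuming the legacy aiida.orm.data.structure API; the silicon cell and the 2% scaling are illustrative values only.

# Hypothetical usage of scaled_structure(); cell and scale are illustrative.
from aiida.orm.data.structure import StructureData

alat = 5.43  # illustrative lattice constant in Angstrom
structure = StructureData(cell=[[alat, 0., 0.], [0., alat, 0.], [0., 0., alat]])
structure.append_atom(position=(0., 0., 0.), symbols='Si')
structure.append_atom(position=(alat / 4., alat / 4., alat / 4.), symbols='Si')
structure.store()  # scaled_structure() reads structure.pk, so store the node first

scaled = scaled_structure(structure, 1.02)  # lattice constant scaled by 2%
print(scaled.cell)
print(scaled.description)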
Example #2
    def get_structure(self):
        """
        Returns the structure used to calculate the NAC parameters

        :return: StructureData
        """

        structure = StructureData(cell=self.get_attr('cell'))

        symbols = self.get_attr('symbols')
        positions = self.get_attr('positions')
        for symbol, position in zip(symbols, positions):
            structure.append_atom(position=position, symbols=symbol)

        return structure
Example #3
    def get_step_structure(self, index, custom_kinds=None):
        """
        Return an AiiDA :py:class:`aiida.orm.data.structure.StructureData` node
        (not stored yet!) with the coordinates of the given step, identified by
        its index. If you know only the step value, use the
        :py:meth:`.get_index_from_stepid` method to get the corresponding index.

        .. note:: The periodic boundary conditions are always set to True.

        .. versionadded:: 0.7
           Renamed from step_to_structure

        :param index: The index of the step that you want to retrieve, from
           0 to ``self.numsteps - 1``.
        :param custom_kinds: (Optional) If passed must be a list of
          :py:class:`aiida.orm.data.structure.Kind` objects. There must be one
          kind object for each different string in the ``symbols`` array, with
          ``kind.name`` set to this string.
          If this parameter is omitted, the automatic kind generation of AiiDA
          :py:class:`aiida.orm.data.structure.StructureData` nodes is used,
          meaning that the strings in the ``symbols`` array must be valid
          chemical symbols.
        """
        from aiida.orm.data.structure import StructureData, Kind, Site

        # ignore step, time, and velocities
        _, _, cell, symbols, positions, _ = self.get_step_data(index)

        if custom_kinds is not None:
            kind_names = []
            for k in custom_kinds:
                if not isinstance(k, Kind):
                    raise TypeError(
                        "Each element of the custom_kinds list must "
                        "be a aiida.orm.data.structure.Kind object")
                kind_names.append(k.name)
            if len(kind_names) != len(set(kind_names)):
                raise ValueError("Multiple kinds with the same name passed "
                                 "as custom_kinds")
            if set(kind_names) != set(symbols):
                raise ValueError("If you pass custom_kinds, you have to "
                                 "pass one Kind object for each symbol "
                                 "that is present in the trajectory. You "
                                 "passed {}, but the symbols are {}".format(
                                     sorted(kind_names), sorted(symbols)))

        struc = StructureData(cell=cell)
        if custom_kinds is not None:
            for k in custom_kinds:
                struc.append_kind(k)
            for s, p in zip(symbols, positions):
                struc.append_site(Site(kind_name=s, position=p))
        else:
            for s, p in zip(symbols, positions):
                # Automatic species generation
                struc.append_atom(symbols=s, position=p)

        return struc
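A hedged usage sketch of both calling modes, assuming traj is an existing TrajectoryData node whose symbols array contains only 'Fe' and 'O' (an illustrative assumption).

# Hypothetical usage; `traj` is an existing TrajectoryData node (assumption).
from aiida.orm.data.structure import Kind

# Automatic kind generation from the chemical symbols:
last_structure = traj.get_step_structure(traj.numsteps - 1)

# Custom kinds: exactly one Kind per distinct string in the symbols array,
# with kind.name equal to that string (here Fe gets a non-default mass).
custom = [Kind(symbols='Fe', mass=57.0, name='Fe'),
          Kind(symbols='O', name='O')]
first_structure = traj.get_step_structure(0, custom_kinds=custom)
first_structure.store()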
Example #4
    def _parse_trajectory(self, out_folder, new_nodes_list):
        fn = self._calc._RESTART_FILE_NAME
        if fn not in out_folder.get_folder_list():
            return  # not every run type produces a trajectory

        # read restart file
        abs_fn = out_folder.get_abs_path(fn)
        with open(abs_fn) as handle:
            content = handle.read()

        # parse coordinate section
        m = re.search(r'\n\s*&COORD\n(.*?)\n\s*&END COORD\n', content, re.DOTALL)
        coord_lines = [line.strip().split() for line in m.group(1).split("\n")]
        symbols = [line[0] for line in coord_lines]
        positions_str = [line[1:] for line in coord_lines]
        positions = np.array(positions_str, np.float64)

        # parse cell section
        m = re.search(r'\n\s*&CELL\n(.*?)\n\s*&END CELL\n', content, re.DOTALL)
        cell_lines = [line.strip().split() for line in m.group(1).split("\n")]
        cell_str = [line[1:] for line in cell_lines if line[0] in 'ABC']
        cell = np.array(cell_str, np.float64)

        # create StructureData
        atoms = ase.Atoms(symbols=symbols, positions=positions, cell=cell)
        pair = ('output_structure', StructureData(ase=atoms))
        new_nodes_list.append(pair)
Example #5
def disassemble_poscar(poscar):

    try:
        poscar_struct = StructureData(pymatgen_structure=poscar.structure)
        structure_extras = ArrayData()

        opt = False
        # optional parameters
        if poscar.predictor_corrector:
            opt = True
            structure_extras.set_array('predictor_corrector',
                                       np.array(poscar.predictor_corrector))

        if poscar.selective_dynamics:
            opt = True
            structure_extras.set_array('selective_dynamics',
                                       np.array(poscar.selective_dynamics))

        if poscar.velocities:
            opt = True
            structure_extras.set_array('velocities',
                                       np.array(poscar.velocities))

    except Exception as e:
        msg = ("Failed to disassemble the POSCAR object "
               "with error message: {}".format(e))
        raise ValueError(msg)

    return {
        'structure': poscar_struct,
        'structure_extras': structure_extras if opt else None
    }
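A hedged usage sketch, assuming a VASP POSCAR file on disk and the pymatgen Poscar parser; the file name is a placeholder.

# Hypothetical usage of disassemble_poscar(); the path is a placeholder.
from pymatgen.io.vasp import Poscar

poscar = Poscar.from_file('POSCAR')  # may carry velocities, selective dynamics, ...
result = disassemble_poscar(poscar)

structure = result['structure']        # StructureData node
extras = result['structure_extras']    # ArrayData node, or None if no optional data
if extras is not None:
    print(extras.get_arraynames())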
Example #6
def get_closest_struc_django(distinct=True, with_attr=False):
    nodes = ParameterData.query().with_entities('id')
    struc_type = StructureData._query_type_string

    depth = (sa.session.query(DbPath.depth).filter(
        DbPath.child_id.in_(nodes)).join(DbNode, DbPath.parent).filter(
            DbNode.type.like("{}%".format(struc_type))).order_by(DbPath.depth))

    if distinct:
        depth = depth.distinct()

    depth = depth[0][0]

    q = (DbPath.query.filter(DbPath.child_id.in_(nodes)).join(
        DbNode,
        DbPath.parent).filter(DbNode.type.like(
            "{}%".format(struc_type))).filter(DbPath.depth == depth))

    q = q.distinct()

    q = q.with_entities(DbPath.id)

    res = (StructureData.query().join(DbPath, DbNode.child_paths).filter(
        DbPath.child_id.in_(nodes)).filter(DbPath.id.in_(q)))

    res = res.distinct()

    res = res.order_by(DbNode.ctime)

    if not with_attr:
        res = res.options(defer(DbNode.attributes), defer(DbNode.extras))

    return res.all()
Example #7
def create_supercells_with_displacements_using_phonopy(structure,
                                                       phonopy_input):
    """
    Create the supercells with the displacements to use the finite displacements methodology to calculate the
    force constants
    :param structure: Aiida StructureData Object
    :param phonopy_input: Aiida Parametersdata object containing a dictionary with the data needed to run phonopy:
            supercells matrix, primitive matrix and displacement distance.
    :return: dictionary of Aiida StructureData Objects containing the cells with displacements
    """
    from phonopy.structure.atoms import Atoms as PhonopyAtoms
    from phonopy import Phonopy
    import numpy as np

    # Generate phonopy phonon object
    bulk = PhonopyAtoms(symbols=[site.kind_name for site in structure.sites],
                        positions=[site.position for site in structure.sites],
                        cell=structure.cell)

    phonopy_input = phonopy_input.get_dict()
    phonon = Phonopy(bulk,
                     phonopy_input['supercell'],
                     primitive_matrix=phonopy_input['primitive'])

    phonon.generate_displacements(distance=phonopy_input['distance'])

    cells_with_disp = phonon.get_supercells_with_displacements()

    # Transform cells to StructureData and set them ready to return
    data_sets = phonon.get_displacement_dataset()
    data_sets_object = ArrayData()
    for i, first_atoms in enumerate(data_sets['first_atoms']):
        data_sets_array = np.array([
            first_atoms['direction'], first_atoms['number'],
            first_atoms['displacement']
        ])
        data_sets_object.set_array('data_sets_{}'.format(i), data_sets_array)

    disp_cells = {'data_sets': data_sets_object}
    for i, phonopy_supercell in enumerate(cells_with_disp):
        supercell = StructureData(cell=phonopy_supercell.get_cell())
        for symbol, position in zip(phonopy_supercell.get_chemical_symbols(),
                                    phonopy_supercell.get_positions()):
            supercell.append_atom(position=position, symbols=symbol)
        disp_cells["structure_{}".format(i)] = supercell

    return disp_cells
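A hedged sketch of the ParameterData dictionary this function reads ('supercell', 'primitive', 'distance'); the matrices, the displacement distance and the structure node are illustrative assumptions.

# Hypothetical usage; `structure` is an existing StructureData node (assumption).
from aiida.orm.data.parameter import ParameterData

phonopy_input = ParameterData(dict={
    'supercell': [[2, 0, 0], [0, 2, 0], [0, 0, 2]],   # supercell matrix
    'primitive': [[1, 0, 0], [0, 1, 0], [0, 0, 1]],   # primitive matrix
    'distance': 0.01,                                  # displacement distance
})

result = create_supercells_with_displacements_using_phonopy(structure,
                                                            phonopy_input)
data_sets = result['data_sets']
displaced = [v for k, v in result.items() if k.startswith('structure_')]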
Example #8
def refine_inline(node):
    """
    Refine (reduce) the cell of :py:class:`aiida.orm.data.cif.CifData`,
    find and remove symmetrically equivalent atoms.

    :param node: a :py:class:`aiida.orm.data.cif.CifData` instance.
    :return: dict with :py:class:`aiida.orm.data.cif.CifData`

    .. note:: can be used as inline calculation.
    """
    from aiida.orm.data.structure import StructureData, ase_refine_cell

    if len(node.values.keys()) > 1:
        raise ValueError("CifData seems to contain more than one data "
                         "block -- multiblock CIF files are not "
                         "supported yet")

    name = node.values.keys()[0]

    original_atoms = node.get_ase(index=None)
    if len(original_atoms) > 1:
        raise ValueError("CifData seems to contain more than one crystal "
                         "structure -- such refinement is not supported "
                         "yet")

    original_atoms = original_atoms[0]

    refined_atoms, symmetry = ase_refine_cell(original_atoms)

    cif = CifData(ase=refined_atoms)
    if name != str(0):
        cif.values.rename(str(0), name)

    # Remove all existing symmetry tags before overwriting:
    for tag in symmetry_tags:
        cif.values[name].RemoveCifItem(tag)

    cif.values[name]['_symmetry_space_group_name_H-M'] = symmetry['hm']
    cif.values[name]['_symmetry_space_group_name_Hall'] = symmetry['hall']
    cif.values[name]['_symmetry_Int_Tables_number'] = symmetry['tables']
    cif.values[name]['_symmetry_equiv_pos_as_xyz'] = \
        [symop_string_from_symop_matrix_tr(symmetry['rotations'][i],
                                           symmetry['translations'][i])
         for i in range(0, len(symmetry['rotations']))]

    # Summary formula has to be calculated from non-reduced set of atoms.
    cif.values[name]['_chemical_formula_sum'] = \
        StructureData(ase=original_atoms).get_formula(mode='hill',
                                                      separator=' ')

    # If the number of atoms in the reduced cell divides the number of
    # atoms in the original cell evenly, the new Z value can be calculated.
    if '_cell_formula_units_Z' in node.values[name].keys():
        old_Z = node.values[name]['_cell_formula_units_Z']
        if len(original_atoms) % len(refined_atoms) == 0:
            new_Z = old_Z * len(original_atoms) / len(refined_atoms)
            cif.values[name]['_cell_formula_units_Z'] = new_Z

    return {'cif': cif}
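A hedged usage sketch, assuming the surrounding module provides the symmetry_tags list and the symop_string_from_symop_matrix_tr helper referenced above; the aluminium cell is illustrative.

# Hypothetical usage; relies on module-level symmetry_tags and
# symop_string_from_symop_matrix_tr referenced in refine_inline().
from ase.spacegroup import crystal
from aiida.orm.data.cif import CifData

atoms = crystal('Al', [(0., 0., 0.)], spacegroup=225,
                cellpar=[4.05, 4.05, 4.05, 90, 90, 90])
refined_cif = refine_inline(CifData(ase=atoms))['cif']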
Example #9
def get_supercell(structure, supercell_shape):
    import itertools

    symbols = np.array([site.kind_name for site in structure.sites])
    positions = np.array([site.position for site in structure.sites])
    cell = np.array(structure.cell)
    supercell_shape = np.array(supercell_shape.dict.shape)

    supercell_array = np.dot(cell, np.diag(supercell_shape))

    supercell = StructureData(cell=supercell_array)
    for k in range(positions.shape[0]):
        for r in itertools.product(*[range(i) for i in supercell_shape[::-1]]):
            position = positions[k, :] + np.dot(np.array(r[::-1]), cell)
            symbol = symbols[k]
            supercell.append_atom(position=position, symbols=symbol)

    return supercell
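A short usage sketch: supercell_shape is a ParameterData whose 'shape' entry gives the repetitions along the three lattice vectors; the values and the structure node are illustrative assumptions.

# Hypothetical usage; `structure` is an existing StructureData node (assumption).
from aiida.orm.data.parameter import ParameterData

shape = ParameterData(dict={'shape': [2, 2, 1]})
supercell = get_supercell(structure, shape)
print(supercell.get_formula())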
Example #10
    def test_1(self):
        import os
        import shutil
        import tempfile

        from aiida.orm import DataFactory
        from aiida.orm import load_node
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.importexport import export

        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            StructureData = DataFactory('structure')
            sd = StructureData()
            sd.store()

            calc = JobCalculation()
            calc.set_computer(self.computer)
            calc.set_resources({
                "num_machines": 1,
                "num_mpiprocs_per_machine": 1
            })
            calc.store()

            calc.add_link_from(sd)

            pks = [sd.pk, calc.pk]

            attrs = {}
            for pk in pks:
                node = load_node(pk)
                attrs[node.uuid] = dict()
                for k in node.attrs():
                    attrs[node.uuid][k] = node.get_attr(k)

            filename = os.path.join(temp_folder, "export.tar.gz")

            export([calc.dbnode], outfile=filename, silent=True)

            self.clean_db()

            # NOTE: it is better to load new nodes by uuid, rather than assuming
            # that they will have the first 3 pks. In fact, a recommended policy in
            # databases is that pk always increment, even if you've deleted elements
            import_data(filename, silent=True)
            for uuid in attrs.keys():
                node = load_node(uuid)
                # for k in node.attrs():
                for k in attrs[uuid].keys():
                    self.assertEqual(attrs[uuid][k], node.get_attr(k))
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)
Example #11
def _get_aiida_structure_pymatgen_inline(cif, **kwargs):
    """
    Creates :py:class:`aiida.orm.data.structure.StructureData` using pymatgen.

    :param occupancy_tolerance: If total occupancy of a site is between 1 and occupancy_tolerance,
        the occupancies will be scaled down to 1.
    :param site_tolerance: This tolerance is used to determine if two sites are sitting in the same position,
        in which case they will be combined to a single disordered site. Defaults to 1e-4.

    .. note:: requires pymatgen module.
    """
    from pymatgen.io.cif import CifParser
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.data.structure import StructureData

    if 'parameters' in kwargs:
        parameters = kwargs['parameters']
    else:
        parameters = {}

    if isinstance(parameters, ParameterData):
        parameters = parameters.get_dict()

    constructor_kwargs = {}

    parameters['primitive'] = parameters.pop('primitive_cell', False)

    for argument in ['occupancy_tolerance', 'site_tolerance']:
        if argument in parameters:
            constructor_kwargs[argument] = parameters.pop(argument)

    parser = CifParser(cif.get_file_abs_path(), **constructor_kwargs)

    try:
        structures = parser.get_structures(**parameters)
    except ValueError:

        # Verify whether the failure was due to wrong occupancy numbers
        try:
            constructor_kwargs['occupancy_tolerance'] = 1E10
            parser = CifParser(cif.get_file_abs_path(), **constructor_kwargs)
            structures = parser.get_structures(**parameters)
        except ValueError:
            # If it still fails, the occupancies were not the reason for failure
            raise ValueError(
                'pymatgen failed to provide a structure from the cif file')
        else:
            # If it now succeeds, non-unity occupancies were the culprit
            raise InvalidOccupationsError(
                'detected atomic sites with an occupation number larger than the occupation tolerance'
            )

    return {'structure': StructureData(pymatgen_structure=structures[0])}
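A hedged usage sketch showing how the pymatgen-specific tolerances travel through the parameters dictionary; the values are illustrative and cif is assumed to be an existing CifData node.

# Hypothetical usage; `cif` is an existing CifData node (assumption).
from aiida.orm.data.parameter import ParameterData

params = ParameterData(dict={'occupancy_tolerance': 1.05,
                             'site_tolerance': 1e-4,
                             'primitive_cell': False})
result = _get_aiida_structure_pymatgen_inline(cif, parameters=params)
structure = result['structure']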
Example #12
def load_example_structures():
    """ Read input structures into the database

    Structures are read from subfolder "example-structures"
    and stored in the group "example-structures".

    :return: group of available structures
    """
    from aiida.orm.group import Group
    from aiida.common.exceptions import NotExistent

    try:
        group = Group.get(name=group_name)

    except NotExistent:
        import glob
        import os
        from ase.io import read
        from aiida.orm.data.structure import StructureData

        paths = glob.glob(group_name + '/*.cif')

        structure_nodes = []
        for path in paths:
            fname = os.path.basename(path)
            name = os.path.splitext(fname)[0]

            structure = StructureData(ase=read(path))
            if "ML" in name:
                # surface normal of monolayers should be oriented along z
                structure.set_pbc([True, True, False])
            else:
                structure.set_pbc([True, True, True])
            structure.label = name
            print("Storing {} in database".format(name))
            structure.store()
            structure_nodes.append(structure)

        group = Group(name=group_name)
        group.store()
        group.description = "\
        Set of atomic structures used by examples for AiiDA plugins of different codes"

        group.add_nodes(structure_nodes)

    return group
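A brief usage sketch, assuming group_name is defined at module level as 'example-structures' (per the docstring) and the CIF files are present in that subfolder.

# Hypothetical usage; group_name = 'example-structures' is assumed to be a
# module-level constant, per the docstring above.
group = load_example_structures()
for structure in group.nodes:
    print(structure.label, structure.get_formula())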
Example #13
def _import_ase(filename, **kwargs):
    """
    Imports a structure in a number of formats using the ASE routines.
    """
    from os.path import abspath
    from aiida.orm.data.structure import StructureData

    try:
        import ase.io
    except ImportError:
        echo.echo_critical("You have not installed the package ase. \n"
                           "You can install it with: pip install ase")

    store = kwargs.pop('store')
    view_in_ase = kwargs.pop('view')

    echo.echo('importing structure from: \n  {}'.format(abspath(filename)))
    filepath = abspath(filename)

    try:
        asecell = ase.io.read(filepath)
        new_structure = StructureData(ase=asecell)

        if store:
            new_structure.store()
        if view_in_ase:
            from ase.visualize import view
            view(new_structure.get_ase())
        echo.echo('  Successfully imported structure {}, '
                  '(PK = {})'.format(new_structure.get_formula(),
                                     new_structure.pk))

    except ValueError as err:
        echo.echo_critical(err)
Example #14
    def test_3(self):
        """
        Test importing of nodes, that have links to unknown nodes.
        """
        import json
        import tarfile
        import os
        import shutil
        import tempfile

        from aiida.orm import DataFactory
        from aiida.orm.importexport import export
        from aiida.common.folders import SandboxFolder

        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            StructureData = DataFactory('structure')
            sd = StructureData()
            sd.store()

            filename = os.path.join(temp_folder, "export.tar.gz")
            export([sd.dbnode], outfile=filename, silent=True)

            unpack = SandboxFolder()
            with tarfile.open(filename, "r:gz",
                              format=tarfile.PAX_FORMAT) as tar:
                tar.extractall(unpack.abspath)

            with open(unpack.get_abs_path('data.json'), 'r') as f:
                metadata = json.load(f)
            metadata['links_uuid'].append({
                'output': sd.uuid,
                'input': 'non-existing-uuid',
                'label': 'parent'
            })
            with open(unpack.get_abs_path('data.json'), 'w') as f:
                json.dump(metadata, f)

            with tarfile.open(filename, "w:gz",
                              format=tarfile.PAX_FORMAT) as tar:
                tar.add(unpack.abspath, arcname="")

            self.clean_db()

            with self.assertRaises(ValueError):
                import_data(filename, silent=True)

            import_data(filename, ignore_unknown_nodes=True, silent=True)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)
Example #15
    def setup_structures(self):
        """
        Very simple. Avoid seekpath for now
        """
        self.report('Running setup_structures')

        self.ctx.structure_initial_primitive = self.inputs.structure
        scell, xasc, specsc = buildsc(self.inputs.scarray, self.inputs.structure)
        nna = len(xasc)
        self.ctx.structure_supercell = StructureData(cell=scell)
        for i in range(nna):
            self.ctx.structure_supercell.append_atom(
                position=(xasc[i][0], xasc[i][1], xasc[i][2]),
                symbols=specsc[i])
Example #16
def _get_aiida_structure_ase_inline(cif=None, parameters=None):
    """
    Creates :py:class:`aiida.orm.data.structure.StructureData` using ASE.

    .. note:: unable to correctly import structures of alloys.
    .. note:: requires ASE module.
    """
    from aiida.orm.data.structure import StructureData

    kwargs = {}
    if parameters is not None:
        kwargs = parameters.get_dict()
    return {'structure': StructureData(ase=cif.get_ase(**kwargs))}
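A hedged usage sketch; the CIF file path is a placeholder and the empty parameters dictionary simply accepts the defaults.

# Hypothetical usage; the file path is a placeholder.
from aiida.orm.data.cif import CifData
from aiida.orm.data.parameter import ParameterData

cif = CifData(file='/path/to/structure.cif')
result = _get_aiida_structure_ase_inline(cif=cif,
                                         parameters=ParameterData(dict={}))
structure = result['structure']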
Example #17
    def test_2(self):
        """
        Test the check for the export format version.
        """
        import json
        import tarfile
        import os
        import shutil
        import tempfile

        from aiida.orm import DataFactory
        from aiida.orm.importexport import export

        # Creating a folder for the import/export files
        export_file_tmp_folder = tempfile.mkdtemp()
        unpack_tmp_folder = tempfile.mkdtemp()
        try:
            StructureData = DataFactory('structure')
            sd = StructureData()
            sd.store()

            filename = os.path.join(export_file_tmp_folder, "export.tar.gz")
            export([sd.dbnode], outfile=filename, silent=True)

            with tarfile.open(filename, "r:gz",
                              format=tarfile.PAX_FORMAT) as tar:
                tar.extractall(unpack_tmp_folder)

            with open(os.path.join(unpack_tmp_folder, 'metadata.json'),
                      'r') as f:
                metadata = json.load(f)
            metadata['export_version'] = 0.0
            with open(os.path.join(unpack_tmp_folder, 'metadata.json'),
                      'w') as f:
                json.dump(metadata, f)

            with tarfile.open(filename, "w:gz",
                              format=tarfile.PAX_FORMAT) as tar:
                tar.add(unpack_tmp_folder, arcname="")

            self.tearDownClass()
            self.setUpClass()

            with self.assertRaises(ValueError):
                import_data(filename, silent=True)
        finally:
            # Deleting the created temporary folders
            shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
            shutil.rmtree(unpack_tmp_folder, ignore_errors=True)
Example #18
def _import_xyz(filename, **kwargs):
    """
    Imports an XYZ-file.
    """
    from os.path import abspath
    from aiida.orm.data.structure import StructureData

    vacuum_addition = kwargs.pop('vacuum_addition')
    vacuum_factor = kwargs.pop('vacuum_factor')
    pbc = [bool(i) for i in kwargs.pop('pbc')]
    store = kwargs.pop('store')
    view_in_ase = kwargs.pop('view')

    echo.echo('importing XYZ-structure from: \n  {}'.format(abspath(filename)))
    filepath = abspath(filename)
    with open(filepath) as fobj:
        xyz_txt = fobj.read()
    new_structure = StructureData()
    # pylint: disable=protected-access
    try:
        new_structure._parse_xyz(xyz_txt)
        new_structure._adjust_default_cell(vacuum_addition=vacuum_addition,
                                           vacuum_factor=vacuum_factor,
                                           pbc=pbc)

        if store:
            new_structure.store()
        if view_in_ase:
            from ase.visualize import view
            view(new_structure.get_ase())
        echo.echo('  Successfully imported structure {}, '
                  '(PK = {})'.format(new_structure.get_formula(),
                                     new_structure.pk))

    except ValueError as err:
        echo.echo_critical(err)
Example #19
    def _get_output_nodes(self, output_path, error_path):
        """
        Extracts output nodes from the standard output and standard error
        files.
        """
        from pymatgen.io.nwchem import NwOutput
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.data.structure import StructureData
        from aiida.orm.data.array.trajectory import TrajectoryData

        ret_dict = []
        nwo = NwOutput(output_path)
        for out in nwo.data:
            molecules = out.pop('molecules', None)
            structures = out.pop('structures', None)
            if molecules:
                structlist = [
                    StructureData(pymatgen_molecule=m) for m in molecules
                ]
                ret_dict.append(
                    ('trajectory', TrajectoryData(structurelist=structlist)))
            if structures:
                structlist = [
                    StructureData(pymatgen_structure=s) for s in structures
                ]
                ret_dict.append(
                    ('trajectory', TrajectoryData(structurelist=structlist)))
            ret_dict.append(('output', ParameterData(dict=out)))

        # Since ParameterData rewrites its properties (using _set_attr())
        # with keys from the supplied dictionary, ``source`` has to be
        # moved to another key. See issue #9 for details:
        # (https://bitbucket.org/epfl_theos/aiida_epfl/issues/9)
        nwo.job_info['program_source'] = nwo.job_info.pop('source', None)
        ret_dict.append(('job_info', ParameterData(dict=nwo.job_info)))

        return ret_dict
Example #20
    def sub_create_bands_data(cls, user=None):
        from aiida.orm.data.array.kpoints import KpointsData
        from aiida.orm import JobCalculation
        from aiida.orm.data.structure import StructureData
        from aiida.common.links import LinkType
        from aiida.orm.data.array.bands import BandsData
        import numpy

        s = StructureData(cell=((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)))
        s.append_atom(position=(0., 0., 0.),
                      symbols=['Ba', 'Ti'],
                      weights=(1., 0.),
                      name='mytype')
        if user is not None:
            s.dbnode.user = user._dbuser
        s.store()

        c = JobCalculation(computer=cls.computer,
                           resources={
                               'num_machines': 1,
                               'num_mpiprocs_per_machine': 1
                           })
        if user is not None:
            c.dbnode.user = user._dbuser
        c.store()
        c.add_link_from(s, "S1", LinkType.INPUT)
        c._set_state(calc_states.RETRIEVING)

        # define a cell
        alat = 4.
        cell = numpy.array([
            [alat, 0., 0.],
            [0., alat, 0.],
            [0., 0., alat],
        ])

        k = KpointsData()
        k.set_cell(cell)
        k.set_kpoints_path()
        if user is not None:
            k.dbnode.user = user._dbuser
        k.store()

        b = BandsData()
        b.set_kpointsdata(k)
        input_bands = numpy.array(
            [numpy.ones(4) * i for i in range(k.get_kpoints().shape[0])])
        b.set_bands(input_bands, units='eV')
        if user is not None:
            b.dbnode.user = user._dbuser
        b.store()

        b.add_link_from(c, link_type=LinkType.CREATE)

        return b
Example #21
def mounet1():
    pw_calc = Group.get(pk=1139193).nodes.next()
    structure = pw_calc.out.output_structure
    qstruc = StructureData.query(children__pk=structure.pk).with_entities(
        DbNode.id)
    n_children = aliased(DbNode)
    qic = (InlineCalculation.query(
    ).join(DbLink, DbNode.id == DbLink.output_id).filter(
        DbLink.input_id.in_(qstruc)).join(n_children, DbNode.inputs).filter(
            or_(
                n_children.attributes["radii_source"].astext.like("%alvarez"),
                n_children.attributes[(
                    "lowdim_dict",
                    "radii_source")].astext.like("%alvarez"))).distinct())

    return qic.with_entities(func.count(DbNode.id)).scalar()
Example #22
def mounet1(with_key_filter=False):
    pw_calc = Group.get(pk=1139193).nodes.next()
    structure = pw_calc.out.output_structure
    qstruc = StructureData.query(children__pk=structure.pk)
    attr_filters = models.DbAttribute.objects.filter(tval__endswith='alvarez')

    # Because we can't reproduce a filter on the value only with a JSON table,
    # a fairer comparison would be with a filter on the key too.
    if with_key_filter:
        attr_filters = attr_filters.filter(
            Q(key="radii_source") | Q(key="lowdim_dict.radii_source"))

    qic = InlineCalculation.query(inputs__in=qstruc).filter(
        inputs__dbattributes__in=attr_filters).distinct()

    return qic.count()
Example #23
def get_closest_struc():
    nodes = ParameterData.query()
    struc_type = StructureData._query_type_string

    depth = models.DbPath.objects.filter(
        child__in=nodes,
        parent__type__contains=struc_type).distinct().order_by(
            'depth').values_list('depth')[0][0]

    q = models.DbPath.objects.filter(parent__type__contains=struc_type,
                                     child__in=nodes,
                                     depth=depth).distinct()

    res = StructureData.query(children__in=nodes,
                              child_paths__in=q).distinct().order_by('ctime')

    return list(res)
Example #24
def mounet2():
    StructureData = DataFactory('structure')
    structure = load_node(2304207)
    qstruc = StructureData.query(children__pk=structure.pk).with_entities(
        DbNode.id)

    n_children = aliased(DbNode)
    qic = (InlineCalculation.query().filter(
        DbNode.attributes["function_name"].astext == "lowdimfinder_inline"
    ).join(DbLink, DbNode.id == DbLink.output_id).filter(
        DbLink.input_id.in_(qstruc)).join(n_children, DbNode.inputs).filter(
            or_(
                n_children.attributes["radii_source"].astext.like("%alvarez"),
                n_children.attributes[(
                    "lowdim_dict",
                    "radii_source")].astext.like("%alvarez"))).distinct())

    return qic.with_entities(func.count(DbNode.id)).scalar()
Example #25
def mounet2(with_key_filter=False):
    StructureData = DataFactory('structure')
    structure = load_node(2304207)
    qstruc = StructureData.query(children__pk=structure.pk)
    qattr = models.DbAttribute.objects.filter(key='function_name',
                                              tval='lowdimfinder_inline',
                                              dbnode__inputs__in=qstruc)

    attr_filters = models.DbAttribute.objects.filter(tval__endswith='alvarez')

    if with_key_filter:
        attr_filters = attr_filters.filter(
            Q(key="radii_source") | Q(key="lowdim_dict.radii_source"))

    qic = InlineCalculation.query(
        inputs__in=qstruc, dbattributes__in=qattr).filter(
            inputs__dbattributes__in=attr_filters).distinct()

    return qic.count()
Example #26
def scaled_structure(structure, scale):

    new_structure = StructureData(cell=np.array(structure.cell)*scale)

    for site in structure.sites:
        new_structure.append_atom(position=np.array(site.position) * scale,
                                  symbols=structure.get_kind(site.kind_name).symbol,
                                  name=site.kind_name)
    new_structure.label = 'created inside stress tensor run'
    new_structure.description = "auxiliary structure for stress tensor "\
                                "created from the original structure with PK=%i, "\
                                "lattice constant scaling: %f"%(structure.pk, scale)

    return new_structure
Example #27
    def store_structure(self, name, description=None):
        structure_ase = self.get_ase(self.tmp_folder + '/' + name)
        if structure_ase is None:
            return

        # determine data source
        if name.endswith('.cif'):
            source_format = 'CIF'
        else:
            source_format = 'ASE'

        # perform conversion
        if self.data_format.value == 'CifData':
            if source_format == 'CIF':
                from aiida.orm.data.cif import CifData
                structure_node = CifData(file=self.tmp_folder + '/' + name,
                                         scan_type='flex',
                                         parse_policy='lazy')
            else:
                from aiida.orm.data.cif import CifData
                structure_node = CifData()
                structure_node.set_ase(structure_ase)
        else:
            # Target format is StructureData
            from aiida.orm.data.structure import StructureData
            structure_node = StructureData(ase=structure_ase)

            #TODO: Figure out whether this is still necessary for StructureData
            # ensure that tags got correctly translated into kinds
            for t1, k in zip(structure_ase.get_tags(),
                             structure_node.get_site_kindnames()):
                t2 = int(k[-1]) if k[-1].isnumeric() else 0
                assert t1 == t2
        if description is None:
            structure_node.description = self.get_description(
                structure_ase, name)
        else:
            structure_node.description = description
        structure_node.label = ".".join(name.split('.')[:-1])
        structure_node.store()
        self.structure_node = structure_node
        print("Stored in AiiDA: " + repr(structure_node))
Example #28
    def test_symmetry_reduction(self):
        from aiida.orm.data.structure import StructureData
        from aiida.tools.dbexporters.tcod import export_values
        from ase import Atoms

        a = Atoms('BaTiO3', cell=(4., 4., 4.))
        a.set_scaled_positions(
            ((0.0, 0.0, 0.0),
             (0.5, 0.5, 0.5),
             (0.5, 0.5, 0.0),
             (0.5, 0.0, 0.5),
             (0.0, 0.5, 0.5),
             )
        )

        a.set_chemical_symbols(['Ba', 'Ti', 'O', 'O', 'O'])
        val = export_values(StructureData(ase=a), reduce_symmetry=True, store=True)['0']
        self.assertEqual(val['_atom_site_label'], ['Ba1', 'Ti1', 'O1'])
        self.assertEqual(val['_symmetry_space_group_name_H-M'], 'Pm-3m')
        self.assertEqual(val['_symmetry_space_group_name_Hall'], '-P 4 2 3')
Example #29
def _get_aiida_structure_pymatgen_inline(cif=None, parameters=None):
    """
    Creates :py:class:`aiida.orm.data.structure.StructureData` using
    pymatgen.

    .. note:: requires pymatgen module.
    """
    from pymatgen.io.cifio import CifParser
    from aiida.orm.data.structure import StructureData

    kwargs = {}
    if parameters is not None:
        kwargs = parameters.get_dict()
    kwargs['primitive'] = kwargs.pop('primitive_cell', False)
    parser = CifParser(cif.get_file_abs_path())
    try:
        struct = parser.get_structures(**kwargs)[0]
        return {'structure': StructureData(pymatgen_structure=struct)}
    except IndexError:
        raise ValueError("pymatgen failed to provide a structure from the cif file")
Example #30
def test_prepare_and_validate(new_database, new_workdir):
    """test preparation of inputs"""
    code = get_main_code(new_workdir)

    inparams = {"scf.k_points": (8, 8)}

    from aiida.orm import DataFactory, CalculationFactory
    from ase.spacegroup import crystal
    StructureData = DataFactory('structure')

    atoms = crystal(symbols=[12, 8],
                    basis=[[0, 0, 0], [0.5, 0.5, 0.5]],
                    spacegroup=225,
                    cellpar=[4.21, 4.21, 4.21, 90, 90, 90])
    instruct = StructureData(ase=atoms)

    from aiida_crystal17.workflows.symmetrise_3d_struct import (
        run_symmetrise_3d_structure)
    instruct, settings = run_symmetrise_3d_structure(instruct)

    calc_cls = CalculationFactory('crystal17.main')
    calc_cls.prepare_and_validate(inparams, instruct, settings, flattened=True)
Example #31
def _get_aiida_structure_ase_inline(cif, **kwargs):
    """
    Creates :py:class:`aiida.orm.data.structure.StructureData` using ASE.

    .. note:: unable to correctly import structures of alloys.
    .. note:: requires ASE module.
    """
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.data.structure import StructureData

    if 'parameters' in kwargs:
        parameters = kwargs['parameters']
    else:
        parameters = {}

    if isinstance(parameters, ParameterData):
        parameters = parameters.get_dict()

    parameters.pop('occupancy_tolerance', None)
    parameters.pop('site_tolerance', None)

    return {'structure': StructureData(ase=cif.get_ase(**parameters))}