示例#1
0
    def get(self, computer, user):
        """Return an entry from the collection that is configured for the given computer and user

        :param computer: a :class:`aiida.orm.implementation.computers.BackendComputer` instance
        :param user: a :class:`aiida.orm.implementation.users.BackendUser` instance
        :return: :class:`aiida.orm.implementation.authinfos.BackendAuthInfo`
        :raise aiida.common.exceptions.NotExistent: if no entry exists for the computer/user pair
        :raise aiida.common.exceptions.MultipleObjectsError: if multiple entries exist for the computer/user pair
        """
        # pylint: disable=import-error,no-name-in-module
        from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned

        try:
            authinfo = DbAuthInfo.objects.get(dbcomputer=computer.id,
                                              aiidauser=user.id)
        except ObjectDoesNotExist as exception:
            # Chain the original exception so the Django error is kept in the traceback.
            raise exceptions.NotExistent(
                f'User<{user.email}> has no configuration for Computer<{computer.name}>'
            ) from exception
        except MultipleObjectsReturned as exception:
            raise exceptions.MultipleObjectsError(
                f'User<{user.email}> has multiple configurations for Computer<{computer.name}>'
            ) from exception
        else:
            return self.from_dbmodel(authinfo)
示例#2
0
    def set_reference_bandsdata(self, value):
        """
        Sets a reference bandsdata, creates a uuid link between this data
        object and a bandsdata object, must be set before any projection arrays

        :param value: a BandsData instance, a uuid or a pk
        :raise: exceptions.NotExistent if there was no BandsData associated with uuid or pk
        """
        from aiida.orm import load_node

        if isinstance(value, BandsData):
            uuid = value.uuid
        else:
            try:
                # First try to interpret the value as a pk; `int('<uuid>')` raises ValueError.
                pk = int(value)
                bands = load_node(pk=pk)
                uuid = bands.uuid
            except ValueError:
                # Not an integer, so interpret the value as a uuid instead.
                uuid = str(value)
                try:
                    bands = load_node(uuid=uuid)
                    uuid = bands.uuid
                except Exception as exception:  # pylint: disable=broad-except
                    # Note: the except is broad (not bare); chain the cause for debuggability.
                    raise exceptions.NotExistent(
                        'The value passed to '
                        'set_reference_bandsdata was not '
                        'associated to any bandsdata') from exception

        self.set_attribute('reference_bandsdata_uuid', uuid)
示例#3
0
    def get(cls, **kwargs):
        """
        Custom get for group which can be used to get a group with the given attributes

        :param kwargs: the attributes to match the group to
        :return: the group
        :raises aiida.common.exceptions.MultipleObjectsError: if more than one group matches
        :raises aiida.common.exceptions.NotExistent: if no group matches
        """
        from aiida.orm import QueryBuilder

        if 'type_string' in kwargs:
            if not isinstance(kwargs['type_string'], six.string_types):
                raise exceptions.ValidationError(
                    'type_string must be {}, you provided an object of type '
                    '{}'.format(str, type(kwargs['type_string'])))

        # All keyword arguments are used verbatim as query filters.
        filters = dict(kwargs)

        query = QueryBuilder()
        query.append(cls, filters=filters)
        results = query.all()
        if len(results) > 1:
            raise exceptions.MultipleObjectsError(
                "Found {} groups matching criteria '{}'".format(
                    len(results), kwargs))
        if not results:
            raise exceptions.NotExistent(
                "No group found matching criteria '{}'".format(kwargs))
        return results[0][0]
示例#4
0
    def get(self, computer, user):
        """
        Return a AuthInfo given a computer and a user

        :param computer: a Computer instance
        :param user: a User instance
        :return: an AuthInfo object associated with the given computer and user
        :raise aiida.common.exceptions.NotExistent: if the user is not configured to use computer
        :raise aiida.common.exceptions.ConfigurationError: if the user is configured
            more than once to use the computer! Should never happen
        """
        from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned

        try:
            authinfo = DbAuthInfo.objects.get(dbcomputer=computer.dbcomputer,
                                              aiidauser=user.id)

            return self.from_dbmodel(authinfo)
        except ObjectDoesNotExist as exception:
            raise exceptions.NotExistent(
                "The aiida user {} is not configured to use computer {}".
                format(user.email, computer.name)) from exception
        except MultipleObjectsReturned as exception:
            raise exceptions.ConfigurationError(
                "The aiida user {} is configured more than once to use "
                "computer {}! Only one configuration is allowed".format(
                    user.email, computer.name)) from exception
示例#5
0
    def get(self, computer, user):
        """
        Return a SqlaAuthInfo given a computer and a user

        :param computer: a Computer instance
        :param user: a User instance
        :return: an AuthInfo object associated with the given computer and user
        :raise aiida.common.exceptions.NotExistent: if the user is not configured to use computer
        :raise aiida.common.exceptions.ConfigurationError: if the user is configured
             more than once to use the computer! Should never happen
        """
        # Keep all function-level imports together at the top of the method.
        from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound

        from aiida.backends.sqlalchemy import get_scoped_session
        from aiida.backends.sqlalchemy.models.authinfo import DbAuthInfo

        session = get_scoped_session()

        try:
            authinfo = session.query(DbAuthInfo).filter_by(
                dbcomputer_id=computer.id,
                aiidauser_id=user.id,
            ).one()

            return self.from_dbmodel(authinfo)
        except NoResultFound as exception:
            raise exceptions.NotExistent(
                "The aiida user {} is not configured to use computer {}".format(
                    user.email, computer.name)) from exception
        except MultipleResultsFound as exception:
            raise exceptions.ConfigurationError(
                "The aiida user {} is configured more than once to use "
                "computer {}! Only one configuration is allowed".format(
                    user.email, computer.name)) from exception
示例#6
0
    def get(self, computer, user):
        """Return an entry from the collection that is configured for the given computer and user

        :param computer: a :class:`aiida.orm.implementation.computers.BackendComputer` instance
        :param user: a :class:`aiida.orm.implementation.users.BackendUser` instance
        :return: :class:`aiida.orm.implementation.authinfos.BackendAuthInfo`
        :raise aiida.common.exceptions.NotExistent: if no entry exists for the computer/user pair
        :raise aiida.common.exceptions.MultipleObjectsError: if multiple entries exist for the computer/user pair
        """
        # pylint: disable=import-error,no-name-in-module
        from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound

        session = get_scoped_session()

        try:
            authinfo = session.query(DbAuthInfo).filter_by(
                dbcomputer_id=computer.id, aiidauser_id=user.id).one()
        except NoResultFound as exception:
            # Chain the original exception so the SQLAlchemy error is not lost.
            raise exceptions.NotExistent(
                f'User<{user.email}> has no configuration for Computer<{computer.name}>'
            ) from exception
        except MultipleResultsFound as exception:
            raise exceptions.MultipleObjectsError(
                f'User<{user.email}> has multiple configurations for Computer<{computer.name}>'
            ) from exception
        else:
            return self.from_dbmodel(authinfo)
示例#7
0
 def remove(self, authinfo_id):
     """Remove the AuthInfo with the given id from the collection.

     :param authinfo_id: the id of the AuthInfo to delete
     :raise aiida.common.exceptions.NotExistent: if no AuthInfo with that id exists
     """
     from django.core.exceptions import ObjectDoesNotExist
     try:
         DbAuthInfo.objects.get(pk=authinfo_id).delete()
     except ObjectDoesNotExist as exception:
         raise exceptions.NotExistent(
             "AuthInfo with id '{}' not found".format(authinfo_id)) from exception
示例#8
0
    def delete(self, pk):
        """Remove a Node entry from the collection with the given id

        :param pk: id of the node to delete
        :raise aiida.common.exceptions.NotExistent: if no node with the given pk exists
        """
        try:
            # Use `get()` so that a missing node raises `ObjectDoesNotExist`:
            # `filter(pk=pk).delete()` succeeds silently when nothing matches,
            # which made the except branch below unreachable dead code.
            models.DbNode.objects.get(pk=pk).delete()  # pylint: disable=no-member
        except ObjectDoesNotExist as exc:
            # Chain from the caught exception instance, not the exception class.
            raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from exc
示例#9
0
    def get(self, pk):
        """Return a Node entry from the collection with the given id

        :param pk: id of the node
        :raise aiida.common.exceptions.NotExistent: if no node with the given pk exists
        """
        try:
            return self.ENTITY_CLASS.from_dbmodel(models.DbNode.objects.get(pk=pk), self.backend)
        except ObjectDoesNotExist as exc:
            # Chain from the caught exception instance rather than the class object.
            raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from exc
示例#10
0
    def remove(self, authinfo_id):
        """Remove the AuthInfo with the given id from the collection.

        :param authinfo_id: the id of the AuthInfo to delete
        :raise aiida.common.exceptions.NotExistent: if no AuthInfo with that id exists
        """
        from aiida.backends.sqlalchemy import get_scoped_session

        session = get_scoped_session()
        # `Query.delete()` returns the number of rows removed and never raises
        # `NoResultFound`, so the previous `except NoResultFound` branch was dead
        # code and a missing id passed silently. Check the row count instead.
        count = session.query(DbAuthInfo).filter_by(id=authinfo_id).delete()
        if not count:
            raise exceptions.NotExistent("AuthInfo with id '{}' not found".format(authinfo_id))
        session.commit()
示例#11
0
    def delete(self, pk):
        """Delete an entry from the collection.

        :param pk: the pk of the entry to delete
        :raise aiida.common.exceptions.NotExistent: if no AuthInfo with the given pk exists
        """
        # pylint: disable=import-error,no-name-in-module
        from django.core.exceptions import ObjectDoesNotExist
        try:
            DbAuthInfo.objects.get(pk=pk).delete()
        except ObjectDoesNotExist as exception:
            # Chain the cause so the Django error stays visible in the traceback.
            raise exceptions.NotExistent('AuthInfo<{}> does not exist'.format(pk)) from exception
示例#12
0
    def get_authinfo(self) -> 'AuthInfo':
        """Return the `AuthInfo` that is configured for the `Computer` set for this node.

        :return: `AuthInfo`
        :raises aiida.common.exceptions.NotExistent: if no computer has been set for this node
        """
        machine = self.computer

        if machine is None:
            raise exceptions.NotExistent('No computer has been set for this calculation')

        return machine.get_authinfo(self.user)
示例#13
0
    def delete(self, pk):
        """Remove a Node entry from the collection with the given id

        :param pk: id of the node to delete
        :raise aiida.common.exceptions.NotExistent: if no node with the given pk exists
        """
        session = get_scoped_session()

        try:
            # `.one()` raises `NoResultFound` when the pk is missing. The row must
            # then be deleted through the session: SQLAlchemy model instances have
            # no `.delete()` method, so the previous `.one().delete()` chain could
            # never actually remove the row.
            row = session.query(models.DbNode).filter_by(id=pk).one()
            session.delete(row)
            session.commit()
        except NoResultFound as exc:
            # Chain from the caught instance, not the exception class object.
            raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from exc
示例#14
0
    def get(self, **filters):
        """
        Get the group matching the given filters

        :param filters: the attributes of the group to get
        :return: the group
        :rtype: :class:`aiida.orm.implementation.BackendGroup`
        :raises aiida.common.exceptions.MultipleObjectsError: if more than one group matches
        :raises aiida.common.exceptions.NotExistent: if no group matches
        """
        matches = self.query(**filters)

        if not matches:
            raise exceptions.NotExistent("No group bound matching criteria '{}'".format(filters))

        if len(matches) > 1:
            raise exceptions.MultipleObjectsError("Found multiple groups matching criteria '{}'".format(filters))

        return matches[0]
示例#15
0
    def get(self, pk):
        """Return a Node entry from the collection with the given id

        :param pk: id of the node
        :raise aiida.common.exceptions.NotExistent: if no node with the given pk exists
        """
        session = get_scoped_session()

        try:
            return self.ENTITY_CLASS.from_dbmodel(
                session.query(models.DbNode).filter_by(id=pk).one(),
                self.backend)
        except NoResultFound as exc:
            # Chain from the caught exception instance rather than the class object.
            raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from exc
示例#16
0
    def get_authinfo(self):
        """Return the `AuthInfo` that is configured for the `Computer` set for this node.

        :return: `AuthInfo`
        :raises aiida.common.exceptions.NotExistent: if no computer has been set for this node
        """
        from aiida.orm.authinfos import AuthInfo

        machine = self.computer
        if machine is None:
            raise exceptions.NotExistent(
                'No computer has been set for this calculation')

        backend_entity = self.backend.authinfos.get(computer=machine, user=self.user)
        return AuthInfo.from_backend_entity(backend_entity)
示例#17
0
 def get(self, email):
     """
     Get a user using the email address
     :param email: The user's email address
     :return: The corresponding user object
     :raises: :class:`aiida.common.exceptions.MultipleObjectsError`, :class:`aiida.common.exceptions.NotExistent`
     """
     matches = self.find(email=email)

     # Guard clauses instead of nested if/else: no match, then ambiguity, then success.
     if not matches:
         raise exceptions.NotExistent()
     if len(matches) > 1:
         raise exceptions.MultipleObjectsError()
     return matches[0]
示例#18
0
    def delete(self, pk):
        """Delete an entry from the collection.

        :param pk: the pk of the entry to delete
        :raise aiida.common.exceptions.NotExistent: if no AuthInfo with the given pk exists
        """
        # pylint: disable=import-error,no-name-in-module
        from sqlalchemy.orm.exc import NoResultFound

        session = get_scoped_session()

        try:
            # `.one()` raises `NoResultFound` when the pk is missing. Delete the
            # row through the session: model instances have no `.delete()` method,
            # so the previous `.one().delete()` chain could never remove the row.
            row = session.query(DbAuthInfo).filter_by(id=pk).one()
            session.delete(row)
            session.commit()
        except NoResultFound as exc:
            raise exceptions.NotExistent(f'AuthInfo<{pk}> does not exist') from exc
示例#19
0
    def delete(self, comment_id):
        """
        Remove a Comment from the collection with the given id

        :param comment_id: the id of the comment to delete
        :type comment_id: int

        :raises TypeError: if ``comment_id`` is not an `int`
        :raises `~aiida.common.exceptions.NotExistent`: if Comment with ID ``comment_id`` is not found
        """
        if not isinstance(comment_id, int):
            raise TypeError('comment_id must be an int')

        try:
            models.DbComment.objects.get(id=comment_id).delete()
        except ObjectDoesNotExist as exc:
            # Chain the cause so the Django error stays visible in the traceback.
            raise exceptions.NotExistent(f"Comment with id '{comment_id}' not found") from exc
示例#20
0
    def delete(self, log_id):
        """
        Remove a Log entry from the collection with the given id

        :param log_id: id of the Log to delete
        :type log_id: int

        :raises TypeError: if ``log_id`` is not an `int`
        :raises `~aiida.common.exceptions.NotExistent`: if Log with ID ``log_id`` is not found
        """
        if not isinstance(log_id, int):
            raise TypeError('log_id must be an int')

        try:
            models.DbLog.objects.get(id=log_id).delete()
        except ObjectDoesNotExist as exc:
            # Chain the cause so the Django error stays visible in the traceback.
            raise exceptions.NotExistent(f"Log with id '{log_id}' not found") from exc
示例#21
0
    def _parse_trajectory(self):
        """CP2K trajectory parser."""

        from ase import Atoms
        from aiida_cp2k.utils import parse_cp2k_trajectory

        restart_fname = self.node.process_class._DEFAULT_RESTART_FILE_NAME  # pylint: disable=protected-access

        # Without the restart file there is nothing to parse from.
        if restart_fname not in self.retrieved.list_object_names():
            raise exceptions.NotExistent("No restart file available, so the output trajectory can't be extracted")

        try:
            content = self.retrieved.get_object_content(restart_fname)
        except IOError:
            return self.exit_codes.ERROR_OUTPUT_STDOUT_READ

        return StructureData(ase=Atoms(**parse_cp2k_trajectory(content)))
示例#22
0
    def get_reference_bandsdata(self):
        """
        Returns the reference BandsData, using the set uuid via
        set_reference_bandsdata

        :return: a BandsData instance
        :raise AttributeError: if the bandsdata has not been set yet
        :raise exceptions.NotExistent: if the bandsdata uuid did not retrieve bandsdata
        """
        from aiida.orm import load_node
        try:
            uuid = self.get_attribute('reference_bandsdata_uuid')
        except AttributeError as exception:
            # Chain the cause so the original attribute error is kept in the traceback.
            raise AttributeError('BandsData has not been set for this instance') from exception
        try:
            bands = load_node(uuid=uuid)
        except exceptions.NotExistent as exception:
            raise exceptions.NotExistent(
                'The bands referenced to this class have not been found in this database.') from exception
        return bands
示例#23
0
    def get_node_by_label(self, label):
        """Return the node from list for given label.

        :return: node that corresponds to the given label
        :raises aiida.common.NotExistent: if the label is not present among the link_triples
        """
        found = None

        for triple in self.link_triples:
            if triple.link_label != label:
                continue
            # A second hit for the same label means the lookup is ambiguous.
            if found is not None:
                raise exceptions.MultipleObjectsError(
                    f'more than one neighbor with the label {label} found'
                )
            found = triple.node

        if found is None:
            raise exceptions.NotExistent(f'no neighbor with the label {label} found')

        return found
示例#24
0
    def delete(self, log_id):
        """
        Remove a Log entry from the collection with the given id

        :param log_id: id of the Log to delete
        :type log_id: int

        :raises TypeError: if ``log_id`` is not an `int`
        :raises `~aiida.common.exceptions.NotExistent`: if Log with ID ``log_id`` is not found
        """
        if not isinstance(log_id, int):
            raise TypeError('log_id must be an int')

        session = get_scoped_session()

        try:
            # `.one()` raises `NoResultFound` for a missing id. The row must be
            # deleted through the session: model instances have no `.delete()`
            # method, so the previous `.one().delete()` chain could never work.
            row = session.query(models.DbLog).filter_by(id=log_id).one()
            session.delete(row)
            session.commit()
        except NoResultFound as exc:
            session.rollback()
            raise exceptions.NotExistent(f"Log with id '{log_id}' not found") from exc
示例#25
0
    def get_authinfo(self, user):
        """
        Return the aiida.orm.authinfo.AuthInfo instance for the
        given user on this computer, if the computer
        is configured for the given user.

        :param user: a User instance.
        :return: a AuthInfo instance
        :raise aiida.common.NotExistent: if the computer is not configured for the given
            user.
        """
        from . import authinfos

        collection = authinfos.AuthInfo.objects(self.backend)

        try:
            # Look up the configuration entry for this (computer, user) pair.
            return collection.get(dbcomputer_id=self.id, aiidauser_id=user.id)
        except exceptions.NotExistent as exc:
            raise exceptions.NotExistent(
                f'Computer `{self.label}` (ID={self.id}) not configured for user `{user.get_short_name()}` '
                f'(ID={user.id}) - use `verdi computer configure` first'
            ) from exc
示例#26
0
    def delete(self, comment_id):
        """
        Remove a Comment from the collection with the given id

        :param comment_id: the id of the comment to delete
        :type comment_id: int

        :raises TypeError: if ``comment_id`` is not an `int`
        :raises `~aiida.common.exceptions.NotExistent`: if Comment with ID ``comment_id`` is not found
        """
        if not isinstance(comment_id, int):
            raise TypeError('comment_id must be an int')

        session = get_scoped_session()

        try:
            # `.one()` raises `NoResultFound` for a missing id. Delete the row via
            # the session: model instances have no `.delete()` method, so the
            # previous `.one().delete()` chain could never remove the row.
            row = session.query(models.DbComment).filter_by(id=comment_id).one()
            session.delete(row)
            session.commit()
        except NoResultFound as exc:
            session.rollback()
            raise exceptions.NotExistent(
                "Comment with id '{}' not found".format(comment_id)) from exc
示例#27
0
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        :raises aiida.common.exceptions.NotExistent: if ``parent_folder`` has no parent calculation
        :raises aiida.common.exceptions.UniquenessError: if ``parent_folder`` has multiple parent calculations
        :raises aiida.common.exceptions.InputValidationError: if the inputs are inconsistent or incomplete
        """
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(),
                                       dict_name='settings')
        else:
            settings = {}

        parent_folder = self.inputs.parent_folder
        parent_calcs = parent_folder.get_incoming(
            node_class=orm.CalcJobNode).all()

        if not parent_calcs:
            raise exceptions.NotExistent(
                'parent_folder<{}> has no parent calculation'.format(
                    parent_folder.pk))
        elif len(parent_calcs) > 1:
            raise exceptions.UniquenessError(
                'parent_folder<{}> has multiple parent calculations'.format(
                    parent_folder.pk))

        parent_calc = parent_calcs[0].node

        # If the parent calculation is a `PhCalculation` we are restarting
        restart_flag = parent_calc.process_type == 'aiida.calculations:quantumespresso.ph'

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'
                .format(parent_calc.computer.get_name()))

        # put by default, default_parent_output_folder = ./out
        try:
            default_parent_output_folder = parent_calc.process_class._OUTPUT_SUBFOLDER
        except AttributeError:
            try:
                default_parent_output_folder = parent_calc._get_output_folder()
            except AttributeError:
                raise exceptions.InputValidationError(
                    'parent calculation does not have a default output subfolder'
                )
        parent_calc_out_subfolder = settings.pop('PARENT_CALC_OUT_SUBFOLDER',
                                                 default_parent_output_folder)

        # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(),
                                     dict_name='parameters')
        parameters = {
            k: _lowercase_dict(v, dict_name=k)
            for k, v in six.iteritems(parameters)
        }

        prepare_for_d3 = settings.pop('PREPARE_FOR_D3', False)
        if prepare_for_d3:
            # Rebind with concatenation instead of `+=`: augmented assignment
            # mutates the (presumably class-level) `_blocked_keywords` list in
            # place, which would leak the D3 entries into every other instance
            # and grow the list on each call.
            self._blocked_keywords = self._blocked_keywords + [
                ('INPUTPH', 'fildrho'),
                ('INPUTPH', 'drho_star%open'),
                ('INPUTPH', 'drho_star%ext'),
                ('INPUTPH', 'drho_star%dir')]

        for namelist, flag in self._blocked_keywords:
            if namelist in parameters:
                if flag in parameters[namelist]:
                    raise exceptions.InputValidationError(
                        "Cannot specify explicitly the '{}' flag in the '{}' namelist or card."
                        .format(flag, namelist))

        if 'INPUTPH' not in parameters:
            raise exceptions.InputValidationError(
                'required namelist INPUTPH not specified')

        parameters['INPUTPH']['outdir'] = self._OUTPUT_SUBFOLDER
        parameters['INPUTPH']['iverbosity'] = 1
        parameters['INPUTPH']['prefix'] = self._PREFIX
        parameters['INPUTPH']['fildyn'] = self._OUTPUT_DYNAMICAL_MATRIX_PREFIX

        if prepare_for_d3:
            parameters['INPUTPH']['fildrho'] = self._DRHO_PREFIX
            parameters['INPUTPH']['drho_star%open'] = True
            parameters['INPUTPH']['drho_star%ext'] = self._DRHO_STAR_EXT
            parameters['INPUTPH']['drho_star%dir'] = self._FOLDER_DRHO

        try:
            mesh, offset = self.inputs.qpoints.get_kpoints_mesh()

            if any([i != 0. for i in offset]):
                raise NotImplementedError(
                    'Computation of phonons on a mesh with non zero offset is not implemented, at the level of ph.x'
                )

            parameters['INPUTPH']['ldisp'] = True
            parameters['INPUTPH']['nq1'] = mesh[0]
            parameters['INPUTPH']['nq2'] = mesh[1]
            parameters['INPUTPH']['nq3'] = mesh[2]

            postpend_text = None

        except AttributeError:
            # this is the case where no mesh was set. Maybe it's a list
            try:
                list_of_points = self.inputs.qpoints.get_kpoints(
                    cartesian=True)
            except AttributeError:
                # In this case, there are no info on the qpoints at all
                raise exceptions.InputValidationError(
                    'Input `qpoints` contains neither a mesh nor a list of points'
                )

            # change to 2pi/a coordinates
            lattice_parameter = numpy.linalg.norm(self.inputs.qpoints.cell[0])
            list_of_points *= lattice_parameter / (2. * numpy.pi)

            # add here the list of point coordinates
            if len(list_of_points) > 1:
                parameters['INPUTPH']['qplot'] = True
                parameters['INPUTPH']['ldisp'] = True
                postpend_text = u'{}\n'.format(len(list_of_points))
                for points in list_of_points:
                    postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}  1\n'.format(
                        *points)

                # Note: the weight is fixed to 1, because ph.x calls these
                # things weights but they are not such. If they are going to
                # exist with the meaning of weights, they will be supported
            else:
                parameters['INPUTPH']['ldisp'] = False
                postpend_text = u''
                for points in list_of_points:
                    postpend_text += u'{0:18.10f} {1:18.10f} {2:18.10f}\n'.format(
                        *points)

        # customized namelists, otherwise not present in the distributed ph code
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input "
                    'node, must be a list of strings')
        except KeyError:  # list of namelists not specified in the settings; do automatic detection
            namelists_toprint = self._compulsory_namelists

        # create a folder for the dynamical matrices
        if not restart_flag:  # if it is a restart, it will be copied over
            folder.get_subfolder(self._FOLDER_DYNAMICAL_MATRIX, create=True)

        with folder.open(self.metadata.options.input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write(u'&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(six.iteritems(namelist)):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write(u'/\n')

            # add list of qpoints if required
            if postpend_text is not None:
                infile.write(postpend_text)

        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        # copy the parent scratch
        symlink = settings.pop('PARENT_FOLDER_SYMLINK',
                               self._default_symlink_usage)  # a boolean
        if symlink:
            # I create a symlink to each file/folder in the parent ./out
            folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              parent_calc_out_subfolder,
                              '*'), self._OUTPUT_SUBFOLDER))

            # I also create a symlink for the ./pseudo folder
            # TODO: suppress this when the recover option of QE will be fixed
            # (bug when trying to find pseudo file)
            remote_symlink_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._get_pseudo_folder()),
                 self._get_pseudo_folder()))
        else:
            # here I copy the whole folder ./out
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              parent_calc_out_subfolder),
                 self._OUTPUT_SUBFOLDER))
            # I also copy the ./pseudo folder
            # TODO: suppress this when the recover option of QE will be fixed
            # (bug when trying to find pseudo file)
            remote_copy_list.append(
                (parent_folder.computer.uuid,
                 os.path.join(parent_folder.get_remote_path(),
                              self._get_pseudo_folder()),
                 self._get_pseudo_folder()))

        if restart_flag:  # in this case, copy in addition also the dynamical matrices
            if symlink:
                remote_symlink_list.append(
                    (parent_folder.computer.uuid,
                     os.path.join(parent_folder.get_remote_path(),
                                  self._FOLDER_DYNAMICAL_MATRIX),
                     self._FOLDER_DYNAMICAL_MATRIX))

            else:
                # copy the dynamical matrices
                # no need to copy the _ph0, since I copied already the whole ./out folder
                remote_copy_list.append(
                    (parent_folder.computer.uuid,
                     os.path.join(parent_folder.get_remote_path(),
                                  self._FOLDER_DYNAMICAL_MATRIX), '.'))

        # Create an `.EXIT` file if `only_initialization` flag in `settings` is set to `True`
        if settings.pop('ONLY_INITIALIZATION', False):
            with folder.open('{}.EXIT'.format(self._PREFIX), 'w') as handle:
                handle.write('\n')

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (
            list(settings.pop('CMDLINE', [])) +
            ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file and the xml file
        filepath_xml_tensor = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0',
                                           '{}.phsave'.format(self._PREFIX))
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list.append(self._FOLDER_DYNAMICAL_MATRIX)
        calcinfo.retrieve_list.append(
            os.path.join(filepath_xml_tensor,
                         self._OUTPUT_XML_TENSOR_FILE_NAME))
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError(
                '`settings` contained unexpected keys: {}'.format(
                    unknown_keys))

        return calcinfo
示例#28
0
    def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        :raises ~aiida.common.exceptions.NotExistent: if either parent remote folder has no creator calculation.
        :raises ~aiida.common.exceptions.InputValidationError: if the inputs are invalid, inconsistent or incomplete.
        """

        def test_offset(offset):
            """Raise ``NotImplementedError`` if the grid has a non-zero offset (not supported by epw.x)."""
            if any([i != 0. for i in offset]):
                raise NotImplementedError(
                    'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                    'at the level of epw.x')

        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings = {}

        # Copy nscf folder
        parent_folder_nscf = self.inputs.parent_folder_nscf
        parent_calc_nscf = parent_folder_nscf.creator

        if parent_calc_nscf is None:
            raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_nscf.pk))

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_nscf.computer.get_name()))

        # put by default, default_parent_output_folder = ./out
        parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER # pylint: disable=protected-access

        # Now phonon folder
        parent_folder_ph = self.inputs.parent_folder_ph
        parent_calc_ph = parent_folder_ph.creator

        # The phonon parent folder must also have a creator calculation; without this check the
        # computer comparison below would fail with an obscure AttributeError on `None`.
        if parent_calc_ph is None:
            raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_ph.pk))

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_ph.computer.get_name()))

        # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

        if 'INPUTEPW' not in parameters:
            raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

        # Force the values this plugin relies on, overriding anything the user may have set.
        parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
        parameters['INPUTEPW']['iverbosity'] = 1
        parameters['INPUTEPW']['prefix'] = self._PREFIX

        # Coarse q-point grid: must be a regular mesh without offset
        try:
            mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nq1'] = mesh[0]
            parameters['INPUTEPW']['nq2'] = mesh[1]
            parameters['INPUTEPW']['nq3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

        # Coarse k-point grid: must be a regular mesh without offset
        try:
            mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nk1'] = mesh[0]
            parameters['INPUTEPW']['nk2'] = mesh[1]
            parameters['INPUTEPW']['nk3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

        # Fine q-point grid: must be a regular mesh without offset
        try:
            mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nqf1'] = mesh[0]
            parameters['INPUTEPW']['nqf2'] = mesh[1]
            parameters['INPUTEPW']['nqf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

        # Fine k-point grid: must be a regular mesh without offset
        try:
            mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nkf1'] = mesh[0]
            parameters['INPUTEPW']['nkf2'] = mesh[1]
            parameters['INPUTEPW']['nkf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception

        # customized namelists, otherwise not present in the distributed epw code
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input "
                    'node, must be a list of strings')
        except KeyError:  # list of namelists not specified in the settings; do automatic detection
            namelists_toprint = self._compulsory_namelists

        # create the save folder with dvscf and dyn files.
        folder.get_subfolder(self._FOLDER_SAVE, create=True)

        # List of IBZ q-point to be added below EPW. To be removed when removed from EPW.
        qibz_ar = []
        for key, value in sorted(parent_folder_ph.creator.outputs.output_parameters.get_dict().items()):
            if key.startswith('dynamical_matrix_'):
                qibz_ar.append(value['q_point'])

        qibz_node = orm.ArrayData()
        qibz_node.set_array('qibz', np.array(qibz_ar))

        list_of_points = qibz_node.get_array('qibz')
        # Number of q-point in the irreducible Brillouin Zone.
        # NOTE(review): this measures the length of the *first row* of the array; if `qibz` has
        # shape (n_qpoints, 3) this evaluates to 3 rather than n_qpoints — confirm this is intended.
        nqpt = len(list_of_points[0, :])

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            postpend_text = '{} cartesian\n'.format(len(list_of_points))
            for points in list_of_points:
                postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

        # Write the input file: one namelist after the other, then the q-point card if needed.
        with folder.open(self.metadata.options.input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write('&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(namelist.items()):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write('/\n')

            # add list of qpoints if required
            if postpend_text is not None:
                infile.write(postpend_text)

        # Anything left in `parameters` was not consumed by a printed namelist and is invalid.
        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        # copy the parent scratch
        symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
        if symlink:
            # I create a symlink to each file/folder in the parent ./out
            folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

            remote_symlink_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
                self._OUTPUT_SUBFOLDER
            ))

        else:
            # here I copy the whole folder ./out
            remote_copy_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
                self._OUTPUT_SUBFOLDER
            ))

        prefix = self._PREFIX

        # Collect the dynamical matrices and dvscf files from the phonon parent into ./save
        for iqpt in range(1, nqpt+1):
            label = str(iqpt)
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q0'))
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-'+label)
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q'+label))

            if iqpt == 1:
                # The first q-point keeps its dvscf directly in `_ph0`, alongside the `.phsave` folder
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.phsave')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'))
            else:
                # Subsequent q-points store their dvscf files in per-q subdirectories
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.q_'+label+'/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        # Any keys left in `settings` at this point were not consumed and are therefore invalid
        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

        return calcinfo
示例#29
0
def traverse_graph(
    starting_pks: Iterable[int],
    max_iterations: Optional[int] = None,
    get_links: bool = False,
    links_forward: Iterable[LinkType] = (),
    links_backward: Iterable[LinkType] = (),
    missing_callback: Optional[Callable[[Iterable[int]], None]] = None
) -> TraverseGraphOutput:
    """
    This function will return the set of all nodes that can be connected
    to a list of initial nodes through any sequence of specified links.
    Optionally, it may also return the links that connect these nodes.

    :param starting_pks: Contains the (valid) pks of the starting nodes.

    :param max_iterations:
        The number of iterations to apply the set of rules (a value of 'None' will
        iterate until no new nodes are added).

    :param get_links: Pass True to also return the links between all nodes (found + initial).

    :param links_forward: List with all the links that should be traversed in the forward direction.
    :param links_backward: List with all the links that should be traversed in the backward direction.

    :param missing_callback: A callback to handle missing starting_pks or if None raise NotExistent

    :return: A dictionary with the set of traversed 'nodes' and, when ``get_links`` is True, the 'links'.
    :raises TypeError: if ``max_iterations``, ``starting_pks`` or the link lists have an invalid type.
    :raises ~aiida.common.exceptions.NotExistent:
        if any of the starting pks is not in the database and no ``missing_callback`` was provided.
    """
    # pylint: disable=too-many-locals,too-many-statements,too-many-branches

    if max_iterations is None:
        max_iterations = cast(int, inf)
    elif not (isinstance(max_iterations, int) or max_iterations is inf):
        raise TypeError('Max_iterations has to be an integer or infinity')

    # Validate the forward link types and collect their string values for the edge filters.
    linktype_list = []
    for linktype in links_forward:
        if not isinstance(linktype, LinkType):
            raise TypeError(
                f'links_forward should contain links, but one of them is: {type(linktype)}'
            )
        linktype_list.append(linktype.value)
    filters_forwards = {'type': {'in': linktype_list}}

    # Same validation for the backward link types.
    linktype_list = []
    for linktype in links_backward:
        if not isinstance(linktype, LinkType):
            raise TypeError(
                f'links_backward should contain links, but one of them is: {type(linktype)}'
            )
        linktype_list.append(linktype.value)
    filters_backwards = {'type': {'in': linktype_list}}

    if not isinstance(starting_pks, Iterable):  # pylint: disable=isinstance-second-argument-not-valid-type
        raise TypeError(
            f'starting_pks must be an iterable\ninstead, it is {type(starting_pks)}'
        )

    # Materialize the iterable exactly once: a one-shot iterator (e.g. a generator) would
    # otherwise be exhausted by the type check below and silently yield an empty set.
    starting_pks = list(starting_pks)

    if any(not isinstance(pk, int) for pk in starting_pks):
        raise TypeError(
            f'one of the starting_pks is not of type int:\n {starting_pks}')
    operational_set = set(starting_pks)

    if not operational_set:
        if get_links:
            return {'nodes': set(), 'links': set()}
        return {'nodes': set(), 'links': None}

    # Check that all starting nodes actually exist in the database.
    query_nodes = orm.QueryBuilder()
    query_nodes.append(orm.Node,
                       project=['id'],
                       filters={'id': {
                           'in': operational_set
                       }})
    existing_pks = set(query_nodes.all(flat=True))
    missing_pks = operational_set.difference(existing_pks)
    if missing_pks and missing_callback is None:
        raise exceptions.NotExistent(
            f'The following pks are not in the database and must be pruned before this call: {missing_pks}'
        )
    elif missing_pks and missing_callback is not None:
        missing_callback(missing_pks)

    rules = []
    basket = Basket(nodes=existing_pks)

    # When max_iterations is finite, the order of traversal may affect the result
    # (its not the same to first go backwards and then forwards than vice-versa)
    # In order to make it order-independent, the result of the first operation needs
    # to be stashed and the second operation must be performed only on the nodes
    # that were already in the set at the begining of the iteration: this way, both
    # rules are applied on the same set of nodes and the order doesn't matter.
    # The way to do this is saving and seting the walkers at the right moments only
    # when both forwards and backwards rules are present.
    if links_forward and links_backward:
        stash = basket.get_template()
        rules += [RuleSaveWalkers(stash)]

    if links_forward:
        query_outgoing = orm.QueryBuilder()
        query_outgoing.append(orm.Node, tag='sources')
        query_outgoing.append(orm.Node,
                              edge_filters=filters_forwards,
                              with_incoming='sources')
        rule_outgoing = UpdateRule(query_outgoing,
                                   max_iterations=1,
                                   track_edges=get_links)
        rules += [rule_outgoing]

    if links_forward and links_backward:
        rules += [RuleSetWalkers(stash)]

    if links_backward:
        query_incoming = orm.QueryBuilder()
        query_incoming.append(orm.Node, tag='sources')
        query_incoming.append(orm.Node,
                              edge_filters=filters_backwards,
                              with_outgoing='sources')
        rule_incoming = UpdateRule(query_incoming,
                                   max_iterations=1,
                                   track_edges=get_links)
        rules += [rule_incoming]

    rulesequence = RuleSequence(rules, max_iterations=max_iterations)

    results = rulesequence.run(basket)

    output = {}
    output['nodes'] = results.nodes.keyset
    output['links'] = None
    if get_links:
        output['links'] = results['nodes_nodes'].keyset

    return cast(TraverseGraphOutput, output)
示例#30
0
def traverse_graph(starting_pks, max_iterations=None, get_links=False, links_forward=(), links_backward=()):
    """
    This function will return the set of all nodes that can be connected
    to a list of initial nodes through any sequence of specified links.
    Optionally, it may also return the links that connect these nodes.

    :type starting_pks: list or tuple or set
    :param starting_pks: Contains the (valid) pks of the starting nodes.

    :type max_iterations: int or None
    :param max_iterations:
        The number of iterations to apply the set of rules (a value of 'None' will
        iterate until no new nodes are added).

    :param bool get_links:
        Pass True to also return the links between all nodes (found + initial).

    :type links_forward: aiida.common.links.LinkType
    :param links_forward:
        List with all the links that should be traversed in the forward direction.

    :type links_backward: aiida.common.links.LinkType
    :param links_backward:
        List with all the links that should be traversed in the backward direction.

    :return: A dictionary with the set of traversed 'nodes' and, when ``get_links`` is True, the 'links'.
    :raises TypeError: if ``max_iterations``, ``starting_pks`` or the link lists have an invalid type.
    :raises aiida.common.exceptions.NotExistent: if any of the starting pks is not in the database.
    """
    # pylint: disable=too-many-locals,too-many-statements,too-many-branches
    from aiida import orm
    from aiida.tools.graph.age_entities import Basket
    from aiida.tools.graph.age_rules import UpdateRule, RuleSequence, RuleSaveWalkers, RuleSetWalkers
    from aiida.common import exceptions

    if max_iterations is None:
        max_iterations = inf
    elif not (isinstance(max_iterations, int) or max_iterations is inf):
        raise TypeError('Max_iterations has to be an integer or infinity')

    # Validate the forward link types and collect their string values for the edge filters.
    linktype_list = []
    for linktype in links_forward:
        if not isinstance(linktype, LinkType):
            raise TypeError('links_forward should contain links, but one of them is: {}'.format(type(linktype)))
        linktype_list.append(linktype.value)
    filters_forwards = {'type': {'in': linktype_list}}

    # Same validation for the backward link types.
    linktype_list = []
    for linktype in links_backward:
        if not isinstance(linktype, LinkType):
            raise TypeError('links_backward should contain links, but one of them is: {}'.format(type(linktype)))
        linktype_list.append(linktype.value)
    filters_backwards = {'type': {'in': linktype_list}}

    if not isinstance(starting_pks, (list, set, tuple)):
        raise TypeError('starting_pks must be of type list, set or tuple\ninstead, it is {}'.format(type(starting_pks)))

    # Nothing to traverse: return early without touching the database.
    if not starting_pks:
        if get_links:
            output = {'nodes': set(), 'links': set()}
        else:
            output = {'nodes': set(), 'links': None}
        return output

    if any(not isinstance(pk, int) for pk in starting_pks):
        raise TypeError('one of the starting_pks is not of type int:\n {}'.format(starting_pks))
    operational_set = set(starting_pks)

    # Check that all starting nodes actually exist in the database.
    query_nodes = orm.QueryBuilder()
    query_nodes.append(orm.Node, project=['id'], filters={'id': {'in': operational_set}})
    existing_pks = set(query_nodes.all(flat=True))
    missing_pks = operational_set.difference(existing_pks)
    if missing_pks:
        raise exceptions.NotExistent(
            'The following pks are not in the database and must be pruned before this call: {}'.format(missing_pks)
        )

    rules = []
    basket = Basket(nodes=operational_set)

    # When max_iterations is finite, the order of traversal may affect the result
    # (its not the same to first go backwards and then forwards than vice-versa)
    # In order to make it order-independent, the result of the first operation needs
    # to be stashed and the second operation must be performed only on the nodes
    # that were already in the set at the begining of the iteration: this way, both
    # rules are applied on the same set of nodes and the order doesn't matter.
    # The way to do this is saving and seting the walkers at the right moments only
    # when both forwards and backwards rules are present.
    if links_forward and links_backward:
        stash = basket.get_template()
        rules += [RuleSaveWalkers(stash)]

    if links_forward:
        query_outgoing = orm.QueryBuilder()
        query_outgoing.append(orm.Node, tag='sources')
        query_outgoing.append(orm.Node, edge_filters=filters_forwards, with_incoming='sources')
        rule_outgoing = UpdateRule(query_outgoing, max_iterations=1, track_edges=get_links)
        rules += [rule_outgoing]

    if links_forward and links_backward:
        rules += [RuleSetWalkers(stash)]

    if links_backward:
        query_incoming = orm.QueryBuilder()
        query_incoming.append(orm.Node, tag='sources')
        query_incoming.append(orm.Node, edge_filters=filters_backwards, with_outgoing='sources')
        rule_incoming = UpdateRule(query_incoming, max_iterations=1, track_edges=get_links)
        rules += [rule_incoming]

    rulesequence = RuleSequence(rules, max_iterations=max_iterations)

    results = rulesequence.run(basket)

    output = {}
    output['nodes'] = results.nodes.keyset
    output['links'] = None
    if get_links:
        output['links'] = results['nodes_nodes'].keyset

    return output