Example 1
    def query_by_attrs(cls, query=None, **kwargs):
        from aiida.orm import Group, QueryBuilder

        family_name = kwargs.pop('family_name', None)
        if family_name:
            # Restrict the attribute query to members of the given POTCAR family group
            group_filters = {'label': {'==': family_name}, 'type_string': {'==': cls.potcar_family_type_string}}
            query = QueryBuilder()
            query.append(Group, tag='family', filters=group_filters)
            query.append(cls, tag=cls._query_label, with_group='family')
        return super(PotcarData, cls).query_by_attrs(query=query, **kwargs)
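
A minimal usage sketch for the method above, assuming a loaded AiiDA profile with aiida-vasp installed; the family label 'PBE.54' and the attribute keyword `symbol` are illustrative placeholders, not part of the snippet:

potcars = PotcarData.query_by_attrs(family_name='PBE.54', symbol='In')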
Example 2
def get_all_structure_from_structuregroup(structure_group):
    """Return all StructureData nodes that belong to the given structure group."""
    from aiida.orm import Group, QueryBuilder, StructureData

    if not structure_group:
        return []

    sqb = QueryBuilder()
    sqb.append(Group, filters={'label': structure_group.label}, tag='g')
    # `member_of` was the pre-1.0 QueryBuilder keyword; `with_group` is the current spelling
    sqb.append(StructureData, with_group='g')

    return [x[0] for x in sqb.all()]
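
A short usage sketch, assuming a profile is loaded and a group with the (hypothetical) label 'my_structures' exists:

from aiida.orm import load_group

group = load_group('my_structures')  # hypothetical group label
structures = get_all_structure_from_structuregroup(group)
print('found {} structures'.format(len(structures)))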
Example 3
    def get_all_parents(self, node_pks, return_values=('id', )):
        """
        Get all the parents (ancestors) of the given nodes.

        :param node_pks: one node pk or an iterable of node pks
        :return: a list of projected values, one entry per ancestor, as selected by ``return_values``
        """
        from aiida.orm import Node, QueryBuilder

        # Accept a single pk as well as an iterable of pks
        if isinstance(node_pks, int):
            node_pks = [node_pks]

        qb = QueryBuilder()
        qb.append(Node, tag='low_node', filters={'id': {'in': node_pks}})
        qb.append(Node, with_descendants='low_node', project=return_values)
        return qb.all()
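
For instance, projecting both pk and uuid of every ancestor (the pks and the `backend` instance are placeholders):

parents = backend.get_all_parents([1234, 5678], return_values=('id', 'uuid'))
for pk, uuid in parents:
    print(pk, uuid)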
Example 4
def process_pause(processes, all_entries, timeout, wait):
    """Pause running processes."""
    from aiida.orm import ProcessNode, QueryBuilder

    controller = get_manager().get_process_controller()

    if processes and all_entries:
        raise click.BadOptionUsage(
            'all',
            'cannot specify individual processes and the `--all` flag at the same time.'
        )

    if not processes and all_entries:
        active_states = options.active_process_states()
        builder = QueryBuilder().append(
            ProcessNode,
            filters={'attributes.process_state': {
                'in': active_states
            }})
        processes = [entry[0] for entry in builder.all()]

    futures = {}
    for process in processes:

        if process.is_terminated:
            echo.echo_error('Process<{}> is already terminated'.format(
                process.pk))
            continue

        try:
            future = controller.pause_process(
                process.pk, msg='Paused through `verdi process pause`')
        except communications.UnroutableError:
            echo.echo_error('Process<{}> is unreachable'.format(process.pk))
        else:
            futures[future] = process

    process_actions(futures, 'pause', 'pausing', 'paused', wait, timeout)
Example 5
    def _get_unique_node_property(self, project: str) -> Union[Node, Any]:
        query = QueryBuilder(limit=1)
        query.append(self.AIIDA_ENTITY,
                     filters={"id": self._pk},
                     project=project)
        if query.count() != 1:
            raise AiidaEntityNotFound(
                f"Could not find {self.AIIDA_ENTITY} with PK {self._pk}.")
        return query.first()[0]
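
A sketch of a call on a (hypothetical) translator instance; 'ctime' is just an example of a single-column projection:

ctime = translator._get_unique_node_property('ctime')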
Example 6
def group_remove_nodes(group, nodes, clear, force):
    """Remove nodes from a group."""
    from aiida.orm import QueryBuilder, Group, Node

    label = group.label
    klass = group.__class__.__name__

    if nodes and clear:
        echo.echo_critical(
            'Specify either the `--clear` flag to remove all nodes or the identifiers of the nodes you want to remove.'
        )

    if not force:

        if nodes:
            node_pks = [node.pk for node in nodes]

            query = QueryBuilder()
            query.append(Group, filters={'id': group.pk}, tag='group')
            query.append(Node,
                         with_group='group',
                         filters={'id': {
                             'in': node_pks
                         }},
                         project='id')

            group_node_pks = query.all(flat=True)

            if not group_node_pks:
                echo.echo_critical(
                    f'None of the specified nodes are in {klass}<{label}>.')

            if len(node_pks) > len(group_node_pks):
                node_pks = set(node_pks).difference(set(group_node_pks))
                echo.echo_warning(
                    f'{len(node_pks)} nodes with PK {node_pks} are not in {klass}<{label}>.'
                )

            message = f'Are you sure you want to remove {len(group_node_pks)} nodes from {klass}<{label}>?'

        elif clear:
            message = f'Are you sure you want to remove ALL the nodes from {klass}<{label}>?'
        else:
            echo.echo_critical(
                f'No nodes were provided for removal from {klass}<{label}>.')

        click.confirm(message, abort=True)

    if clear:
        group.clear()
    else:
        group.remove_nodes(nodes)
Example 7
    def get(self):
        from aiida.orm import QueryBuilder, Dict

        qb = QueryBuilder()
        qb.append(Dict, project=['id', 'ctime', 'attributes'], tag='pdata')
        qb.order_by({'pdata': {'ctime': 'desc'}})
        result = qb.first()

        # Guard against an empty database: `first()` returns None if nothing matches
        if result is None:
            return {}

        # Results are returned as a dictionary; datetime objects are
        # serialized as ISO 8601
        return dict(id=result[0],
                    ctime=result[1].isoformat(),
                    attributes=result[2])
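
The handler therefore returns a JSON-serializable payload shaped roughly like this (values made up):

{'id': 42, 'ctime': '2021-06-01T12:00:00+00:00', 'attributes': {'energy': -13.6}}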
Example 8
def get_data_aiida(projections, sliders_dict, quantities, plot_info):
    """Query the AiiDA database"""
    from figure.aiida import load_profile
    from aiida.orm import QueryBuilder, Dict

    load_profile()

    filters = {}

    def add_range_filter(bounds, label):
        # a bit of cheating until this is resolved
        # https://github.com/aiidateam/aiida_core/issues/1389
        #filters['attributes.'+label] = {'>=':bounds[0]}
        filters['attributes.' + label] = {
            'and': [{
                '>=': bounds[0]
            }, {
                '<': bounds[1]
            }]
        }

    for k, v in sliders_dict.items():
        # Note: filtering is costly, avoid if possible
        if v.value != quantities[k]['range']:
            add_range_filter(v.value, k)

    qb = QueryBuilder()
    qb.append(
        Dict,
        filters=filters,
        project=['attributes.' + p
                 for p in projections] + ['uuid', 'extras.cif_uuid'],
    )

    nresults = qb.count()
    if nresults == 0:
        plot_info.text = 'No matching COFs found.'
        return data_empty

    plot_info.text = '{} COFs found. Plotting...'.format(nresults)

    # x,y position
    x, y, clrs, uuids, names, cif_uuids = list(zip(*qb.all()))
    plot_info.text = '{} COFs queried'.format(nresults)
    x = list(map(float, x))
    y = list(map(float, y))
    cif_uuids = list(map(str, cif_uuids))
    uuids = list(map(str, uuids))

    if projections[2] == 'bond_type':
        #clrs = map(lambda clr: bondtypes.index(clr), clrs)
        clrs = list(map(str, clrs))
    else:
        clrs = list(map(float, clrs))

    return dict(x=x, y=y, uuid=cif_uuids, color=clrs, name=names)
Example 9
def retrieve_basis_sets(files, stop_if_existing):
    """Retrieve existing basis sets or create if them, if they do not exist.

    :param files: list of basis set file paths
    :param stop_if_existing: if True, check for the md5 of the files and,
        if the file already exists in the DB, raises a MultipleObjectsError.
        If False, simply adds the existing BasisSetData node to the group.
    :return:
    """
    basis_and_created = []
    for basis_file in files:
        _, content = parse_basis(basis_file)
        md5sum = md5_from_string(content)
        qb = QueryBuilder()
        qb.append(BasisSetData, filters={"attributes.md5": {"==": md5sum}})
        existing_basis = qb.first()

        if existing_basis is None:
            # return the basis set data instances, not stored
            basisset, created = BasisSetData.get_or_create(
                basis_file, use_first=True, store_basis=False
            )
            # to check whether only one basis set per element exists
            # NOTE: actually, created has the meaning of "to_be_created"
            basis_and_created.append((basisset, created))
        else:
            if stop_if_existing:
                raise ValueError(
                    "A basis set with identical MD5 to {} cannot be added "
                    "when stop_if_existing is True".format(basis_file)
                )
            existing_basis = existing_basis[0]
            basis_and_created.append((existing_basis, False))

    return basis_and_created
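
A hedged usage sketch; the file path is a placeholder and `parse_basis`, `md5_from_string` and `BasisSetData` come from the plugin this snippet belongs to:

pairs = retrieve_basis_sets(['./basis_sets/Ni.basis'], stop_if_existing=False)
for basis, to_be_created in pairs:
    print(basis, 'will be stored' if to_be_created else 'already in the DB')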
Example 10
    def _get_codes(self):
        """Query the list of available codes."""
        user = User.objects.get_default()

        query = QueryBuilder().append(
            Code, filters={"attributes.input_plugin": self.input_plugin})

        return {
            self._full_code_label(code): code
            for (code, ) in query.all()
            if code.computer.is_user_configured(user)
            and (self.allow_hidden_codes or not code.hidden)
            and (self.allow_disabled_computers
                 or code.computer.is_user_enabled(user))
        }
Example 11
def default_sssp_family(ctx, param, identifier):  # pylint: disable=unused-argument
    """Determine the default if no value is specified."""
    from aiida.orm import QueryBuilder
    from aiida_sssp.groups import SsspFamily

    if identifier is not None:
        return identifier

    result = QueryBuilder().append(SsspFamily).first()

    if result is None:
        # `QueryBuilder.first()` returns None when nothing matches; it does not raise.
        raise click.BadParameter(
            'failed to automatically detect an SSSP family: install it with `aiida-sssp install`.'
        )

    return result[0]
Example 12
    def _observe_node(self, _=None):
        if self.node is None or self.eln is None:
            return

        if "eln" in self.node.extras:
            info = self.node.extras["eln"]
        else:
            try:
                q = QueryBuilder().append(
                    Node,
                    filters={"extras": {
                        "has_key": "eln"
                    }},
                    tag="source_node",
                    project="extras.eln",
                )
                q.append(Node,
                         filters={"uuid": self.node.uuid},
                         with_ancestors="source_node")
                info = q.all(flat=True)[0]
            except IndexError:
                info = {}

        self.eln.set_sample_config(**info)
Example 13
    def _set_shear_aiida_relaxes(self):
        """
        Set list of AiidaRelaxWorkChain objects.
        """
        rlx_wf = WorkflowFactory('vasp.relax')
        qb = QueryBuilder()
        qb.append(Node, filters={'id': {'==': self._pk}}, tag='wf')
        qb.append(rlx_wf, with_incoming='wf', project=['id', 'label'])
        qb_all = qb.all()
        qb_all.sort(key=lambda entry: entry[1])  # sort by label
        rlx_pks = [q[0] for q in qb_all if not load_node(q[0]).is_failed]
        self._shear_aiida_relaxes = [
            AiidaRelaxWorkChain(load_node(pk)) for pk in rlx_pks
        ]
Example 14
    def get(cls, **kwargs):
        """
        Custom get for groups, which returns the single group matching the given attributes.

        :param kwargs: the attributes to match the group on
        :return: the matching group
        :raises: ``NotExistent`` or ``MultipleObjectsError`` if not exactly one group matches
        """
        from aiida.orm import QueryBuilder

        if 'type_string' in kwargs:
            message = '`type_string` is deprecated because it is determined automatically'
            warnings.warn(message)  # pylint: disable=no-member
            type_check(kwargs['type_string'], str)

        return QueryBuilder().append(cls, filters=kwargs).one()[0]
Example 15
def get_allstructurenodes_fromgroup(structure_group):
    from aiida.orm import Group
    from aiida.orm import StructureData
    from aiida.orm import QueryBuilder

    if structure_group:
        structure_group_label = structure_group.label
    else:
        return []

    sqb = QueryBuilder()
    sqb.append(Group, filters={'label': structure_group_label}, tag='g')
    sqb.append(StructureData, with_group='g')

    return [x[0] for x in sqb.all()]
Example 16
    def convert(self, value, param, ctx):
        is_path = False
        # Alternative one could check if int or uuid
        # aiida allows also for shorten uuids
        from aiida.orm import StructureData, QueryBuilder

        try:
            structure = types.DataParamType(
                sub_classes=('aiida.data:structure', )).convert(
                    value, param, ctx)
        except (NotExistent, click.exceptions.BadParameter):
            echo.echo(f'Tried to load a node, but could not find one for {value}. '
                      'I will further check whether it is a file path.')
            is_path = True

        if is_path:
            # If it is a path to a file try to convert the structure
            pathtype = click.Path(exists=True,
                                  dir_okay=False,
                                  resolve_path=True)
            filename = pathtype.convert(value, param, ctx)
            try:
                import ase.io
            except ImportError:
                echo.echo_critical(
                    'You have not installed the package ase. \nYou can install it with: pip install ase'
                )

            try:
                asecell = ase.io.read(filename)
                structure = StructureData(ase=asecell)
            except ValueError as err:
                echo.echo_critical(str(err))
            # do not store structure, since this option is for calculation and workflow
            # input, which will store the structure anyway.

        # do not store again if structure is already there.
        duplicate = QueryBuilder().append(StructureData,
                                          filters={
                                              'extras._aiida_hash':
                                              structure._get_hash()
                                          }).first()  # pylint: disable=protected-access

        if duplicate:
            return duplicate[0]
        return structure
Example 17
def get_sssp_families_builder(version=None, functional=None, protocol=None):
    """Return a query builder that will query for SSSP families of the given configuration.

    :param version: optional version filter
    :param functional: optional functional filter
    :param protocol: optional protocol filter
    :return: `QueryBuilder` instance
    """
    from aiida.orm import QueryBuilder
    from aiida_sssp.groups import SsspFamily

    label = 'SSSP/{version}/{functional}/{protocol}'
    filters = {
        'label': {
            'like': label.format(version=version or '%', functional=functional or '%', protocol=protocol or '%')
        }
    }
    builder = QueryBuilder().append(SsspFamily, filters=filters)

    return builder
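
Since the function returns the builder rather than the results, the caller decides how to consume it; a small sketch (version and functional values are placeholders):

builder = get_sssp_families_builder(version='1.1', functional='PBE')
print('{} matching families'.format(builder.count()))
for (family, ) in builder.iterall():
    print(family.label)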
Example 18
def generate_wf_option_node(computer=None, check_existence=True, **kwargs):
    """Create an option node for a certain workflow or calculation entry point.

    :param computer: dict {computername, queue} to provide computer-dependent defaults
    :param check_existence: bool, if True return an already stored duplicate node if one exists
    :param kwargs: dict, further keyword arguments by which the node content will be updated

    :returns: AiiDA Dict node
    """
    option_node_dict = generate_wf_option_dict(computer=computer, **kwargs)
    option_node = orm.Dict(dict=option_node_dict)
    if check_existence:
        duplicate = QueryBuilder().append(orm.Dict,
                                          filters={
                                              'extras._aiida_hash':
                                              option_node._get_hash()
                                          }).first()  # pylint: disable=protected-access
        if duplicate:
            option_node = duplicate[0]

    return option_node
Example 19
def cmd_install_openmx(version, protocol, hardness, traceback):
    """Install an OpenMX configuration.

    The OpenMX configuration will be automatically downloaded from t-ozaki.issp.u-tokyo.ac.jp to create a new
    `OpenmxBasisSet`.
    """
    # pylint: disable=too-many-locals
    configuration = OpenmxConfiguration(version, protocol, hardness)

    if configuration not in OpenmxBasisSet.valid_configurations:
        echo.echo_critical(f'{version} {hardness} {protocol} is not a valid OpenMX basis set configuration')

    label = OpenmxBasisSet.format_configuration_label(configuration)
    description = f'OpenMX 20{version} {hardness} {protocol} installed with aiida-basis v{__version__}'
    metadata = OpenmxBasisSet.get_configuration_metadata(configuration)

    if QueryBuilder().append(OpenmxBasisSet, filters={'label': label}).first():
        echo.echo_critical(f'{OpenmxBasisSet.__name__}<{label}> is already installed')

    with tempfile.TemporaryDirectory() as dirpath:
        dirpath = pathlib.Path(dirpath)
        download_openmx(configuration, dirpath)

        with attempt('parsing PAOs... ', include_traceback=traceback):
            basis_set = create_basis_set_from_directory(OpenmxBasisSet, label, dirpath)

        orbital_configurations = {}

        for element, values in metadata.items():
            if basis_set.get_basis(element).md5 != values['md5']:
                Group.objects.delete(basis_set.pk)
                msg = f'md5 of PAO for element {element} does not match that of the metadata {values["md5"]}'
                echo.echo_critical(msg)

            orbital_configurations[element] = values['orbital_configuration']

        basis_set.description = description
        basis_set.set_orbital_configurations(orbital_configurations)

        echo.echo_success(f'installed `{label}` containing `{basis_set.count()}` orbital bases')
Example 20
    def get_members(
            self,
            elements: Optional[Sequence[str]] = None,
            structure: Optional[StructureData] = None) -> Dict[str, List[_T]]:
        """
        Return a dict of kind names/elements to a list of respective data nodes
        for the given list of elements or structure.

        :param elements: list of element symbols.
        :param structure: the ``StructureData`` node.
        """

        assert (elements is None) ^ (
            structure is None
        ), "Exactly one of the parameters elements and structure must be specified"
        assert isinstance(elements, Sequence) or isinstance(
            structure, StructureData)

        if structure:
            elements = list(structure.get_symbols_set())

        query = (QueryBuilder().append(
            self.__class__, filters={
                "id": self.pk
            }, tag="group").append(
                self.member_type,
                with_group="group",
                filters={"attributes.element": {
                    "in": elements
                }}))

        pseudos: Dict[str, List[_T]] = {}

        for (pseudo, ) in query.iterall():
            pseudos.setdefault(pseudo.element, []).append(pseudo)

        return pseudos
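
A usage sketch, assuming `family` is a stored instance of this group class (the element symbols are placeholders):

members = family.get_members(elements=('Ga', 'As'))
for element, nodes in members.items():
    print(element, len(nodes))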
Example 21
    def get_potcars_dict(cls, elements, family_name, mapping=None):
        """
        Get a dictionary {element: ``PotcarData.full_name``} for all given symbols.

        :param elements: The list of symbols to find POTCARs for
        :param family_name: The POTCAR family to be used
        :param mapping: A mapping[element] -> ``full_name``, for example: mapping={'In': 'In', 'As': 'As_d'}

        Exceptions:

         * If the mapping does not contain an item for a given element name, a ``ValueError`` is raised.
         * If no POTCAR is found for a given element, a ``NotExistent`` error is raised.

        If there are multiple POTCARs with the same ``full_name``, the first one
        returned by ``PotcarData.find()`` will be used.
        """
        if not mapping:
            mapping = {element: element for element in elements}
        group_filters = {'label': {'==': family_name}, 'type_string': {'==': cls.potcar_family_type_string}}
        element_filters = {'attributes.full_name': {'in': [mapping[element] for element in elements]}}
        query = QueryBuilder()
        query.append(Group, tag='family', filters=group_filters)
        query.append(cls, tag='potcar', with_group='family', filters=element_filters)

        query_results = query.all()  # run the query once, outside the loop

        result_potcars = {}
        for element in elements:
            if element not in mapping:
                raise ValueError('Potcar mapping must contain an item for each element in the structure, '
                                 'with the full name of the POTCAR file (i.e. "In_d", "As_h").')
            full_name = mapping[element]
            potcars_of_kind = [potcar[0] for potcar in query_results if potcar[0].full_name == full_name]
            if not potcars_of_kind:
                raise NotExistent('No POTCAR found for full name {} in family {}'.format(full_name, family_name))
            if len(potcars_of_kind) > 1:
                result_potcars[element] = cls.find(family=family_name, full_name=full_name)[0]
            else:
                result_potcars[element] = potcars_of_kind[0]

        return result_potcars
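
A usage sketch based on the docstring; the family label 'PBE.54' and the mapping are placeholders:

potcars = PotcarData.get_potcars_dict(
    elements=['In', 'As'],
    family_name='PBE.54',
    mapping={'In': 'In_d', 'As': 'As'})
print({element: potcar.full_name for element, potcar in potcars.items()})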
Example 22
def generate_wf_para_node(wf_entry_point='fleur.scf',
                          check_existence=True,
                          **kwargs):
    """Create a wf parameter node for a certain workflow or calculation entry point.

    :param wf_entry_point: string, the entry point to create the node for, default='fleur.scf'
    :param check_existence: bool, if True return an already stored duplicate node if one exists
    :param kwargs: dict, further keyword arguments by which the node content will be updated

    :returns: AiiDA Dict node
    """
    wf_para_node_dict = generate_wf_para_dict(wf_entry_point=wf_entry_point,
                                              check_existence=check_existence,
                                              **kwargs)
    wf_para_node = orm.Dict(dict=wf_para_node_dict)
    if check_existence:
        duplicate = QueryBuilder().append(orm.Dict,
                                          filters={
                                              'extras._aiida_hash':
                                              wf_para_node._get_hash()
                                          }).first()  # pylint: disable=protected-access
        if duplicate:
            wf_para_node = duplicate[0]

    return wf_para_node
Example 23
    def _set_structure_pks(self):
        """
        Set structure pks.
        """
        qb = QueryBuilder()
        qb.append(Node, filters={'id': {'==': self._pk}}, tag='wf')
        qb.append(
            Node,
            filters={'label': {
                '==': 'get_twinboundary_shear_structure'
            }},
            project=['id'],
            with_incoming='wf')
        cf_pks = [q[0] for q in qb.all()]
        cf_pks.sort()
        orig_pks = []
        input_pks = []
        for ix in range(len(self._shear_aiida_relaxes)):
            cf = load_node(cf_pks[ix])
            orig_pks.append(cf.outputs.twinboundary_shear_structure_orig.pk)
            input_pks.append(cf.outputs.twinboundary_shear_structure.pk)

        rlx_pks = []
        for aiida_rlx, i_struct_pk in zip(self._shear_aiida_relaxes,
                                          input_pks):
            pks = aiida_rlx.get_pks()
            assert pks['initial_structure_pk'] == i_struct_pk, \
                    "Input structure does not match."
            rlx_pks.append(pks['final_structure_pk'])

        self._structure_pks = {
            'original_structures': orig_pks,
            'input_structures': input_pks,
            'relax_structures': rlx_pks,
        }
Example 24
    def do_group_belong(self, args):
        """Command for list groups"""
        from aiida.orm import QueryBuilder, Node, Group
        from tabulate import tabulate

        q = QueryBuilder()
        q.append(Node, filters={'id': self._current_node.pk})
        q.append(Group, with_node=Node, project=['*'])

        projection_lambdas = {
            'pk': lambda group: str(group.pk),
            'label': lambda group: group.label,
            'type_string': lambda group: group.type_string,
            'count': lambda group: group.count(),
            'user': lambda group: group.user.email.strip(),
            'description': lambda group: group.description
        }

        table = []
        projection_header = ['PK', 'Label', 'Type string', 'User']
        projection_fields = ['pk', 'label', 'type_string', 'user']

        if args.with_count:
            projection_header.append('Node Count')
            projection_fields.append('count')

        if args.with_description:
            projection_header.append('Description')
            projection_fields.append('description')

        for (group, ) in q.all():
            table.append([
                projection_lambdas[field](group) for field in projection_fields
            ])

        self.poutput(tabulate(table, headers=projection_header))
Example 25
    def test_get_or_create_invalid_prefix(self):
        """Test the ``get_or_create_group`` method of ``Autogroup`` when there is already a group
        with the same prefix, but followed by other non-underscore characters."""
        label_prefix = 'new_test_prefix_TestAutogroup'
        # I create a group with the same prefix, but followed by non-underscore
        # characters. These should be ignored in the logic.
        AutoGroup(label='{}xx'.format(label_prefix)).store()

        # Check that there are no groups to begin with
        queryb = QueryBuilder().append(AutoGroup,
                                       filters={'label': label_prefix})
        assert not list(queryb.all())
        queryb = QueryBuilder().append(
            AutoGroup,
            filters={'label': {
                'like': r'{}\_%'.format(label_prefix)
            }})
        assert not list(queryb.all())

        # First group (no existing one)
        autogroup = Autogroup()
        autogroup.set_group_label_prefix(label_prefix)
        group = autogroup.get_or_create_group()
        expected_label = label_prefix
        self.assertEqual(
            group.label, expected_label,
            "The auto-group should be labelled '{}', it is instead '{}'".
            format(expected_label, group.label))

        # Second group (only one with no suffix existing)
        autogroup = Autogroup()
        autogroup.set_group_label_prefix(label_prefix)
        group = autogroup.get_or_create_group()
        expected_label = label_prefix + '_1'
        self.assertEqual(
            group.label, expected_label,
            "The auto-group should be labelled '{}', it is instead '{}'".
            format(expected_label, group.label))
Example 26
def _add_nodes_to_groups(*, group_count: int,
                         group_uuids: Iterable[Tuple[str, Set[str]]],
                         foreign_ids_reverse_mappings: Dict[str, Dict[str,
                                                                      int]]):
    """Add nodes to imported groups."""
    if not group_count:
        return

    pbar_base_str = 'Groups - '

    with get_progress_reporter()(total=group_count,
                                 desc=pbar_base_str) as progress:
        for groupuuid, groupnodes in group_uuids:
            if not groupnodes:
                progress.update()
                continue
            # TODO: cache these to avoid too many queries
            qb_group = QueryBuilder().append(
                Group, filters={'uuid': {
                    '==': groupuuid
                }})
            group_ = qb_group.first()[0]

            progress.set_description_str(
                f'{pbar_base_str}label={group_.label}', refresh=False)
            progress.update()

            nodes_to_store = [
                foreign_ids_reverse_mappings[NODE_ENTITY_NAME][node_uuid]
                for node_uuid in groupnodes
            ]
            qb_nodes = QueryBuilder().append(
                Node, filters={'id': {
                    'in': nodes_to_store
                }})
            # Adding nodes to group avoiding the SQLA ORM to increase speed
            nodes_to_add = [n[0].backend_entity for n in qb_nodes.all()]
            group_.backend_entity.add_nodes(nodes_to_add, skip_orm=True)
Example 27
    def get_or_create_group(self):
        """Return the current `AutoGroup`, or create one if None has been set yet.

        This function implements a somewhat complex logic that is however needed
        to make sure that, even if `verdi run` is called at the same time multiple
        times, e.g. in a for loop in bash, there is never the risk that two ``verdi run``
        Unix processes try to create the same group, with the same label, ending
        up in a crash of the code (see PR #3650).

        Here, instead, we make sure that if this concurrency issue happens,
        one of the two will get an IntegrityError from the DB, and will then recover
        by trying to create a group with a different label (with a numeric suffix appended),
        until it manages to create it.
        """
        from aiida.orm import QueryBuilder

        # When this function is called for the first time, just generate
        # a new group name (later on, after this ``if`` block).
        # In that case, we will later cache the group label in ``self._group_label``,
        # so the group with the same name can be returned quickly in future
        # calls of this method.
        if self._group_label is not None:
            builder = QueryBuilder().append(
                AutoGroup, filters={'label': self._group_label})
            results = [res[0] for res in builder.iterall()]
            if results:
                # If it is not empty, it should have only one result due to the uniqueness constraints
                assert len(
                    results
                ) == 1, 'I got more than one autogroup with the same label!'
                return results[0]
            # There are no results: probably the group has been deleted.
            # I continue as if it was not cached
            self._group_label = None

        label_prefix = self.get_group_label_prefix()
        # Try to do a preliminary QB query to avoid to do too many try/except
        # if many of the prefix_NUMBER groups already exist
        queryb = QueryBuilder().append(
            AutoGroup,
            filters={
                'or': [{
                    'label': {
                        '==': label_prefix
                    }
                }, {
                    'label': {
                        'like': escape_for_sql_like(label_prefix + '_') + '%'
                    }
                }]
            },
            project='label')
        existing_group_labels = [
            res[0][len(label_prefix):] for res in queryb.all()
        ]
        existing_group_ints = []
        for label in existing_group_labels:
            if label == '':
                # This is just the prefix without name - corresponds to counter = 0
                existing_group_ints.append(0)
            elif label.startswith('_'):
                try:
                    existing_group_ints.append(int(label[1:]))
                except ValueError:
                    # It's not an integer, so it will never collide - just ignore it
                    pass

        if not existing_group_ints:
            counter = 0
        else:
            counter = max(existing_group_ints) + 1

        while True:
            try:
                label = label_prefix if counter == 0 else '{}_{}'.format(
                    label_prefix, counter)
                group = AutoGroup(label=label).store()
                self._group_label = group.label
            except exceptions.IntegrityError:
                counter += 1
            else:
                break

        return group
Example 28
def _retrieve_linked_nodes_query(current_node, input_type, output_type,
                                 direction, link_type_value):
    """Helper function for :py:func:`~aiida.tools.importexport.dbexport.utils.retrieve_linked_nodes`

    A general :py:class:`~aiida.orm.querybuilder.QueryBuilder` query, retrieving linked Nodes and returning link
    information and the found Nodes.

    :param current_node: The current Node's PK.
    :type current_node: int

    :param input_type: Source Node class for Link
    :type input_type: :py:class:`~aiida.orm.nodes.data.data.Data`,
        :py:class:`~aiida.orm.nodes.process.process.ProcessNode`.

    :param output_type: Target Node class for Link
    :type output_type: :py:class:`~aiida.orm.nodes.data.data.Data`,
        :py:class:`~aiida.orm.nodes.process.process.ProcessNode`.

    :param direction: Link direction, must be either ``'forward'`` or ``'backward'``.
    :type direction: str

    :param link_type_value: A :py:class:`~aiida.common.links.LinkType` value, e.g. ``LinkType.RETURN.value``.
    :type link_type_value: str

    :return: Dictionary of link information to be used for the export archive and set of found Nodes.
    """
    found_nodes = set()
    links_uuid_dict = {}
    filters_input = {}
    filters_output = {}

    if direction == 'forward':
        filters_input['id'] = current_node
    elif direction == 'backward':
        filters_output['id'] = current_node
    else:
        raise exceptions.ExportValidationError(
            'direction must be either "forward" or "backward"')

    builder = QueryBuilder()
    builder.append(input_type,
                   project=['uuid', 'id'],
                   tag='input',
                   filters=filters_input)
    builder.append(output_type,
                   project=['uuid', 'id'],
                   with_incoming='input',
                   filters=filters_output,
                   edge_filters={'type': link_type_value},
                   edge_project=['label', 'type'])

    for input_uuid, input_pk, output_uuid, output_pk, link_label, link_type in builder.iterall():
        links_uuid_entry = {
            'input': str(input_uuid),
            'output': str(output_uuid),
            'label': str(link_label),
            'type': str(link_type)
        }
        links_uuid_dict[frozenset(links_uuid_entry.items())] = links_uuid_entry

        node_pk = output_pk if direction == 'forward' else input_pk
        found_nodes.add(node_pk)

    return links_uuid_dict, found_nodes
Example 29
def delete_nodes(pks, verbosity=0, dry_run=False, force=False, **kwargs):
    """Delete nodes by a list of pks.

    This command will delete not only the specified nodes, but also the ones that are
    linked to these and should be also deleted in order to keep a consistent provenance
    according to the rules explained in the concepts section of the documentation.
    In summary:

    1. If a DATA node is deleted, any process nodes linked to it will also be deleted.

    2. If a CALC node is deleted, any incoming WORK node (callers) will be deleted as
    well whereas any incoming DATA node (inputs) will be kept. Outgoing DATA nodes
    (outputs) will be deleted by default but this can be disabled.

    3. If a WORK node is deleted, any incoming WORK node (callers) will be deleted as
    well, but all DATA nodes will be kept. Outgoing WORK or CALC nodes will be kept by
    default, but deletion of either kind of connected node can be enabled.

    These rules are 'recursive', so if a CALC node is deleted, then its output DATA
    nodes will be deleted as well, and then any CALC node that may have those as
    inputs, and so on.

    :param pks: a list of the PKs of the nodes to delete
    :param int verbosity: 0 prints nothing,
                          1 prints just sums and total,
                          2 prints individual nodes.
    :param kwargs: graph traversal rules. See :const:`aiida.common.links.GraphTraversalRules` for which rule names
        are toggleable and what the defaults are.
    :param bool dry_run:
        Just perform a dry run and do not delete anything. Print statistics according
        to the verbosity level set.
    :param bool force:
        Do not ask for confirmation to delete nodes.
    """
    # pylint: disable=too-many-arguments,too-many-branches,too-many-locals,too-many-statements
    from aiida.backends.utils import delete_nodes_and_connections
    from aiida.common import exceptions
    from aiida.orm import Node, QueryBuilder, load_node
    from aiida.tools.graph.graph_traversers import get_nodes_delete

    starting_pks = []
    for pk in pks:
        try:
            load_node(pk)
        except exceptions.NotExistent:
            echo.echo_warning(f'node with pk<{pk}> does not exist, skipping')
        else:
            starting_pks.append(pk)

    # An empty set might be problematic for the queries done below.
    if not starting_pks:
        if verbosity:
            echo.echo('Nothing to delete')
        return

    pks_set_to_delete = get_nodes_delete(starting_pks, **kwargs)['nodes']

    if verbosity > 0:
        echo.echo('I {} delete {} node{}'.format(
            'would' if dry_run else 'will', len(pks_set_to_delete),
            's' if len(pks_set_to_delete) > 1 else ''))
        if verbosity > 1:
            builder = QueryBuilder().append(
                Node,
                filters={'id': {
                    'in': pks_set_to_delete
                }},
                project=('uuid', 'id', 'node_type', 'label'))
            echo.echo(f"The nodes I {'would' if dry_run else 'will'} delete:")
            for uuid, pk, type_string, label in builder.iterall():
                try:
                    short_type_string = type_string.split('.')[-2]
                except IndexError:
                    short_type_string = type_string
                echo.echo(f'   {uuid} {pk} {short_type_string} {label}')

    if dry_run:
        if verbosity > 0:
            echo.echo(
                '\nThis was a dry run, exiting without deleting anything')
        return

    # Ask for user confirmation unless --force was given
    if not force:
        echo.echo_warning(
            f'YOU ARE ABOUT TO DELETE {len(pks_set_to_delete)} NODES! THIS CANNOT BE UNDONE!'
        )
        if not click.confirm('Shall I continue?'):
            echo.echo('Exiting without deleting')
            return

    # Recover the list of folders to delete before actually deleting the nodes. I will delete the folders only later,
    # so that if there is a problem during the deletion of the nodes in the DB, I don't delete the folders
    repositories = [load_node(pk)._repository for pk in pks_set_to_delete]  # pylint: disable=protected-access

    if verbosity > 0:
        echo.echo('Starting node deletion...')
    delete_nodes_and_connections(pks_set_to_delete)

    if verbosity > 0:
        echo.echo(
            'Nodes deleted from database, deleting files from the repository now...'
        )

    # If we are here, we managed to delete the entries from the DB.
    # I can now delete the folders
    for repository in repositories:
        repository.erase(force=True)

    if verbosity > 0:
        echo.echo('Deletion completed.')
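
Given the recursive deletion rules above, a dry run first is the safe pattern (the pk is a placeholder):

delete_nodes([1234], verbosity=2, dry_run=True)  # inspect what would be deleted
delete_nodes([1234], verbosity=1, force=True)    # then actually delete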
Example 30
    def search(self, _=None):
        """Launch the search of structures in AiiDA database."""
        self.preprocess()

        qbuild = QueryBuilder()

        # If the date range is valid, use it for the search
        try:
            start_date = datetime.datetime.strptime(
                self.start_date_widget.value, '%Y-%m-%d')
            end_date = datetime.datetime.strptime(
                self.end_date_widget.value,
                '%Y-%m-%d') + datetime.timedelta(hours=24)

        # Otherwise revert to the standard (i.e. last 7 days)
        except ValueError:
            start_date = datetime.datetime.now() - datetime.timedelta(days=7)
            end_date = datetime.datetime.now() + datetime.timedelta(hours=24)

            self.start_date_widget.value = start_date.strftime('%Y-%m-%d')
            self.end_date_widget.value = end_date.strftime('%Y-%m-%d')

        filters = {}
        filters['ctime'] = {'and': [{'>': start_date}, {'<=': end_date}]}

        if self.mode.value == "uploaded":
            qbuild2 = QueryBuilder().append(self.query_structure_type,
                                            project=["id"],
                                            tag='structures').append(
                                                Node,
                                                with_outgoing='structures')
            processed_nodes = [n[0] for n in qbuild2.all()]
            if processed_nodes:
                filters['id'] = {"!in": processed_nodes}
            qbuild.append(self.query_structure_type, filters=filters)

        elif self.mode.value == "calculated":
            if self.drop_label.value == 'All':
                qbuild.append((CalcJobNode, WorkChainNode),
                              tag='calcjobworkchain')
            else:
                qbuild.append((CalcJobNode, WorkChainNode),
                              filters={'label': self.drop_label.value},
                              tag='calcjobworkchain')
            qbuild.append(self.query_structure_type,
                          with_incoming='calcjobworkchain',
                          filters=filters)

        elif self.mode.value == "edited":
            qbuild.append(CalcFunctionNode)
            qbuild.append(self.query_structure_type,
                          with_incoming=CalcFunctionNode,
                          filters=filters)

        elif self.mode.value == "all":
            qbuild.append(self.query_structure_type, filters=filters)

        qbuild.order_by({self.query_structure_type: {'ctime': 'desc'}})
        matches = {n[0] for n in qbuild.iterall()}
        matches = sorted(matches, reverse=True, key=lambda n: n.ctime)

        options = OrderedDict()
        options["Select a Structure ({} found)".format(len(matches))] = False

        for mch in matches:
            label = "PK: {}".format(mch.id)
            label += " | " + mch.ctime.strftime("%Y-%m-%d %H:%M")
            label += " | " + mch.get_extra("formula")
            label += " | " + mch.node_type.split('.')[-2]
            label += " | " + mch.label
            label += " | " + mch.description
            options[label] = mch

        self.results.options = options