def delete_group_nodes(
    pks: Iterable[int],
    dry_run: Union[bool, Callable[[Set[int]], bool]] = True,
    **traversal_rules: bool
) -> Tuple[Set[int], bool]:
    """Delete every node contained in the given groups (the groups themselves are kept).

    Deletion is not limited to the group members: linked nodes are removed as well,
    so that the provenance graph stays consistent, following the rules described in
    the concepts section of the documentation. In short:

    1. Deleting a DATA node also deletes any process node linked to it.
    2. Deleting a CALC node also deletes its incoming WORK nodes (callers); incoming
       DATA nodes (inputs) are kept. Outgoing DATA nodes (outputs) are deleted by
       default, but this can be disabled.
    3. Deleting a WORK node also deletes its incoming WORK nodes (callers), while all
       DATA nodes are kept. Outgoing WORK or CALC nodes are kept by default, though
       deletion of either kind can be enabled.

    These rules apply recursively: deleting a CALC node deletes its output DATA
    nodes, which in turn may trigger deletion of CALC nodes using them as inputs,
    and so on.

    :param pks: a list of the groups
    :param dry_run:
        If True, return the pks to delete without deleting anything.
        If False, delete the pks without confirmation
        If callable, a function that return True/False, based on the pks, e.g. ``dry_run=lambda pks: True``
    :param traversal_rules: graph traversal rules. See :const:`aiida.common.links.GraphTraversalRules` what rule
        names are toggleable and what the defaults are.
    :returns: (node pks to delete, whether they were deleted)
    """
    # Collect the distinct pks of all nodes that are members of the requested groups.
    query = QueryBuilder()
    query.append(Group, filters={'id': {'in': list(pks)}}, tag='groups')
    query.append(Node, project='id', with_group='groups')
    query.distinct()
    member_pks = query.all(flat=True)

    # Delegate the actual (recursive) deletion to the generic node-deletion machinery.
    return delete_nodes(member_pks, dry_run=dry_run, **traversal_rules)
def retrieve_alluncalculated_structures(structure_group_name, workchain_group_name=None):
    """Return all StructureData nodes in a group that have not yet been used by a WorkChain.

    :param structure_group_name: label of the group containing the candidate structures
    :param workchain_group_name: if given, only workchains belonging to the group with
        this label count as "already calculated"
    :returns: list of StructureData nodes with no matching WorkChainNode output
    """
    print("Warning! Warning! Untested!")
    # First query: ids of structures that already feed a WorkChainNode.
    # NOTE(review): updated to the current QueryBuilder API — the original used the
    # removed keywords member_of/output_of/group_of and the old 'name' Group filter;
    # the rest of this file consistently uses with_group/with_incoming/with_node/'label'.
    sqb = QueryBuilder()
    sqb.append(Group, filters={'label': structure_group_name}, tag='g')
    sqb.append(StructureData, project='id', tag='s', with_group='g')
    sqb.append(WorkChainNode, tag='job', with_incoming='s')
    filters = {}
    if workchain_group_name:
        filters = {'label': workchain_group_name}
    sqb.append(Group, with_node='job', filters=filters)

    ids_dealt_with = [_ for _, in sqb.distinct().all()] or [-1]  # prevent empty list

    # Second query: structures in the group whose id is NOT among those already used.
    qb = QueryBuilder()
    qb.append(Group, filters={'label': structure_group_name}, tag='g')
    qb.append(StructureData, project='*', tag='s', with_group='g',
              filters={'id': {'!in': ids_dealt_with}})  # filter out calculated; '!in' for not in

    return [x[0] for x in qb.all()]
def retrieve_alluncalculated_structures(structure_group_label, workchain_group_label=None):
    """Return the StructureData nodes of a group that no WorkChainNode has consumed yet.

    :param structure_group_label: label of the group holding the candidate structures
    :param workchain_group_label: if given, restrict the "already calculated" check to
        workchains belonging to the group with this label
    :returns: list of StructureData nodes without a matching workchain
    """
    from aiida.orm import Group
    from aiida.orm import StructureData
    from aiida.orm import WorkChainNode
    from aiida.orm import QueryBuilder

    # Query 1: ids of structures that are already an input of some WorkChainNode.
    calculated_query = QueryBuilder()
    calculated_query.append(Group, filters={'label': structure_group_label}, tag='g')
    calculated_query.append(StructureData, project='id', tag='s', with_group='g')
    calculated_query.append(WorkChainNode, tag='job', with_incoming='s')
    group_filters = {'label': workchain_group_label} if workchain_group_label else {}
    calculated_query.append(Group, with_node='job', filters=group_filters)

    calculated_ids = [row for row, in calculated_query.distinct().all()]
    if not calculated_ids:
        calculated_ids = [-1]  # prevent empty list in the '!in' filter below

    # Query 2: structures in the group excluding the already-calculated ids.
    uncalculated_query = QueryBuilder()
    uncalculated_query.append(Group, filters={'label': structure_group_label}, tag='g')
    uncalculated_query.append(
        StructureData,
        project='*',
        tag='s',
        with_group='g',
        filters={'id': {'!in': calculated_ids}},  # filter out calculated; '!in' for not in
    )

    return [row[0] for row in uncalculated_query.all()]
def listfamilies(element, with_description):
    """List available OtfgData families.

    :param element: if truthy, only show families containing pseudos for this element
        (or the special "LIBRARY" entries)
    :param with_description: also print each group's description
    """
    from aiida.orm import QueryBuilder, Node
    from aiida_castep.data.otfg import OTFGGroup

    qbd = QueryBuilder()
    qbd.append(Node, tag="otfgdata")
    if element:
        # Match pseudos for the requested element, or library-wide entries.
        qbd.add_filter("otfgdata", {
            "attributes.element": {
                "or": [{'in': element}, {'==': "LIBRARY"}]
            }
        })
    qbd.append(OTFGGroup, tag='group', with_node='otfgdata', project=['label', 'description'])
    qbd.distinct()

    if qbd.count() > 0:
        for res in qbd.dict():
            group_label = res.get("group").get("label")
            group_desc = res.get("group").get("description")

            # Count the number of pseudos in this group.
            # Use a separate builder: the original reassigned `qbd` here while
            # iterating `qbd.dict()`, which only works if `.dict()` materializes
            # eagerly — renaming removes that fragility.
            count_query = QueryBuilder()
            count_query.append(OTFGGroup, tag='thisgroup', filters={"label": {'like': group_label}})
            count_query.append(Node, project=["id"], with_group='thisgroup')

            if with_description:
                description_string = ": {}".format(group_desc)
            else:
                description_string = ""

            click.echo("* {} [{} pseudos]{}".format(group_label, count_query.count(), description_string))
    else:
        click.echo("No valid pseudopotential family found.")
def database_summary(verbose):
    """Summarise the entities in the database.

    :param verbose: when truthy, also list distinct emails, computer names,
        node/process types and group type strings, not just the counts
    """
    from aiida.orm import QueryBuilder, Node, Group, Computer, Comment, Log, User

    summary = {}

    # Users
    user_query = QueryBuilder().append(User, project=['email'])
    summary['Users'] = {'count': user_query.count()}
    if verbose:
        summary['Users']['emails'] = user_query.distinct().all(flat=True)

    # Computers
    computer_query = QueryBuilder().append(Computer, project=['name'])
    summary['Computers'] = {'count': computer_query.count()}
    if verbose:
        summary['Computers']['names'] = computer_query.distinct().all(flat=True)

    # Nodes
    summary['Nodes'] = {'count': QueryBuilder().append(Node).count()}
    if verbose:
        summary['Nodes']['node_types'] = (
            QueryBuilder().append(Node, project=['node_type']).distinct().all(flat=True)
        )
        process_types = (
            QueryBuilder().append(Node, project=['process_type']).distinct().all(flat=True)
        )
        # Drop empty/None process types.
        summary['Nodes']['process_types'] = [ptype for ptype in process_types if ptype]

    # Groups
    group_query = QueryBuilder().append(Group, project=['type_string'])
    summary['Groups'] = {'count': group_query.count()}
    if verbose:
        summary['Groups']['type_strings'] = group_query.distinct().all(flat=True)

    # Comments and logs: counts only, even in verbose mode.
    summary['Comments'] = {'count': QueryBuilder().append(Comment).count()}
    summary['Logs'] = {'count': QueryBuilder().append(Log).count()}

    echo.echo_dictionary(summary, sort_keys=False, fmt='yaml')