def get_full_type_filters(full_type):
    """Return the `QueryBuilder` filters that will return all `Nodes` identified by the given `full_type`.

    :param full_type: the `full_type` unique node identifier
    :return: dictionary of filters to be passed for the `filters` keyword in `QueryBuilder.append`
    :raises ValueError: if the `full_type` is invalid
    :raises TypeError: if the `full_type` is not a string type
    """
    validate_full_type(full_type)

    node_type, process_type = full_type.split(FULL_TYPE_CONCATENATOR)

    # The like-operator character may occur at most once per component and, if present, only in final position.
    for component in (node_type, process_type):
        occurrences = component.count(LIKE_OPERATOR_CHARACTER)

        if occurrences > 1:
            raise ValueError(f'full type component `{component}` contained more than one like-operator character')

        if occurrences == 1 and not component.endswith(LIKE_OPERATOR_CHARACTER):
            raise ValueError(f'like-operator character in full type component `{component}` is not at the end')

    filters = {}

    # After validation, a like-operator character can only be the final character of the component.
    if node_type.endswith(LIKE_OPERATOR_CHARACTER):
        # Escape the bare pattern for SQL LIKE, then reinstate the trailing like-operator character.
        pattern = escape_for_sql_like(node_type[:-1]) + LIKE_OPERATOR_CHARACTER
        filters['node_type'] = {'like': pattern}
    else:
        filters['node_type'] = {'==': node_type}

    if process_type.endswith(LIKE_OPERATOR_CHARACTER):
        stem = process_type[:-1]
        if stem:
            filters['process_type'] = {'like': escape_for_sql_like(stem) + LIKE_OPERATOR_CHARACTER}
        # A bare like-operator character means "any process type": add no `process_type` filter at all.
    elif process_type:
        filters['process_type'] = {'==': process_type}
    else:
        # A `process_type=''` is used to represents both `process_type='' and `process_type=None`.
        # This is because there is no simple way to single out null `process_types`, and therefore
        # we consider them together with empty-string process_types.
        # Moreover, the existence of both is most likely a bug of migrations and thus both share
        # this same "erroneous" origin.
        filters['process_type'] = {'or': [{'==': ''}, {'==': None}]}

    return filters
def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', project='*'):
    """
    Return the query builder instance that attempts to map the identifier onto an entity of the orm class,
    defined for this loader class, interpreting the identifier as a LABEL like identifier

    :param identifier: the LABEL identifier
    :param classes: a tuple of orm classes to which the identifier should be mapped
    :param operator: the operator to use in the query
    :param project: the property or properties to project for entities matching the query
    :returns: the query builder instance that should retrieve the entity corresponding to the identifier
    :raises ValueError: if the identifier is invalid
    :raises aiida.common.NotExistent: if the orm base class does not support a LABEL like identifier
    """
    from aiida.common.escaping import escape_for_sql_like
    from aiida.orm import Computer

    try:
        # A code label may carry an optional `@machinename` suffix selecting the computer.
        identifier, _, machinename = identifier.partition('@')
    except AttributeError as exception:
        # BUGFIX: chain the original exception (`raise ... from`) so the AttributeError traceback
        # is not silently discarded when reporting the invalid identifier (pylint W0707).
        raise ValueError('the identifier needs to be a string') from exception

    if operator == 'like':
        # Escape user input so `%`/`_` are matched literally, then append the wildcard for prefix matching.
        identifier = escape_for_sql_like(identifier) + '%'

    builder = QueryBuilder()
    builder.append(cls=classes, tag='code', project=project, filters={'label': {operator: identifier}})

    if machinename:
        builder.append(Computer, filters={'name': {'==': machinename}}, with_node='code')

    return builder
def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', project='*'):
    """
    Return the query builder instance that attempts to map the identifier onto an entity of the orm class,
    defined for this loader class, interpreting the identifier as a LABEL like identifier

    :param identifier: the LABEL identifier
    :param classes: a tuple of orm classes to which the identifier should be mapped
    :param operator: the operator to use in the query
    :param project: the property or properties to project for entities matching the query
    :returns: the query builder instance that should retrieve the entity corresponding to the identifier
    :raises ValueError: if the identifier is invalid
    :raises aiida.common.NotExistent: if the orm base class does not support a LABEL like identifier
    """
    from aiida.common.escaping import escape_for_sql_like

    # For a `like` query, escape the raw label and turn it into a prefix pattern; otherwise use it verbatim.
    label_value = escape_for_sql_like(identifier) + '%' if operator == 'like' else identifier

    builder = QueryBuilder()
    builder.append(cls=classes, tag='workflow', project=project, filters={'label': {operator: label_value}})

    return builder
def get_full_type_filters(full_type):
    """Return the `QueryBuilder` filters that will return all `Nodes` identified by the given `full_type`.

    :param full_type: the `full_type` unique node identifier
    :return: dictionary of filters to be passed for the `filters` keyword in `QueryBuilder.append`
    :raises ValueError: if the `full_type` is invalid
    :raises TypeError: if the `full_type` is not a string type
    """
    validate_full_type(full_type)

    filters = {}

    node_type, process_type = full_type.split(FULL_TYPE_CONCATENATOR)

    for entry in (node_type, process_type):
        if entry.count(LIKE_OPERATOR_CHARACTER) > 1:
            raise ValueError(
                'full type component `{}` contained more than one like-operator character'.format(entry))

        if LIKE_OPERATOR_CHARACTER in entry and entry[-1] != LIKE_OPERATOR_CHARACTER:
            raise ValueError(
                'like-operator character in full type component `{}` is not at the end'.format(entry))

    if LIKE_OPERATOR_CHARACTER in node_type:
        # Remove the trailing `LIKE_OPERATOR_CHARACTER`, escape the string and reattach the character
        node_type = escape_for_sql_like(node_type[:-1]) + LIKE_OPERATOR_CHARACTER
        filters['node_type'] = {'like': node_type}
    else:
        # BUGFIX: an exact match must use the raw value with an explicit `==` operator. The previous
        # code stored `escape_for_sql_like(node_type)` as the filter value, but the backslash-escaped
        # string will never equal the actual `node_type` stored in the database.
        filters['node_type'] = {'==': node_type}

    if LIKE_OPERATOR_CHARACTER in process_type:
        # Remove the trailing `LIKE_OPERATOR_CHARACTER`; if that was the only content, the caller is
        # asking for any process type, so no `process_type` filter is added at all. A bare `%` pattern
        # would wrongly exclude rows whose `process_type` is NULL.
        process_type = process_type[:-1]
        if process_type:
            filters['process_type'] = {'like': escape_for_sql_like(process_type) + LIKE_OPERATOR_CHARACTER}
    elif process_type:
        # BUGFIX: same as for `node_type`: exact matches must not be escaped for SQL LIKE.
        filters['process_type'] = {'==': process_type}
    else:
        # A `process_type=''` is used to represent both `process_type=''` and `process_type=None`.
        # There is no simple way to single out null `process_type` values, so both are matched together.
        filters['process_type'] = {'or': [{'==': ''}, {'==': None}]}

    return filters
def get_or_create_group(self):
    """Return the current `AutoGroup`, or create one if None has been set yet.

    This function implements a somewhat complex logic that is however needed to make sure that, even if
    `verdi run` is called at the same time multiple times, e.g. in a for loop in bash, there is never the
    risk that two ``verdi run`` Unix processes try to create the same group, with the same label, ending
    up in a crash of the code (see PR #3650).

    Here, instead, we make sure that if this concurrency issue happens, one of the two will get a
    IntegrityError from the DB, and then recover trying to create a group with a different label (with a
    numeric suffix appended), until it manages to create it.

    :return: the (stored) `AutoGroup` instance to use for this run
    """
    from aiida.orm import QueryBuilder

    # When this function is called, if it is the first time, just generate
    # a new group name (later on, after this ``if`` block`).
    # In that case, we will later cache in ``self._group_label`` the group label,
    # So the group with the same name can be returned quickly in future
    # calls of this method.
    if self._group_label is not None:
        builder = QueryBuilder().append(AutoGroup, filters={'label': self._group_label})
        results = [res[0] for res in builder.iterall()]
        if results:
            # If it is not empty, it should have only one result due to the uniqueness constraints
            assert len(results) == 1, 'I got more than one autogroup with the same label!'
            return results[0]
        # There are no results: probably the group has been deleted.
        # I continue as if it was not cached
        self._group_label = None

    label_prefix = self.get_group_label_prefix()
    # Try to do a preliminary QB query to avoid to do too many try/except
    # if many of the prefix_NUMBER groups already exist
    queryb = QueryBuilder().append(
        AutoGroup,
        filters={
            'or': [{
                'label': {
                    '==': label_prefix
                }
            }, {
                'label': {
                    'like': escape_for_sql_like(label_prefix + '_') + '%'
                }
            }]
        },
        project='label')
    # Strip the prefix so only the (possible) numeric suffix remains for each existing label.
    existing_group_labels = [res[0][len(label_prefix):] for res in queryb.all()]
    existing_group_ints = []
    for label in existing_group_labels:
        if label == '':
            # This is just the prefix without name - corresponds to counter = 0
            existing_group_ints.append(0)
        elif label.startswith('_'):
            try:
                existing_group_ints.append(int(label[1:]))
            except ValueError:
                # It's not an integer, so it will never collide - just ignore it
                pass

    # Start counting from one past the largest suffix seen, so the first attempt is likely free.
    if not existing_group_ints:
        counter = 0
    else:
        counter = max(existing_group_ints) + 1

    # Retry loop: if another process stored a group with the same label in the meantime, the DB
    # uniqueness constraint raises IntegrityError and we simply try the next numeric suffix.
    while True:
        try:
            label = label_prefix if counter == 0 else '{}_{}'.format(label_prefix, counter)
            group = AutoGroup(label=label).store()
            self._group_label = group.label
        except exceptions.IntegrityError:
            counter += 1
        else:
            break

    return group
def group_list(all_users, user_email, all_types, group_type, with_description, count, past_days, startswith,
               endswith, contains, node):
    """Show a list of existing groups."""
    # pylint: disable=too-many-branches,too-many-arguments, too-many-locals
    import datetime

    from aiida import orm
    from aiida.common import timezone
    from aiida.common.escaping import escape_for_sql_like
    from aiida.orm import Group, QueryBuilder, User
    from tabulate import tabulate

    builder = QueryBuilder()

    # Restrict to a single group type unless all types were explicitly requested.
    filters = {} if all_types else {'type_string': {'==': group_type}}

    # Restrict to groups created within the given number of days.
    if past_days:
        filters['time'] = {'>': timezone.now() - datetime.timedelta(days=past_days)}

    # Build the substring conditions on the group label (prefix, suffix, infix).
    label_clauses = []
    for template, substring in (('{}%', startswith), ('%{}', endswith), ('%{}%', contains)):
        if substring:
            label_clauses.append({'label': {'like': template.format(escape_for_sql_like(substring))}})
    filters['or'] = label_clauses

    builder.append(Group, filters=filters, tag='group', project='*')

    # Determine whose groups to show: the requested email or, by default, the current user's.
    user = user_email if user_email else orm.User.objects.get_default().email

    # Unless all users were requested, join on the selected user.
    if not all_users:
        builder.append(User, filters={'email': {'==': user}}, with_group='group')

    # Restrict to groups that contain the given node.
    if node:
        from aiida.orm import Node
        builder.append(Node, filters={'id': {'==': node.id}}, with_group='group')

    builder.order_by({Group: {'id': 'asc'}})

    projectors = {
        'pk': lambda group: str(group.pk),
        'label': lambda group: group.label,
        'type_string': lambda group: group.type_string,
        'count': lambda group: group.count(),
        'user': lambda group: group.user.email.strip(),
        'description': lambda group: group.description,
    }

    headers = ['PK', 'Label', 'Type string', 'User']
    fields = ['pk', 'label', 'type_string', 'user']

    if with_description:
        headers.append('Description')
        fields.append('description')

    if count:
        headers.append('Node count')
        fields.append('count')

    table = [[projectors[field](row[0]) for field in fields] for row in builder.all()]

    if not all_types:
        echo.echo_info('If you want to see the groups of all types, please add -a/--all-types option')

    echo.echo(tabulate(table, headers=headers))
def group_list(all_users, user, all_entries, group_type, type_string, with_description, count, past_days,
               startswith, endswith, contains, order_by, order_dir, node):
    """Show a list of existing groups."""
    # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
    import datetime
    import warnings

    from aiida import orm
    from aiida.common import timezone
    from aiida.common.escaping import escape_for_sql_like
    from aiida.common.warnings import AiidaDeprecationWarning
    from tabulate import tabulate

    builder = orm.QueryBuilder()
    filters = {}

    # `--group-type` is deprecated in favour of `--type-string`; accept it but warn, and refuse
    # the combination of both options since they would conflict.
    if group_type is not None:
        warnings.warn('`--group-type` is deprecated, use `--type-string` instead', AiidaDeprecationWarning)  # pylint: disable=no-member

        if type_string is not None:
            raise click.BadOptionUsage('group-type', 'cannot use `--group-type` and `--type-string` at the same time.')
        else:
            type_string = group_type

    # Have to specify the default for `type_string` here instead of directly in the option otherwise it will always
    # raise above if the user specifies just the `--group-type` option. Once that option is removed, the default can
    # be moved to the option itself.
    if type_string is None:
        type_string = 'core'

    # Restrict on the type string unless all entries were requested; the presence of SQL wildcard
    # characters (`%`, `_`) in the value switches the filter from exact match to a LIKE pattern.
    if not all_entries:
        if '%' in type_string or '_' in type_string:
            filters['type_string'] = {'like': type_string}
        else:
            filters['type_string'] = type_string

    # Creation time
    if past_days:
        filters['time'] = {'>': timezone.now() - datetime.timedelta(days=past_days)}

    # Query for specific group names: prefix, suffix and infix matches on the (escaped) label.
    filters['or'] = []
    if startswith:
        filters['or'].append({'label': {'like': '{}%'.format(escape_for_sql_like(startswith))}})
    if endswith:
        filters['or'].append({'label': {'like': '%{}'.format(escape_for_sql_like(endswith))}})
    if contains:
        filters['or'].append({'label': {'like': '%{}%'.format(escape_for_sql_like(contains))}})

    builder.append(orm.Group, filters=filters, tag='group', project='*')

    # Query groups that belong to specific user
    if user:
        user_email = user.email
    else:
        # By default: only groups of this user
        user_email = orm.User.objects.get_default().email

    # Query groups that belong to all users
    if not all_users:
        builder.append(orm.User, filters={'email': {'==': user_email}}, with_group='group')

    # Query groups that contain a particular node
    if node:
        builder.append(orm.Node, filters={'id': {'==': node.id}}, with_group='group')

    builder.order_by({orm.Group: {order_by: order_dir}})
    result = builder.all()

    # Map projection field names onto functions extracting the displayed value from a `Group`.
    projection_lambdas = {
        'pk': lambda group: str(group.pk),
        'label': lambda group: group.label,
        'type_string': lambda group: group.type_string,
        'count': lambda group: group.count(),
        'user': lambda group: group.user.email.strip(),
        'description': lambda group: group.description
    }

    table = []
    projection_header = ['PK', 'Label', 'Type string', 'User']
    projection_fields = ['pk', 'label', 'type_string', 'user']

    if with_description:
        projection_header.append('Description')
        projection_fields.append('description')

    if count:
        projection_header.append('Node count')
        projection_fields.append('count')

    for group in result:
        table.append([projection_lambdas[field](group[0]) for field in projection_fields])

    if not all_entries:
        echo.echo_info('to show groups of all types, use the `-a/--all` option.')

    if not table:
        echo.echo_info('no groups found matching the specified criteria.')
    else:
        echo.echo(tabulate(table, headers=projection_header))