Example #1
    def test_comments(self):
        # Use timezone-aware datetimes (timezone.now()) when comparing with the
        # stored ones; with a naive datetime.datetime.now() you can get a
        # "can't compare offset-naive and offset-aware datetimes" error
        user = get_automatic_user()
        a = Node()
        with self.assertRaises(ModificationNotAllowed):
            a.add_comment('text', user=user)
        self.assertEqual(a.get_comments(), [])
        a.store()
        before = timezone.now()
        time.sleep(1)  # wait 1 second because MySQL time precision is 1 second
        a.add_comment('text', user=user)
        a.add_comment('text2', user=user)
        time.sleep(1)
        after = timezone.now()

        comments = a.get_comments()

        times = [i['mtime'] for i in comments]
        for t in times:
            self.assertTrue(t > before)
            self.assertTrue(t < after)

        self.assertEqual([(i['user__email'], i['content']) for i in comments],
                         [
                             (user.email, 'text'),
                             (user.email, 'text2'),
                         ])
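
The comment at the top of this test refers to a common pitfall: a naive datetime (no tzinfo) cannot be compared with an aware one. A minimal stand-alone sketch of that failure, using only the Python 3 standard library (no AiiDA):

import datetime

naive = datetime.datetime.now()                       # no tzinfo attached
aware = datetime.datetime.now(datetime.timezone.utc)  # tzinfo set to UTC

try:
    _ = naive < aware
except TypeError as exc:
    print(exc)  # can't compare offset-naive and offset-aware datetimes

# Two aware datetimes (what timezone.now() yields) compare without error:
print(aware <= datetime.datetime.now(datetime.timezone.utc))  # True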
Example #2
def print_last_process_state_change(process_type=None):
    """
    Print the last time that a process of the specified type has changed its state.
    This function will also print a warning if the daemon is not running.

    :param process_type: optional process type for which to get the latest state change timestamp.
        Valid process types are either 'calculation' or 'work'.
    """
    from aiida.cmdline.utils.echo import echo_info, echo_warning
    from aiida.daemon.client import DaemonClient
    from aiida.utils import timezone
    from aiida.common.utils import str_timedelta
    from aiida.work.utils import get_process_state_change_timestamp

    client = DaemonClient()

    timestamp = get_process_state_change_timestamp(process_type)

    if timestamp is None:
        echo_info('last time an entry changed state: never')
    else:
        timedelta = timezone.delta(timestamp, timezone.now())
        formatted = format_local_time(timestamp,
                                      format_str='at %H:%M:%S on %Y-%m-%d')
        relative = str_timedelta(timedelta,
                                 negative_to_zero=True,
                                 max_num_fields=1)
        echo_info('last time an entry changed state: {} ({})'.format(
            relative, formatted))

    if not client.is_daemon_running:
        echo_warning('the daemon is not running', bold=True)
Example #3
def _build_query(projections=None, order_by=None, limit=None, past_days=None):
    import datetime
    from aiida.utils import timezone
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.calculation.work import WorkCalculation

    # Define filters
    calculation_filters = {}

    if past_days is not None:
        n_days_ago = timezone.now() - datetime.timedelta(days=past_days)
        calculation_filters['ctime'] = {'>': n_days_ago}

    # Build the query
    qb = QueryBuilder()
    qb.append(cls=WorkCalculation,
              filters=calculation_filters,
              project=projections,
              tag='calculation')

    # Ordering of queryset
    if order_by is not None:
        qb.order_by({'calculation': order_by})

    # Limiting the queryset
    if limit is not None:
        qb.limit(limit)

    return qb.iterdict()
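
The past_days handling above is an instance of a general pattern: compute a timezone-aware cutoff N days in the past and keep only entries created after it. A minimal stand-alone sketch of the same cutoff logic over plain dictionaries (the records list is a made-up stand-in for query results):

import datetime

def filter_past_days(records, past_days):
    """Keep only records whose 'ctime' falls within the last past_days days."""
    now = datetime.datetime.now(datetime.timezone.utc)
    n_days_ago = now - datetime.timedelta(days=past_days)
    return [r for r in records if r['ctime'] > n_days_ago]

now = datetime.datetime.now(datetime.timezone.utc)
records = [
    {'id': 1, 'ctime': now - datetime.timedelta(days=10)},
    {'id': 2, 'ctime': now - datetime.timedelta(days=1)},
]
print([r['id'] for r in filter_past_days(records, past_days=7)])  # [2]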
Example #4
    def test_date(self):
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.utils import timezone
        from datetime import timedelta
        from aiida.orm.node import Node
        n = Node()
        now = timezone.now()
        n._set_attr('now', now)
        n.store()

        qb = QueryBuilder().append(Node,
                                   filters={
                                       'attributes.now': {
                                           "and": [
                                               {
                                                   ">":
                                                   now - timedelta(seconds=1)
                                               },
                                               {
                                                   "<":
                                                   now + timedelta(seconds=1)
                                               },
                                           ]
                                       }
                                   })
        self.assertEqual(qb.count(), 1)
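
The "and" filter above asserts that the stored attribute lies within a one-second window around now. In plain Python the same window test is a chained comparison, sketched here with stdlib datetimes standing in for the stored attribute:

import datetime

now = datetime.datetime.now(datetime.timezone.utc)
stored = now  # stand-in for the attribute value read back from the database
window = datetime.timedelta(seconds=1)

print(now - window < stored < now + window)  # True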
Example #5
def do_list(past_days, limit):
    """
    Print a list of running workflows to screen
    """
    from aiida.common.utils import str_timedelta
    from aiida.backends.utils import load_dbenv, is_dbenv_loaded
    if not is_dbenv_loaded():
        load_dbenv()
    import aiida.utils.timezone as timezone
    from aiida.orm.mixins import SealableMixin
    _SEALED_ATTRIBUTE_KEY = 'attributes.{}'.format(SealableMixin.SEALED_KEY)

    now = timezone.now()

    table = []
    for res in _build_query(limit=limit, past_days=past_days):
        calc = res['calculation']
        creation_time = str_timedelta(timezone.delta(calc['ctime'], now),
                                      negative_to_zero=True,
                                      max_num_fields=1)

        table.append([
            calc['id'], creation_time, calc['type'],
            str(calc[_SEALED_ATTRIBUTE_KEY])
        ])

    print(tabulate(table, headers=["PID", "Creation time", "Type", "Sealed"]))
Example #6
    def aquire(self, key, timeout=3600, owner="None"):
        session = get_scoped_session()
        try:
            with session.begin(subtransactions=True):
                dblock = DbLock(key=key, timeout=timeout, owner=owner)
                session.add(dblock)

            return Lock(dblock)

        except SQLAlchemyError:
            old_lock = DbLock.query.filter_by(key=key).first()

            timeout_secs = time.mktime(
                old_lock.creation.timetuple()) + old_lock.timeout
            now_secs = time.mktime(timezone.now().timetuple())

            if now_secs > timeout_secs:
                raise InternalError(
                    "A lock went over the timeout limit; this could undermine the integrity of the system. Reload the daemon to fix the problem."
                )
            else:
                raise LockPresent("A lock is present.")

        except:
            raise InternalError("Something went wrong, try to keep on.")
Example #7
def get_workflow_list(
        pk_list=tuple(), user=None, all_states=False, n_days_ago=None):
    """
    Get a list of workflow.
    :param user: A ORM User class if you want to filter by user
    :param pk_list: Limit the results to this list of PKs
    :param all_states: if False, limit results to "active" (e.g., running) wfs
    :param n_days_ago: an integer number of days. If specifies, limit results to
      workflows started up to this number of days ago
    """
    from aiida.backends.djsite.db.models import DbWorkflow

    if pk_list:
        filters = Q(pk__in=pk_list)
    else:
        filters = Q(user=user._dbuser)

        if not all_states:
            filters &= ~Q(state=wf_states.FINISHED) & ~Q(state=wf_states.ERROR)
        if n_days_ago:
            t = timezone.now() - datetime.timedelta(days=n_days_ago)
            filters &= Q(ctime__gte=t)

    wf_list = DbWorkflow.objects.filter(filters).order_by('ctime')

    return list(wf_list)
Example #8
    def aquire(self, key, timeout=3600, owner="None"):
        from aiida.backends.djsite.db.models import DbLock
        try:
            sid = transaction.savepoint()
            dblock = DbLock.objects.create(key=key,
                                           timeout=timeout,
                                           owner=owner)
            transaction.savepoint_commit(sid)
            return Lock(dblock)

        except IntegrityError:
            transaction.savepoint_rollback(sid)

            old_lock = DbLock.objects.get(key=key)
            timeout_secs = time.mktime(
                old_lock.creation.timetuple()) + old_lock.timeout
            now_secs = time.mktime(timezone.now().timetuple())

            if now_secs > timeout_secs:
                raise InternalError(
                    "A lock went over the timeout limit; this could undermine the integrity of the system. Reload the daemon to fix the problem."
                )
            else:
                raise LockPresent("A lock is present.")

        except:
            raise InternalError("Something went wrong, try to keep on.")
Example #9
def set_process_state_change_timestamp(process):
    """
    Set the global setting that reflects the last time a process changed state, for the process type
    of the given process, to the current timestamp. The process type will be determined based on
    the class of the calculation node it has as its database container.

    :param process: the Process instance that changed its state
    """
    from aiida.backends.utils import set_global_setting
    from aiida.common.exceptions import UniquenessError
    from aiida.orm.calculation.inline import InlineCalculation
    from aiida.orm.calculation.job import JobCalculation
    from aiida.utils import timezone

    if isinstance(process.calc, (JobCalculation, InlineCalculation)):
        process_type = 'calculation'
    elif is_work_calc_type(process.calc):
        process_type = 'work'
    else:
        raise ValueError('unsupported calculation node type {}'.format(
            type(process.calc)))

    key = PROCESS_STATE_CHANGE_KEY.format(process_type)
    description = PROCESS_STATE_CHANGE_DESCRIPTION.format(process_type)
    value = timezone.now()

    try:
        set_global_setting(key, value, description)
    except UniquenessError as exception:
        process.logger.debug(
            'could not update the {} setting because of a UniquenessError: {}'.
            format(key, exception))
Example #10
def run(scriptname, varargs, group, group_name, exclude, excludesubclasses,
        include, includesubclasses):
    # pylint: disable=too-many-arguments,exec-used
    """Execute an AiiDA script."""
    from aiida.cmdline.utils.shell import DEFAULT_MODULES_LIST
    from aiida.orm import autogroup

    # Prepare the environment for the script to be run
    globals_dict = {
        '__builtins__': globals()['__builtins__'],
        '__name__': '__main__',
        '__file__': scriptname,
        '__doc__': None,
        '__package__': None
    }

    # Dynamically load modules (the same as verdi shell), but into globals_dict, not into the current environment
    for app_mod, model_name, alias in DEFAULT_MODULES_LIST:
        globals_dict['{}'.format(alias)] = getattr(
            __import__(app_mod, {}, {}, model_name), model_name)

    if group:
        automatic_group_name = group_name
        if automatic_group_name is None:
            from aiida.utils import timezone

            automatic_group_name = 'Verdi autogroup on ' + timezone.now().strftime("%Y-%m-%d %H:%M:%S")

        aiida_verdilib_autogroup = autogroup.Autogroup()
        aiida_verdilib_autogroup.set_exclude(exclude)
        aiida_verdilib_autogroup.set_include(include)
        aiida_verdilib_autogroup.set_exclude_with_subclasses(excludesubclasses)
        aiida_verdilib_autogroup.set_include_with_subclasses(includesubclasses)
        aiida_verdilib_autogroup.set_group_name(automatic_group_name)

        # Note: this is also set in the exec environment! This is the intended behavior
        autogroup.current_autogroup = aiida_verdilib_autogroup

    try:
        handle = open(scriptname)
    except IOError:
        echo.echo_critical("Unable to load file '{}'".format(scriptname))
    else:
        try:
            # argv[0] (the script name) must also be included
            new_argv = [scriptname] + list(varargs)
            with update_environment(new_argv=new_argv):
                # Add local folder to sys.path
                sys.path.insert(0, os.path.abspath(os.curdir))
                # Pass only globals_dict
                exec(handle, globals_dict)
        except SystemExit:
            # Script called sys.exit()
            # Re-raise the exception to have the error code properly returned at the end
            raise
        finally:
            handle.close()
Example #11
    def isexpired(self):
        if self.dblock is None:
            return False

        timeout_secs = time.mktime(self.dblock.creation.timetuple()) + self.dblock.timeout
        now_secs = time.mktime(timezone.now().timetuple())

        return now_secs > timeout_secs
Example #12
 def get_group_name(self):
     """
     Get the name of the group.
     If no group name was set, it will set a default one by itself.
     """
     try:
         return self.group_name
     except AttributeError:
         now = timezone.now()
         gname = "Verdi autogroup on " + now.strftime("%Y-%m-%d %H:%M:%S")
         self.set_group_name(gname)
         return self.group_name
Example #13
def format_relative_time(datetime):
    """
    Return a string formatted timedelta of the given datetime with respect to the current datetime

    :param datetime: the datetime to format
    :return: string representation of the relative time since the given datetime
    """
    from aiida.common.utils import str_timedelta
    from aiida.utils import timezone

    timedelta = timezone.delta(datetime, timezone.now())

    return str_timedelta(timedelta, negative_to_zero=True, max_num_fields=1)
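
str_timedelta itself lives in aiida.common.utils and is not shown here; as a rough stand-in for what a one-field, negative-to-zero relative formatter does, consider this sketch (an illustration of the assumed behavior, not AiiDA's actual implementation):

import datetime

def rough_str_timedelta(delta, negative_to_zero=True):
    """Format a timedelta as its single largest unit, e.g. '3h ago'."""
    seconds = delta.total_seconds()
    if negative_to_zero and seconds < 0:
        seconds = 0
    for unit, size in (('d', 86400), ('h', 3600), ('m', 60)):
        if seconds >= size:
            return '{:.0f}{} ago'.format(seconds // size, unit)
    return '{:.0f}s ago'.format(seconds)

print(rough_str_timedelta(datetime.timedelta(hours=3, minutes=20)))  # 3h ago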
Example #14
    def query_past_days(self, q_object, args):
        """
        Subselect to filter data nodes by their age.

        :param q_object: a query object
        :param args: a namespace with parsed command line parameters.
        """
        from aiida.utils import timezone
        from django.db.models import Q
        import datetime
        if args.past_days is not None:
            now = timezone.now()
            n_days_ago = now - datetime.timedelta(days=args.past_days)
            q_object.add(Q(ctime__gte=n_days_ago), Q.AND)
Example #15
 def setUp(self):
     super(TestBackendLog, self).setUp()
     self._backend = construct()
     self._record = {
         'time': now(),
         'loggername': 'loggername',
         'levelname': logging.getLevelName(LOG_LEVEL_REPORT),
         'objname': 'objname',
         'objpk': 0,
         'message': 'This is a template record message',
         'metadata': {
             'content': 'test'
         },
     }
Example #16
    def get_query_set(self,
                      filters=None,
                      order_by=None,
                      past_days=None,
                      limit=None):
        """
        Return the query set of calculations for the given filters and query parameters

        :param filters: rules to filter query results with
        :param order_by: order the query set by this criterion
        :param past_days: only include entries from the last past days
        :param limit: limit the query set to this number of entries
        :return: the query set, a list of dictionaries
        """
        import datetime

        from aiida.orm.calculation import Calculation
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.utils import timezone

        projected_attributes = [
            self.mapper.get_attribute(projection)
            for projection in self._valid_projections
        ]

        if filters is None:
            filters = {}

        if past_days is not None:
            filters['ctime'] = {
                '>': timezone.now() - datetime.timedelta(days=past_days)
            }

        builder = QueryBuilder()
        builder.append(cls=Calculation,
                       filters=filters,
                       project=projected_attributes,
                       tag='calculation')

        if order_by is not None:
            builder.order_by({'calculation': order_by})
        else:
            builder.order_by({'calculation': {'ctime': 'asc'}})

        if limit is not None:
            builder.limit(limit)

        return builder.iterdict()
Example #17
def group_show(group, raw, uuid):
    """
    Show information on a given group. Pass the GROUP as a parameter.
    """

    from aiida.common.utils import str_timedelta
    from aiida.utils import timezone
    from aiida.plugins.loader import get_plugin_type_from_type_string
    from tabulate import tabulate

    if raw:
        if uuid:
            echo.echo(" ".join(str(_.uuid) for _ in group.nodes))
        else:
            echo.echo(" ".join(str(_.pk) for _ in group.nodes))
    else:
        type_string = group.type_string
        desc = group.description
        now = timezone.now()

        table = []
        table.append(["Group name", group.name])
        table.append(
            ["Group type", type_string if type_string else "<user-defined>"])
        table.append(
            ["Group description", desc if desc else "<no description>"])
        echo.echo(tabulate(table))

        table = []
        header = []
        if uuid:
            header.append('UUID')
        header.extend(['PK', 'Type', 'Created'])
        echo.echo("# Nodes:")
        for node in group.nodes:
            row = []
            if uuid:
                row.append(node.uuid)
            row.append(node.pk)
            row.append(
                get_plugin_type_from_type_string(node.type).rsplit(".", 1)[1])
            row.append(
                str_timedelta(now - node.ctime,
                              short=True,
                              negative_to_zero=True))
            table.append(row)
        echo.echo(tabulate(table, headers=header))
Example #18
def get_group_list(user, type_string, n_days_ago=None, name_filters=None):
    from aiida.orm.implementation.django.group import Group

    # Avoid the mutable default argument pitfall: normalize to an empty dict
    if name_filters is None:
        name_filters = {}

    name_filters = {
        "name__" + k: v
        for (k, v) in name_filters.iteritems() if v
    }

    if n_days_ago:
        n_days_ago = timezone.now() - datetime.timedelta(days=n_days_ago)

    groups = Group.query(user=user,
                         type_string=type_string,
                         past_days=n_days_ago,
                         **name_filters)

    return tuple([(str(g.pk), g.name, len(g.nodes), g.user.email.strip(),
                   g.description) for g in groups])
Example #19
def _build_query(order_by=None, limit=None, past_days=None):
    import datetime
    from aiida.utils import timezone
    from aiida.orm.mixins import Sealable
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.calculation.work import WorkCalculation

    _SEALED_ATTRIBUTE_KEY = 'attributes.{}'.format(Sealable.SEALED_KEY)
    _ABORTED_ATTRIBUTE_KEY = 'attributes.{}'.format(
        WorkCalculation.ABORTED_KEY)
    _FAILED_ATTRIBUTE_KEY = 'attributes.{}'.format(WorkCalculation.FAILED_KEY)
    _FINISHED_ATTRIBUTE_KEY = 'attributes.{}'.format(
        WorkCalculation.FINISHED_KEY)

    calculation_projections = [
        'id', 'ctime', 'attributes._process_label', _SEALED_ATTRIBUTE_KEY,
        _ABORTED_ATTRIBUTE_KEY, _FAILED_ATTRIBUTE_KEY, _FINISHED_ATTRIBUTE_KEY
    ]

    # Define filters
    calculation_filters = {}

    if past_days is not None:
        n_days_ago = timezone.now() - datetime.timedelta(days=past_days)
        calculation_filters['ctime'] = {'>': n_days_ago}

    # Build the query
    qb = QueryBuilder()
    qb.append(cls=WorkCalculation,
              filters=calculation_filters,
              project=calculation_projections,
              tag='calculation')

    # Ordering of queryset
    if order_by is not None:
        qb.order_by({'calculation': order_by})

    # Limiting the queryset
    if limit is not None:
        qb.limit(limit)

    return qb.iterdict()
Example #20
def _build_query(order_by=None, limit=None, past_days=None):
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.calculation.work import WorkCalculation
    import aiida.utils.timezone as timezone
    import datetime
    from aiida.orm.mixins import Sealable
    _SEALED_ATTRIBUTE_KEY = 'attributes.{}'.format(Sealable.SEALED_KEY)

    # The things that we want to get out
    calculation_projections = \
        ['id', 'ctime', 'attributes._process_label', _SEALED_ATTRIBUTE_KEY]

    now = timezone.now()

    # The things to filter by
    calculation_filters = {}

    if past_days is not None:
        n_days_ago = now - datetime.timedelta(days=past_days)
        calculation_filters['ctime'] = {'>': n_days_ago}

    qb = QueryBuilder()

    # Build the query
    qb.append(
        cls=WorkCalculation,
        filters=calculation_filters,
        project=calculation_projections,
        tag='calculation'
    )

    # ORDER
    if order_by is not None:
        qb.order_by({'calculation': order_by})

    # LIMIT
    if limit is not None:
        qb.limit(limit)

    return qb.iterdict()
Example #21
def query(datatype, project, past_days, group_pks, all_users):
    """
    Perform the query
    """
    import datetime

    from aiida.orm.implementation import Group
    from aiida.orm.user import User
    from aiida.orm.backend import construct_backend
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.utils import timezone

    backend = construct_backend()

    qbl = QueryBuilder()
    if all_users is False:
        user = backend.users.get_automatic_user()
        qbl.append(User, tag="creator", filters={"email": user.email})
    else:
        qbl.append(User, tag="creator")

    # If there is a time restriction
    data_filters = {}
    if past_days is not None:
        now = timezone.now()
        n_days_ago = now - datetime.timedelta(days=past_days)
        data_filters.update({"ctime": {'>=': n_days_ago}})

    qbl.append(datatype, tag="data", created_by="creator", filters=data_filters, project=project)

    # If there is a group restriction
    if group_pks is not None:
        group_filters = dict()
        group_filters.update({"id": {"in": group_pks}})
        qbl.append(Group, tag="group", filters=group_filters, group_of="data")

    qbl.order_by({datatype: {'ctime': 'asc'}})

    object_list = qbl.distinct()
    return object_list.all()
Example #22
def get_query(full, node_ids, past_days, timestamp, query_date_mode):
    """
    Construct the query
    :param bool full: if True, returns the full database.
    :param list node_ids: If this is set, and full is False, reurns the Node that match the idea
    :param str timestamp: The filename where a valid timestamp is stored.
        Will read the timestamps, and return query for everything above timestamp.
    :param int past_days: If this is set, and none of the above, returnes the nodes modified in
        the last past_days days.
    :returns: A querybuilder instance.
    """
    if full:
        # Returning a query that searches all nodes.
        return QueryBuilder().append(Node)
    elif node_ids:
        # Filter by id:
        return QueryBuilder().append(Node, filters={'id':{'in':node_ids}})
    elif timestamp:
        # Returning a query that searches all nodes.
        try:
            with open(timestamp) as f:
                for line in f.readlines():
                    # This way, the timestamp is the first float in the last non-empty line:
                    if line.strip():
                        timestamp_from = float(line.split()[0])
            datetime_from = datetime.datetime.fromtimestamp(timestamp_from)
            return QueryBuilder().append(Node, filters={query_date_mode:{'>=': datetime_from}})
        except Exception as e:
            print(e)
            print("There was an error reading the timestamp, I will return all nodes")
            return QueryBuilder().append(Node)
    elif past_days is not None:
        if past_days < 1:
            raise ValueError('past days has to be at least one')
        # Filtering by days before now:
        n_days_ago = timezone.now() - datetime.timedelta(days=past_days)
        return QueryBuilder().append(Node, filters={query_date_mode:{'>=': n_days_ago}})
    else:
        raise RuntimeError("Shouldn't get here, node_ids={} past_days={}".format(
                node_ids, past_days))
Example #23
def do_list(past_days, all_nodes, limit):
    """
    Print a list of running workflows to screen
    """
    from aiida.common.utils import str_timedelta
    from aiida.backends.utils import load_dbenv, is_dbenv_loaded
    if not is_dbenv_loaded():
        load_dbenv()
    import aiida.utils.timezone as timezone
    from aiida.orm.mixins import Sealable
    _SEALED_ATTRIBUTE_KEY = 'attributes.{}'.format(Sealable.SEALED_KEY)

    now = timezone.now()

    if all_nodes:
        past_days = None

    table = []
    for res in _build_query(limit=limit,
                            past_days=past_days,
                            order_by={'ctime': 'desc'}):
        calc = res['calculation']
        creation_time = str_timedelta(timezone.delta(calc['ctime'], now),
                                      negative_to_zero=True,
                                      max_num_fields=1)

        table.append([
            calc['id'], creation_time, calc['attributes._process_label'],
            str(calc[_SEALED_ATTRIBUTE_KEY])
        ])

    # Reverse the table: the query is ordered by 'desc' so that 'limit' keeps
    # the most recent entries, but the most recent entry is printed last
    # (like 'verdi calculation list' does)
    table = table[::-1]
    print(
        tabulate(table,
                 headers=["PID", "Creation time", "ProcessLabel", "Sealed"]))
Example #24
def get_workflow_list(
        pk_list=tuple(), user=None, all_states=False, n_days_ago=None):
    """
    Get a list of workflows.
    """

    from aiida.backends.djsite.db.models import DbWorkflow

    if pk_list:
        filters = Q(pk__in=pk_list)
    else:
        filters = Q(user=user)

        if not all_states:
            filters &= ~Q(state=wf_states.FINISHED) & ~Q(state=wf_states.ERROR)
        if n_days_ago:
            t = timezone.now() - datetime.timedelta(days=n_days_ago)
            filters &= Q(ctime__gte=t)

    wf_list = DbWorkflow.objects.filter(filters).order_by('ctime')

    return wf_list
Example #25
    def test_datetime_attribute(self):
        from aiida.utils.timezone import (get_current_timezone, is_naive,
                                          make_aware, now)

        a = Node()

        date = now()

        a._set_attr('some_date', date)
        a.store()

        retrieved = a.get_attr('some_date')

        if is_naive(date):
            date_to_compare = make_aware(date, get_current_timezone())
        else:
            date_to_compare = date

        # Do not compare microseconds (they are not stored in the case of MySQL)
        date_to_compare = date_to_compare.replace(microsecond=0)
        retrieved = retrieved.replace(microsecond=0)

        self.assertEqual(date_to_compare, retrieved)
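
For reference, the is_naive/make_aware pair used above can be approximated with the plain standard library; this sketch mirrors the documented Python rules for naive datetimes rather than AiiDA's actual helpers:

import datetime

def is_naive(dt):
    """True if dt carries no usable timezone information."""
    return dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None

def make_aware(dt, tz=datetime.timezone.utc):
    """Attach tz to a naive datetime without shifting the wall-clock value."""
    return dt.replace(tzinfo=tz)

date = datetime.datetime.now()           # naive
print(is_naive(date))                    # True
print(is_naive(make_aware(date)))        # False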
Example #26
    def get_bands_and_parents_structure(self, args):
        """
        Search for bands and return bands and the closest structure that is a parent of the instance.
        This is the backend independent way, can be overriden for performance reason

        :returns:
            A list of sublists, each latter containing (in order):
                pk as string, formula as string, creation date, bandsdata-label
        """

        import datetime
        from aiida.utils import timezone
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.backends.utils import get_automatic_user
        from aiida.orm.implementation import User
        from aiida.orm.implementation import Group
        from aiida.orm.data.structure import (get_formula, get_symbols_string)
        from aiida.orm.data.array.bands import BandsData
        from aiida.orm.data.structure import StructureData

        qb = QueryBuilder()
        if args.all_users is False:
            au = get_automatic_user()
            user = User(dbuser=au)
            qb.append(User, tag="creator", filters={"email": user.email})
        else:
            qb.append(User, tag="creator")

        bdata_filters = {}
        if args.past_days is not None:
            now = timezone.now()
            n_days_ago = now - datetime.timedelta(days=args.past_days)
            bdata_filters.update({"ctime": {'>=': n_days_ago}})

        qb.append(BandsData,
                  tag="bdata",
                  created_by="creator",
                  filters=bdata_filters,
                  project=["id", "label", "ctime"])

        group_filters = {}

        if args.group_name is not None:
            group_filters.update({"name": {"in": args.group_name}})
        if args.group_pk is not None:
            group_filters.update({"id": {"in": args.group_pk}})
        if group_filters:
            qb.append(Group,
                      tag="group",
                      filters=group_filters,
                      group_of="bdata")

        qb.append(
            StructureData,
            tag="sdata",
            ancestor_of="bdata",
            # We don't care about the creator of StructureData
            project=["id", "attributes.kinds", "attributes.sites"])

        qb.order_by({StructureData: {'ctime': 'desc'}})

        list_data = qb.distinct()

        entry_list = []
        already_visited_bdata = set()

        for [bid, blabel, bdate, sid, akinds, asites] in list_data.all():

            # We process only one StructureData per BandsData.
            # We want to process the closest StructureData to
            # every BandsData.
            # We hope that the StructureData with the latest
            # creation time is the closest one.
            # This will be updated when the QueryBuilder supports
            # order_by by the distance of two nodes.
            if bid in already_visited_bdata:
                continue
            already_visited_bdata.add(bid)

            if args.element is not None:
                all_symbols = [_["symbols"][0] for _ in akinds]
                if not any([s in args.element for s in all_symbols]):
                    continue

            if args.element_only is not None:
                all_symbols = [_["symbols"][0] for _ in akinds]
                if not all([s in all_symbols for s in args.element_only]):
                    continue

            # We want only the StructureData that have attributes
            if akinds is None or asites is None:
                continue

            symbol_dict = {}
            for k in akinds:
                symbols = k['symbols']
                weights = k['weights']
                symbol_dict[k['name']] = get_symbols_string(symbols, weights)

            try:
                symbol_list = []
                for s in asites:
                    symbol_list.append(symbol_dict[s['kind_name']])
                formula = get_formula(symbol_list, mode=args.formulamode)
            # If for some reason there is no kind with the name
            # referenced by the site
            except KeyError:
                formula = "<<UNKNOWN>>"
            entry_list.append(
                [str(bid),
                 str(formula),
                 bdate.strftime('%d %b %Y'), blabel])

        return entry_list
Example #27
    def group_list(self, *args):
        """
        Print a list of groups in the DB.
        """
        if not is_dbenv_loaded():
            load_dbenv()

        import datetime
        from aiida.utils import timezone
        from aiida.orm.group import get_group_type_mapping
        from aiida.backends.utils import get_automatic_user
        from tabulate import tabulate

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='List AiiDA user-defined groups.')
        exclusive_group = parser.add_mutually_exclusive_group()
        exclusive_group.add_argument('-A', '--all-users',
                                     dest='all_users', action='store_true',
                                     help="Show groups for all users, rather than only for the current user")
        exclusive_group.add_argument('-u', '--user', metavar='USER_EMAIL',
                                     help="Add a filter to show only groups belonging to a specific user",
                                     action='store', type=str)
        parser.add_argument('-t', '--type', metavar='TYPE',
                            help="Show groups of a specific type, instead of user-defined groups",
                            action='store', type=str)
        parser.add_argument('-d', '--with-description',
                            dest='with_description', action='store_true',
                            help="Show also the group description")
        parser.add_argument('-p', '--past-days', metavar='N',
                            help="add a filter to show only groups created in the past N days",
                            action='store', type=int)
        parser.add_argument('-s', '--startswith', metavar='STRING',
                            default=None,
                            help="add a filter to show only groups for which the name begins with STRING",
                            action='store', type=str)
        parser.add_argument('-e', '--endswith', metavar='STRING', default=None,
                            help="add a filter to show only groups for which the name ends with STRING",
                            action='store', type=str)
        parser.add_argument('-c', '--contains', metavar='STRING', default=None,
                            help="add a filter to show only groups for which the name contains STRING",
                            action='store', type=str)
        parser.add_argument('-n', '--node', metavar='PK', default=None,
                            help="Show only the groups that contain the node specified by PK",
                            action='store', type=int)
        parser.set_defaults(all_users=False)
        parser.set_defaults(with_description=False)

        args = list(args)
        parsed_args = parser.parse_args(args)

        if parsed_args.all_users:
            user = None
        else:
            if parsed_args.user:
                user = parsed_args.user
            else:
                # By default: only groups of this user
                user = get_automatic_user()

        type_string = ""
        if parsed_args.type is not None:
            try:
                type_string = get_group_type_mapping()[parsed_args.type]
            except KeyError:
                print >> sys.stderr, "Invalid group type. Valid group types are:"
                print >> sys.stderr, ",".join(sorted(
                    get_group_type_mapping().keys()))
                sys.exit(1)

        name_filters = dict((k, getattr(parsed_args, k))
                            for k in ['startswith', 'endswith', 'contains'])

        n_days_ago = None
        if parsed_args.past_days:
            n_days_ago = (timezone.now() -
                          datetime.timedelta(days=parsed_args.past_days))

        # Depending on the --node option, include or omit the "nodes" argument in the query
        from aiida.orm.implementation import Group
        from aiida.orm import load_node

        node_pk = parsed_args.node
        if node_pk is not None:
            try:
                node = load_node(node_pk)
            except NotExistent as e:
                print >> sys.stderr, "Error: {}.".format(e.message)
                sys.exit(1)
            res = Group.query(user=user, type_string=type_string, nodes=node,
                              past_days=n_days_ago, name_filters=name_filters)
        else:
            res = Group.query(user=user, type_string=type_string,
                              past_days=n_days_ago, name_filters=name_filters)

        groups = tuple([(str(g.pk), g.name, len(g.nodes), g.user.email.strip(),
                         g.description) for g in res])


        table = []
        if parsed_args.with_description:
            table_header = \
                ["PK", "GroupName", "NumNodes", "User", "Description"]
            for pk, nam, nod, usr, desc in groups:
                table.append([pk, nam, nod, usr, desc])

        else:
            table_header = ["PK", "GroupName", "NumNodes", "User"]
            for pk, nam, nod, usr, _ in groups:
                table.append([pk, nam, nod, usr])
        print(tabulate(table, headers=table_header))
Example #28
    def group_show(self, *args):
        """
        Show information on a given group. Pass the PK as a parameter.
        """
        if not is_dbenv_loaded():
            load_dbenv()

        import argparse
        from aiida.common.exceptions import NotExistent
        from aiida.orm import Group as G
        from aiida.common.utils import str_timedelta
        from aiida.utils import timezone
        from aiida.common.pluginloader import from_type_to_pluginclassname
        from tabulate import tabulate

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Information on a given AiiDA group.')
        parser.add_argument('-r', '--raw',
                            dest='raw', action='store_true',
                            help="Show only a space-separated list of PKs of "
                                 "the calculations in the group")
        parser.add_argument('-u', '--uuid',
                            dest='uuid', action='store_true',
                            help="Show UUIDs together with PKs. Note: if the "
                                 "--raw option is also passed, PKs are not "
                                 "printed, but oly UUIDs.")
        parser.add_argument('GROUP', help="The PK of the group to show")
        parser.set_defaults(raw=False)
        parser.set_defaults(uuid=False)

        args = list(args)
        parsed_args = parser.parse_args(args)

        group = parsed_args.GROUP
        try:
            group_pk = int(group)
        except ValueError:
            group_pk = None
            group_name = group

        if group_pk is not None:
            try:
                group = G(dbgroup=group_pk)
            except NotExistent as e:
                print >> sys.stderr, "Error: {}.".format(e.message)
                sys.exit(1)
        else:
            try:
                group = G.get_from_string(group_name)
            except NotExistent as e:
                print >> sys.stderr, "Error: {}.".format(e.message)
                sys.exit(1)

        group_pk = group.pk
        group_name = group.name

        if parsed_args.raw:
            if parsed_args.uuid:
                print " ".join(str(_.uuid) for _ in group.nodes)
            else:
                print " ".join(str(_.pk) for _ in group.nodes)
        else:
            type_string = group.type_string
            desc = group.description
            now = timezone.now()

            table = []
            table.append(["Group name", group.name])
            table.append(["Group type",
                          type_string if type_string else "<user-defined>"])
            table.append(["Group description",
                          desc if desc else "<no description>"])
            print(tabulate(table))

            table = []
            header = []
            if parsed_args.uuid:
                header.append('UUID')
            header.extend(['PK', 'Type', 'Created'])
            print "# Nodes:"
            for n in group.nodes:
                row = []
                if parsed_args.uuid:
                    row.append(n.uuid)
                row.append(n.pk)
                row.append(from_type_to_pluginclassname(n.dbnode.type).
                           rsplit(".", 1)[1])

                row.append(str_timedelta(now - n.ctime, short=True,
                                         negative_to_zero=True))
                table.append(row)
            print(tabulate(table, headers=header))
Example #29
def get_workflow_info(w, tab_size=2, short=False, pre_string="", depth=16):
    """
    Return a string with all the information regarding the given workflow and
    all its calculations and subworkflows.
    This is a recursive function (to print all subworkflows info as well).

    :param w: a DbWorkflow instance
    :param tab_size: number of spaces to use for the indentation
    :param short: if True, provide a shorter output (only total number of
        calculations, rather than the state of each calculation)
    :param pre_string: string appended at the beginning of each line
    :param depth: the maximum depth level the recursion on sub-workflows will
                  try to reach (0 means we stay at the step level and don't go
                  into sub-workflows, 1 means we go down to one step level of
                  the sub-workflows, etc.)

    :return lines: list of lines to be output
    """
    # Note: pre_string becomes larger at each call of get_workflow_info on the
    #       subworkflows: pre_string -> pre_string + "|" + " "*(tab_size-1))
    # TODO SP: abstract the dependence on DbWorkflow

    from aiida.backends.djsite.db.models import DbWorkflow

    if tab_size < 2:
        raise ValueError("tab_size must be at least 2")

    now = timezone.now()

    lines = []

    if w.label:
        wf_labelstring = "'{}', ".format(w.label)
    else:
        wf_labelstring = ""

    lines.append(pre_string)  # put an empty line before any workflow
    lines.append(pre_string + "+ Workflow {} ({}pk: {}) is {} [{}]".format(
        w.module_class, wf_labelstring, w.pk, w.state,
        str_timedelta(now - w.ctime, negative_to_zero=True)))

    # print information on the steps only if depth is higher than 0
    if depth > 0:

        # order all steps by time and get all the needed values
        steps_and_subwf_pks = w.steps.all().order_by(
            'time', 'sub_workflows__ctime',
            'calculations__ctime').values_list('pk', 'sub_workflows__pk',
                                               'calculations', 'name',
                                               'nextcall', 'state')
        # get the list of step pks (distinct), preserving the order
        steps_pk = []
        for item in steps_and_subwf_pks:
            if item[0] not in steps_pk:
                steps_pk.append(item[0])

        # build a dictionary with all the infos for each step pk
        subwfs_of_steps = {}
        for step_pk, subwf_pk, calc_pk, name, nextcall, state in steps_and_subwf_pks:
            if step_pk not in subwfs_of_steps.keys():
                subwfs_of_steps[step_pk] = {
                    'name': name,
                    'nextcall': nextcall,
                    'state': state,
                    'subwf_pks': [],
                    'calc_pks': [],
                }
            if subwf_pk:
                subwfs_of_steps[step_pk]['subwf_pks'].append(subwf_pk)
            if calc_pk:
                subwfs_of_steps[step_pk]['calc_pks'].append(calc_pk)

        # TODO SP: abstract this
        # get all subworkflows for all steps
        wflows = DbWorkflow.objects.filter(
            parent_workflow_step__in=steps_pk)  # .order_by('ctime')
        # dictionary mapping pks into workflows
        workflow_mapping = {_.pk: _ for _ in wflows}

        # get all calculations for all steps
        calcs = JobCalculation.query(
            workflow_step__in=steps_pk)  # .order_by('ctime')
        # dictionary mapping pks into calculations
        calc_mapping = {_.pk: _ for _ in calcs}

        for step_pk in steps_pk:
            lines.append(
                pre_string + "|" + '-' * (tab_size - 1) +
                "* Step: {0} [->{1}] is {2}".format(
                    subwfs_of_steps[step_pk]['name'], subwfs_of_steps[step_pk]
                    ['nextcall'], subwfs_of_steps[step_pk]['state']))

            calc_pks = subwfs_of_steps[step_pk]['calc_pks']

            # print calculations only if it is not short
            if short:
                lines.append(pre_string + "|" + " " * (tab_size - 1) +
                             "| [{0} calculations]".format(len(calc_pks)))
            else:
                for calc_pk in calc_pks:
                    c = calc_mapping[calc_pk]
                    calc_state = c.get_state()
                    if c.label:
                        labelstring = "'{}', ".format(c.label)
                    else:
                        labelstring = ""

                    if calc_state == calc_states.WITHSCHEDULER:
                        sched_state = c.get_scheduler_state()
                        if sched_state is None:
                            remote_state = "(remote state still unknown)"
                        else:
                            last_check = c._get_scheduler_lastchecktime()
                            if last_check is not None:
                                when_string = " {}".format(
                                    str_timedelta(now - last_check,
                                                  short=True,
                                                  negative_to_zero=True))
                                verb_string = "was "
                            else:
                                when_string = ""
                                verb_string = ""
                            remote_state = " ({}{}{})".format(
                                verb_string, sched_state, when_string)
                    else:
                        remote_state = ""
                    lines.append(
                        pre_string + "|" + " " * (tab_size - 1) +
                        "| Calculation ({}pk: {}) is {}{}".format(
                            labelstring, calc_pk, calc_state, remote_state))

            ## SubWorkflows
            for subwf_pk in subwfs_of_steps[step_pk]['subwf_pks']:
                subwf = workflow_mapping[subwf_pk]
                lines.extend(
                    get_workflow_info(subwf,
                                      short=short,
                                      tab_size=tab_size,
                                      pre_string=pre_string + "|" + " " *
                                      (tab_size - 1),
                                      depth=depth - 1))

            lines.append(pre_string + "|")

    return lines
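
The pre_string mechanics described in the comment at the top of this function are easier to see in isolation. A toy sketch of the same prefix-growing recursion over hypothetical nested dicts (not DbWorkflow objects):

def render_tree(node, pre_string="", tab_size=2):
    """Emit one line per node, growing the prefix for each nesting level."""
    lines = [pre_string + "+ " + node['name']]
    for child in node.get('children', []):
        lines.extend(render_tree(child, pre_string + "|" + " " * (tab_size - 1)))
    return lines

tree = {'name': 'wf', 'children': [{'name': 'step1', 'children': [{'name': 'subwf'}]}]}
print('\n'.join(render_tree(tree)))
# + wf
# | + step1
# | | + subwf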
Example #30
    def calculation_cleanworkdir(self, *args):
        """
        Clean the working directory of calculations by removing all the content of the
        associated RemoteFolder node. Calculations can be identified by pk with the -k flag
        or by specifying limits on the modification times with -p/-o flags
        """
        import argparse

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description="""
                Clean all content of all output remote folders of calculations,
                passed as a list of pks, or identified by modification time.

                If a list of calculation PKs is not passed with the -k option, one or both
                of the -p and -o options has to be specified. If both are specified, a logical
                AND is done between the two, i.e. the calculations that will be cleaned have been
                modified AFTER [-p option] days from now but BEFORE [-o option] days from now.
                Passing the -f option will prevent the confirmation dialog from being prompted.
                """
        )
        parser.add_argument(
            '-k', '--pk', metavar='PK', type=int, nargs='+', dest='pk',
            help='The principal key (PK) of the calculations of which to clean the work directory'
        )
        parser.add_argument(
            '-f', '--force', action='store_true',
            help='Force the cleaning (no prompt)'
        )
        parser.add_argument(
            '-p', '--past-days', metavar='N', type=int, action='store', dest='past_days',
            help='Include calculations that have been modified within the last N days', 
        )
        parser.add_argument(
            '-o', '--older-than', metavar='N', type=int, action='store', dest='older_than',
            help='Include calculations that have been modified more than N days ago',
        )
        parser.add_argument(
            '-c', '--computers', metavar='label', nargs='+', type=str, action='store', dest='computer',
            help='Include only calculations that were run on these computers'
        )

        if not is_dbenv_loaded():
            load_dbenv()

        from aiida.backends.utils import get_automatic_user
        from aiida.backends.utils import get_authinfo
        from aiida.common.utils import query_yes_no
        from aiida.orm.computer import Computer as OrmComputer
        from aiida.orm.user import User as OrmUser
        from aiida.orm.calculation import Calculation as OrmCalculation
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.utils import timezone
        import datetime

        parsed_args = parser.parse_args(args)

        # If a pk is given then the -o & -p options should not be specified
        if parsed_args.pk is not None:
            if (parsed_args.past_days is not None or parsed_args.older_than is not None):
                print("You cannot specify both a list of calculation pks and the -p or -o options")
                return

        # If no pk is given then at least one of the -o & -p options should be specified
        else:
            if (parsed_args.past_days is None and parsed_args.older_than is None):
                print("You should specify at least a list of calculations or the -p, -o options")
                return

        qb_user_filters = dict()
        user = OrmUser(dbuser=get_automatic_user())
        qb_user_filters["email"] = user.email

        qb_computer_filters = dict()
        if parsed_args.computer is not None:
            qb_computer_filters["name"] = {"in": parsed_args.computer}

        qb_calc_filters = dict()
        if parsed_args.past_days is not None:
            pd_ts = timezone.now() - datetime.timedelta(days=parsed_args.past_days)
            qb_calc_filters["mtime"] = {">": pd_ts}
        if parsed_args.older_than is not None:
            ot_ts = timezone.now() - datetime.timedelta(days=parsed_args.older_than)
            qb_calc_filters["mtime"] = {"<": ot_ts}
        if parsed_args.pk is not None:
            print("parsed_args.pk: ", parsed_args.pk)
            qb_calc_filters["id"] = {"in": parsed_args.pk}

        qb = QueryBuilder()
        qb.append(OrmCalculation, tag="calc",
                  filters=qb_calc_filters,
                  project=["id", "uuid", "attributes.remote_workdir"])
        qb.append(OrmComputer, computer_of="calc", tag="computer",
                  project=["*"],
                  filters=qb_computer_filters)
        qb.append(OrmUser, creator_of="calc", tag="user",
                  project=["*"],
                  filters=qb_user_filters)

        no_of_calcs = qb.count()
        if no_of_calcs == 0:
            print("No calculations found with the given criteria.")
            return

        print("Found {} calculations with the given criteria.".format(
            no_of_calcs))

        if not parsed_args.force:
            if not query_yes_no("Are you sure you want to clean the work "
                                "directory?", "no"):
                return

        # get the uuids of all calculations matching the filters
        calc_list_data = qb.dict()

        # get all computers associated to the calc uuids above, and load them
        # we group them by uuid to avoid computer duplicates
        comp_uuid_to_computers = {_["computer"]["*"].uuid: _["computer"]["*"] for _ in calc_list_data}

        # now build a dictionary with the info of folders to delete
        remotes = {}
        for computer in comp_uuid_to_computers.values():
            # initialize a key of info for a given computer
            remotes[computer.name] = {'transport': get_authinfo(
                computer=computer, aiidauser=user._dbuser).get_transport(),
                                      'computer': computer,
            }

            # select the calc pks done on this computer
            this_calc_pks = [_["calc"]["id"] for _ in calc_list_data
                             if _["computer"]["*"].id == computer.id]

            this_calc_uuids = [unicode(_["calc"]["uuid"])
                               for _ in calc_list_data
                               if _["computer"]["*"].id == computer.id]

            remote_workdirs = [_["calc"]["attributes.remote_workdir"]
                               for _ in calc_list_data
                               if _["calc"]["id"] in this_calc_pks
                               if _["calc"]["attributes.remote_workdir"]
                               is not None]

            remotes[computer.name]['remotes'] = remote_workdirs
            remotes[computer.name]['uuids'] = this_calc_uuids

        # now proceed to cleaning
        for computer, dic in remotes.iteritems():
            print("Cleaning the work directory on computer {}.".format(computer))
            counter = 0
            t = dic['transport']
            with t:
                remote_user = t.whoami()
                aiida_workdir = dic['computer'].get_workdir().format(
                    username=remote_user)

                t.chdir(aiida_workdir)
                # Hardcoding the sharding equal to 3 parts!
                existing_folders = t.glob('*/*/*')

                folders_to_delete = [i for i in existing_folders if
                                     i.replace("/", "") in dic['uuids']]

                for folder in folders_to_delete:
                    t.rmtree(folder)
                    counter += 1
                    if counter % 20 == 0 and counter > 0:
                        print("Deleted work directories: {}".format(counter))

            print("{} remote folder(s) cleaned.".format(counter))