Example #1
    def test_multiple_node_creation(self):
        """
        This test checks that a node is not added automatically to the session
        (and subsequently committed) when a user is in the session.
        It tests the fix for issue #234.
        """
        from aiida.backends.sqlalchemy.models.node import DbNode
        from aiida.common.utils import get_new_uuid
        from aiida.backends.utils import get_automatic_user

        import aiida.backends.sqlalchemy

        # Get the automatic user
        user = get_automatic_user()
        # Create a new node but don't add it to the session
        node_uuid = get_new_uuid()
        DbNode(user=user, uuid=node_uuid, type=None)

        session = aiida.backends.sqlalchemy.get_scoped_session()

        # Query the session before commit
        res = session.query(DbNode.uuid).filter(
            DbNode.uuid == node_uuid).all()
        self.assertEqual(len(res), 0, "There should not be any nodes with this "
                                      "UUID in the session/DB.")

        # Commit the transaction
        session.commit()

        # Check again that the node is not in the DB
        res = session.query(DbNode.uuid).filter(
            DbNode.uuid == node_uuid).all()
        self.assertEqual(len(res), 0, "There should not be any nodes with this "
                                      "UUID in the session/DB.")

        # Get the automatic user
        user = get_automatic_user()
        # Create a new node but now add it to the session
        node_uuid = get_new_uuid()
        node = DbNode(user=user, uuid=node_uuid, type=None)
        session.add(node)

        # Query the session before commit
        res = session.query(DbNode.uuid).filter(
            DbNode.uuid == node_uuid).all()
        self.assertEqual(len(res), 1,
                         "There should be a node in the session/DB with the "
                         "UUID {}".format(node_uuid))

        # Commit the transaction
        session.commit()

        # Check again that the node is in the db
        res = session.query(DbNode.uuid).filter(
            DbNode.uuid == node_uuid).all()
        self.assertEqual(len(res), 1,
                         "There should be a node in the session/DB with the "
                         "UUID {}".format(node_uuid))
Example #2
def kill_all():
    w_list = DbWorkflow.query.filter(
        DbWorkflow.user == get_automatic_user(),
        DbWorkflow.state != wf_states.FINISHED).all()

    for w in w_list:
        Workflow.get_subclass_from_uuid(w.uuid).kill()
Example #3
    def get_step(self, step_method):
        """
        Retrieves by name a step from the Workflow.
        :param step_method: a string with the name of the step to retrieve, or a decorated step method
        :raise: InternalError: if the reserved exit-step name is queried.
        :return: a DbWorkflowStep object, or None if no step with that name exists.
        """
        if isinstance(step_method, basestring):
            step_method_name = step_method
        else:
            if not getattr(step_method, "is_wf_step", False):
                raise AiidaException(
                    "Cannot get step calculations from a method not decorated as Workflow method"
                )

            step_method_name = step_method.wf_step_name

        if step_method_name == wf_exit_call:
            raise InternalError(
                "Cannot query a step with name {0}, reserved string".format(
                    step_method_name))

        step_list = self.dbworkflowinstance.steps
        step = [
            _ for _ in step_list
            if _.name == step_method_name and _.user == get_automatic_user()
        ]
        try:
            return step[0]
        except IndexError:
            return None
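
A minimal usage sketch for get_step, assuming wf is an instance of a Workflow subclass and 'start' is the name of one of its decorated steps (both names are illustrative, not taken from the example above):

# Illustrative only: `wf` and the step name 'start' are assumptions.
step = wf.get_step('start')
if step is None:
    print "step 'start' has not been created yet"
else:
    print "step 'start' is in state {0}".format(step.state)
# A decorated step method can be passed instead of its name:
# step = wf.get_step(wf.start)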
Example #4
    def _prepare_group_for_upload(cls,
                                  group_name,
                                  group_description=None,
                                  dry_run=False):
        """Prepare a (possibly new) group to upload a POTCAR family to."""
        if not dry_run:
            group, group_created = Group.get_or_create(
                name=group_name, type_string=cls.potcar_family_type_string)
        else:
            group = cls.get_potcar_group(group_name)
            group_created = bool(not group)
            if not group:
                group = Group(name=group_name)

        if group.user != get_automatic_user():
            raise UniquenessError(
                'There is already a PotcarFamily group with name {}, but it belongs to user {}, therefore you cannot modify it'
                .format(group_name, group.user.email))

        if group_description:
            group.description = group_description
        elif group_created:
            raise ValueError(
                'A new PotcarGroup {} should be created but no description was given!'
                .format(group_name))

        return group
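
For context, a hedged sketch of how such a classmethod might be called; the PotcarData class name, the family name and the description below are assumptions for illustration and are not taken from the snippet:

# Illustrative only: PotcarData and the argument values are assumed.
# With dry_run=True an existing family is looked up but nothing new is committed.
group = PotcarData._prepare_group_for_upload(
    group_name='PBE.54',
    group_description='PBE PAW potentials, version 54',
    dry_run=True)
print group.name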
Example #5
    def _get_codes(self, input_plugin):
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm import Code, Computer
        from aiida.backends.utils import get_automatic_user

        current_user = get_automatic_user()

        qb = QueryBuilder()
        qb.append(Computer,
                  filters={'enabled': True},
                  project=['*'],
                  tag='computer')
        qb.append(Code,
                  filters={
                      'attributes.input_plugin': {
                          '==': input_plugin
                      },
                      'extras.hidden': {
                          "~==": True
                      }
                  },
                  project=['*'],
                  has_computer='computer')
        results = qb.all()

        # only codes on computers configured for the current user
        results = [r for r in results if r[0].is_user_configured(current_user)]

        codes = {"{}@{}".format(r[1].label, r[0].name): r[1] for r in results}
        return codes
Example #6
    def test_comments(self):
        # Use timezone-aware times when comparing with the stored dates;
        # using datetime.datetime.now() directly can give a
        # "can't compare offset-naive and offset-aware datetimes" error
        user = get_automatic_user()
        a = Node()
        with self.assertRaises(ModificationNotAllowed):
            a.add_comment('text', user=user)
        self.assertEquals(a.get_comments(), [])
        a.store()
        before = timezone.now()
        time.sleep(1)  # I wait 1 second because MySql time precision is 1 sec
        a.add_comment('text', user=user)
        a.add_comment('text2', user=user)
        time.sleep(1)
        after = timezone.now()

        comments = a.get_comments()

        times = [i['mtime'] for i in comments]
        for t in times:
            self.assertTrue(t > before)
            self.assertTrue(t < after)

        self.assertEquals([(i['user__email'], i['content']) for i in comments],
                          [
                              (user.email, 'text'),
                              (user.email, 'text2'),
                          ])
Example #7
    def comment_remove(self, *args):
        """
        Remove comments. A user can only remove their own comments.
        """
        # Note: in fact, the user can still manually delete any comment
        import argparse
        from aiida.backends.utils import get_automatic_user

        if not is_dbenv_loaded():
            load_dbenv()
        user = get_automatic_user()

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Remove comments of a node.')
        parser.add_argument('pk', metavar='PK', type=int,
                            help='The pk (an integer) of the node')
        parser.add_argument('id', metavar='ID', type=int, default=None, nargs='?',
                            help='#ID of the comment to be removed from node #PK')
        parser.add_argument('-a', '--all', action='store_true', default=False,
                            help='If used, deletes all the comments of the active user attached to the node')
        parsed_args = parser.parse_args(args)

        if parsed_args.id is None and not parsed_args.all:
            print "One argument between -a and ID must be provided"
            sys.exit(1)
        if parsed_args.id is not None and parsed_args.all:
            print "Only one between -a and ID should be provided"
            sys.exit(1)

        node = load_node(parsed_args.pk)

        allowed_trues = ['1', 't', 'true', 'y', 'yes']
        if parsed_args.all:
            sys.stdout.write("Delete all comments of user {}? ".format(user))
            inpread = sys.stdin.readline()
            do_I_delete = inpread.strip().lower() in allowed_trues

            if not do_I_delete:
                print "Not deleting comment. Aborting."
                sys.exit(1)
            else:
                comments = node.get_comment_obj(user=user)
                for comment in comments:
                    comment.delete()
                print("Deleted {} comments.".format(len(comments)))

        else:
            sys.stdout.write("Delete comment? ")
            inpread = sys.stdin.readline()
            do_I_delete = inpread.strip().lower() in allowed_trues

            if not do_I_delete:
                print "Not deleting comment. Aborting."
                sys.exit(1)
            else:
                from aiida.orm.implementation import Comment as CommentOrm
                c = CommentOrm(id=parsed_args.id, user=user)
                c.delete()
Example #8
    def test_query(self):
        """
        Test if queries are working
        """

        g1 = Group(name='testquery1').store()
        g2 = Group(name='testquery2').store()

        n1 = Node().store()
        n2 = Node().store()
        n3 = Node().store()
        n4 = Node().store()

        g1.add_nodes([n1, n2])
        g2.add_nodes([n1, n3])

        newuser = DbUser(email='*****@*****.**', password='').save()
        g3 = Group(name='testquery3', user=newuser).store()

        # I should find it
        g1copy = Group.get(uuid=g1.uuid)
        self.assertEquals(g1.pk, g1copy.pk)

        # Try queries
        res = Group.query(nodes=n4)
        self.assertEquals([_.pk for _ in res], [])

        res = Group.query(nodes=n1)
        self.assertEquals([_.pk for _ in res], [_.pk for _ in [g1, g2]])

        res = Group.query(nodes=n2)
        self.assertEquals([_.pk for _ in res], [_.pk for _ in [g1]])

        # I try to use 'get' with zero or multiple results
        with self.assertRaises(NotExistent):
            Group.get(nodes=n4)
        with self.assertRaises(MultipleObjectsError):
            Group.get(nodes=n1)

        self.assertEquals(Group.get(nodes=n2).pk, g1.pk)

        # Query by user
        res = Group.query(user=newuser)
        self.assertEquals(set(_.pk for _ in res), set(_.pk for _ in [g3]))

        # Same query, but using a string (the username=email) instead of
        # a DbUser object
        res = Group.query(user=newuser.email)
        self.assertEquals(set(_.pk for _ in res), set(_.pk for _ in [g3]))

        res = Group.query(user=get_automatic_user())
        self.assertEquals(set(_.pk for _ in res), set(_.pk for _ in [g1, g2]))

        # Final cleanup
        g1.delete()
        g2.delete()
        newuser.delete()
Example #9
def get_automatic_user():
    try:
        from aiida.backends.utils import get_automatic_user
        automatic_user = get_automatic_user()
    except ImportError:
        from aiida.orm.backend import construct_backend
        backend = construct_backend()
        automatic_user = backend.users.get_automatic_user()
    return automatic_user
Example #10
    def comment_update(self, *args):
        """
        Update a comment
        """
        import argparse
        from aiida.backends.utils import get_automatic_user

        if not is_dbenv_loaded():
            load_dbenv()
        user = get_automatic_user()

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Update a comment of a node in the database.')
        parser.add_argument('pk',
                            metavar='PK',
                            type=int,
                            help='The pk (an integer) of the node')
        parser.add_argument('id',
                            metavar='ID',
                            type=int,
                            help='Identify the comment to update by ID')
        parser.add_argument(
            '-c',
            '--comment',
            type=str,
            default=None,
            help='The comment (a string) to be added to the node')
        parsed_args = parser.parse_args(args)

        # read the comment from terminal if it is not on command line
        if parsed_args.comment is None:
            print "Write below the comment that you want to save in the database."
            print "   # This is a multiline input, press CTRL+D on a"
            print "   # empty line when you finish"
            try:
                newlines = []
                while True:
                    input_txt = raw_input()
                    if input_txt.strip() == '?':
                        print "\n".join([
                            "  > {}".format(descl)
                            for descl in "HELP: {}".format(desc).split('\n')
                        ])
                        continue
                    else:
                        newlines.append(input_txt)
            except EOFError:
                # Ctrl+D pressed: end of input.
                pass
            the_comment = "\n".join(newlines)
        else:
            the_comment = parsed_args.comment

        node = load_node(parsed_args.pk)
        node._update_comment(the_comment, parsed_args.id, user)
Example #11
def get_current_user():
    """Get current user backwards compatibly with aiida-core <= 0.12.1."""
    current_user = None
    if backend_obj_users():
        from aiida.orm.backend import construct_backend  # pylint: disable=no-name-in-module
        backend = construct_backend()
        current_user = backend.users.get_automatic_user()
    else:
        from aiida.backends.utils import get_automatic_user  # pylint: disable=no-name-in-module
        current_user = get_automatic_user()
    return current_user
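
Both compatibility helpers (this one and get_automatic_user in Example #9) return a user object whose email attribute is available in old and new AiiDA versions alike; a minimal sketch of a call site, assuming a database environment is already loaded:

# Minimal sketch; assumes the profile/database environment is already loaded.
current_user = get_current_user()
print "Running as {}".format(current_user.email)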
Example #12
    def comment_show(self, *args):
        """
        Show the comments of a node
        """
        import argparse
        from aiida.backends.utils import get_automatic_user

        if not is_dbenv_loaded():
            load_dbenv()
        user = get_automatic_user()

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Show the comments of a node in the database.')
        parser.add_argument('-u', '--user', type=str, default=None,
                            help='Show only the comments of a given user (optional).')
        parser.add_argument('pk', metavar='PK', type=int,
                            help='The pk (an integer) of the node.')
        parser.add_argument('id', metavar='ID', type=int, default=None, nargs='?',
                            help='The id (an integer) of the comment.')
        # Note that this is a false description, I'm using the DBComment.pk
        parsed_args = parser.parse_args(args)

        node = load_node(parsed_args.pk)
        all_comments = node.get_comments(pk=parsed_args.id)

        if parsed_args.user is not None:
            to_print = [i for i in all_comments if i['user__email'] == parsed_args.user]
            if not to_print:
                print "Nothing found for user '{}'.".format(parsed_args.user)
                print "Valid users found for Node {} are: {}.".format(parsed_args.pk,
                                                                      ", ".join(set(
                                                                          ["'" + i['user__email'] + "'" for i in
                                                                           all_comments])))
        else:
            to_print = all_comments

        if parsed_args.id is not None:
            to_print = [i for i in to_print if i['pk'] == parsed_args.id]

        for i in to_print:
            print "***********************************************************"
            print "Comment of '{}' on {}".format(i['user__email'],
                                                 i['ctime'].strftime("%Y-%m-%d %H:%M"))
            print "ID: {}. Last modified on {}".format(i['pk'],
                                                       i['mtime'].strftime("%Y-%m-%d %H:%M"))
            print ""
            print "{}".format(i['content'])
            print ""

        # If there is nothing to print, print a message
        if not to_print:
            print "No comment found."
Example #13
        def wrapper(cls, *args, **kwargs):
            # Store the workflow at the first step executed
            if cls._to_be_stored:
                cls.store()

            if len(args) > 0:
                raise AiidaException(
                    "A step method cannot have any argument, use add_attribute to the workflow"
                )

            # If a method is launched and the step is RUNNING or INITIALIZED we should stop
            if cls.has_step(wrapped_method) and \
                    not (cls.get_step(wrapped_method).state == wf_states.ERROR or
                         cls.get_step(wrapped_method).state == wf_states.SLEEP or
                         cls.get_step(wrapped_method).nextcall == wf_default_call or
                         cls.get_step(wrapped_method).nextcall == wrapped_method):
                raise AiidaException(
                    "The step {0} has already been initialized, cannot change this outside the parent workflow !"
                    .format(wrapped_method))

            # If a method is launched and the step is halted for ERROR, then clean the step and re-launch
            if cls.has_step(wrapped_method) and \
                    (cls.get_step(wrapped_method).state == wf_states.ERROR or
                     cls.get_step(wrapped_method).state == wf_states.SLEEP):

                for w in cls.get_step(wrapped_method).get_sub_workflows():
                    w.kill()
                cls.get_step(wrapped_method).remove_sub_workflows()

                for c in cls.get_step(wrapped_method).get_calculations():
                    c.kill()
                cls.get_step(wrapped_method).remove_calculations()

                #self.get_steps(wrapped_method).set_nextcall(wf_exit_call)

            method_step, created = cls.dbworkflowinstance._get_or_create_step(
                name=wrapped_method, user=get_automatic_user())

            try:
                fun(cls)
            except:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                cls.append_to_report(
                    "ERROR ! This workflow got an error in the {0} method, we report down the stack trace"
                    .format(wrapped_method))
                cls.append_to_report("full traceback: {0}".format(
                    traceback.format_exc()))
                method_step.set_state(wf_states.ERROR)
                cls.set_state(wf_states.ERROR)
            return None
Example #14
    def __init__(self, **kwargs):
        super(Node, self).__init__()

        self._temp_folder = None

        dbnode = kwargs.pop('dbnode', None)

        # Set the internal parameters
        # Can be redefined in the subclasses
        self._init_internal_params()

        if dbnode is not None:
            if not isinstance(dbnode, DbNode):
                raise TypeError("dbnode is not a DbNode instance")
            if dbnode.id is None:
                raise ValueError("If cannot load an aiida.orm.Node instance "
                                 "from an unsaved DbNode object.")
            if kwargs:
                raise ValueError("If you pass a dbnode, you cannot pass any "
                                 "further parameter")

            # If I am loading, I cannot modify it
            self._to_be_stored = False

            self._dbnode = dbnode

            # If this is changed, fix also the importer
            self._repo_folder = RepositoryFolder(section=self._section_name,
                                                 uuid=self._dbnode.uuid)

        else:
            # TODO: allow to get the user from the parameters
            user = get_automatic_user()

            self._dbnode = DbNode(user=user,
                                  uuid=get_new_uuid(),
                                  type=self._plugin_type_string)

            self._to_be_stored = True

            # As creating the temp folder may require some time on slow
            # filesystems, we defer its creation
            self._temp_folder = None
            # Used only before the first save
            self._attrs_cache = {}
            # If this is changed, fix also the importer
            self._repo_folder = RepositoryFolder(section=self._section_name,
                                                 uuid=self.uuid)

            # Automatically set all *other* attributes, if possible, otherwise
            # stop
            self._set_with_defaults(**kwargs)
Example #15
    def get_or_create_famgroup(cls, famname):
        """Returns a PAW family group, creates it if it didn't exists"""
        from aiida.orm import Group
        from aiida.backends.utils import get_automatic_user

        group, group_created = Group.get_or_create(name=famname,
                                                   type_string=cls.group_type)

        if group.user != get_automatic_user():
            raise UniquenessError("There is already a UpfFamily group "
                                  "with name {}, but it belongs to user {},"
                                  " therefore you cannot modify it".format(
                                      famname, group.user.email))
        return group, group_created
Example #16
    def run_listislands(self, *args):
        """
        List all AiiDA nodes that have neither parents nor children.
        """
        load_dbenv()
        from django.db.models import Q
        from aiida.orm.node import Node
        from aiida.backends.utils import get_automatic_user

        q_object = Q(user=get_automatic_user())
        q_object.add(Q(parents__isnull=True), Q.AND)
        q_object.add(Q(children__isnull=True), Q.AND)

        node_list = Node.query(q_object).distinct().order_by('ctime')
        print "ID\tclass"
        for node in node_list:
            print "{}\t{}".format(node.pk, node.__class__.__name__)
Example #17
def get_code_options(plugin_classes):
    """
    Return AiiDA codes using a specific set of plugins
    
    :param plugin_classes: a dictionary of the type
      {'pw': 'quantumespresso.pw', 'ph': 'quantumespresso.ph'}
      where the key is a label and the value is the plugin to check for.
      It will return the set of codes that exist on the same machine.
    """
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm import Code, Computer
    from aiida.backends.utils import get_automatic_user

    current_user = get_automatic_user()

    qb = QueryBuilder()
    qb.append(Computer,
              filters={'enabled': True},
              project=['*'],
              tag='computer')
    ordered_tags = []
    for tag, plugin_class in plugin_classes.iteritems():
        ordered_tags.append(tag)
        qb.append(Code,
                  filters={
                      'attributes.input_plugin': {
                          '==': plugin_class
                      },
                      'extras.hidden': {
                          "~==": True
                      }
                  },
                  project='label',
                  tag='{}code'.format(tag),
                  has_computer='computer')
    all_results = qb.all()
    # Filter in python only the ones that are actually user_configured
    # codeset[0] is the computer
    # codeset[1:] are the various code names, depending on the ones asked in input
    return [{
        tag: "{}@{}".format(codename, codeset[0].name)
        for codename, tag in zip(codeset[1:], ordered_tags)
    } for codeset in all_results if codeset[0].is_user_configured(current_user)
            and codeset[0].is_user_enabled(current_user)]
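
A short usage sketch matching the plugin_classes format described in the docstring above; the plugin strings are the ones the docstring itself gives as examples:

# Usage sketch: plugin strings taken from the docstring example above.
plugin_classes = {'pw': 'quantumespresso.pw', 'ph': 'quantumespresso.ph'}
for option in get_code_options(plugin_classes):
    # each entry maps the label to a 'codename@computername' string,
    # e.g. {'pw': 'pw-6.0@localhost', 'ph': 'ph-6.0@localhost'} (illustrative values)
    print option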
Example #18
def test_full(new_database, new_workdir):
    from aiida_crystal17.calculations.cry_main_immigrant import CryMainImmigrantCalculation

    computer = tests.get_computer(workdir=new_workdir, configure=True)
    code = tests.get_code(entry_point='crystal17.main', computer=computer)

    inpath = os.path.join(TEST_DIR, "input_files", 'nio_sto3g_afm.crystal.d12')
    outpath = os.path.join(TEST_DIR, "output_files",
                           'nio_sto3g_afm.crystal.out')

    shutil.copy(inpath, new_workdir)
    shutil.copy(outpath, new_workdir)

    resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 16}

    calc = CryMainImmigrantCalculation(
        computer=computer,
        resources=resources,
        remote_workdir=new_workdir,
        input_file_name='nio_sto3g_afm.crystal.d12',
        output_file_name='nio_sto3g_afm.crystal.out')
    calc.use_code(code)

    try:
        # aiida v0.12
        from aiida.backends.utils import get_authinfo, get_automatic_user
        authinfo = get_authinfo(computer=computer,
                                aiidauser=get_automatic_user())
        transport = authinfo.get_transport()
    except ImportError:
        # aiida v1
        transport = computer.get_transport()

    with transport as open_transport:
        calc.create_input_nodes(open_transport)
        calc.prepare_for_retrieval_and_parsing(open_transport)

    assert set(calc.get_inputs_dict().keys()) == set(
        ['basis_O', 'parameters', 'settings', 'basis_Ni', 'code', 'structure'])
Example #19
    def get_paw_groups(cls, elements=None, symbols=None, user=None):
        """Find all paw groups containing potentials with the given attributes"""
        from aiida.orm import Group
        from aiida.backends.utils import get_automatic_user
        params = {
            'type_string': cls.group_type,
            'node_attributes': {
                'element': elements,
                'symbol': symbols
            }
        }
        if user:
            params['user'] = user
        else:
            params['user'] = get_automatic_user()

        res = Group.query(**params)
        groups = [(g.name, g) for g in res]
        # Sort by name
        groups.sort()
        # Return the groups, without name
        return [i[1] for i in groups]
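
A hedged usage sketch for this classmethod; the PawData class name and the element list are illustrative assumptions only:

# Illustrative only: the owning class name PawData is an assumption.
fe_groups = PawData.get_paw_groups(elements=['Fe'])
for group in fe_groups:
    print group.name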
Example #20
    def __init__(self, **kwargs):
        given_dbgroup = kwargs.pop('dbgroup', None)

        if given_dbgroup is not None:

            # Check that there is no other parameter passed besides dbgroup
            if kwargs:
                raise ValueError("If you pass a dbgroups, you cannot pass any "
                                 "further parameter")

            if isinstance(given_dbgroup, (int, long)):
                dbgroup_res = DbGroup.query.filter_by(id=given_dbgroup).first()
                if not dbgroup_res:
                    raise NotExistent("Group with pk={} does not exist".format(
                        given_dbgroup))
                self._dbgroup = dbgroup_res
            elif isinstance(given_dbgroup, DbGroup):
                self._dbgroup = given_dbgroup

        else:
            name = kwargs.pop('name', None)
            if name is None:
                raise ValueError("You have to specify a group name")
            group_type = kwargs.pop('type_string',
                                    "")  # By default, a user group
            user = kwargs.pop('user', get_automatic_user())
            description = kwargs.pop('description', "")

            if kwargs:
                raise ValueError("Too many parameters passed to Group, the "
                                 "unknown parameters are: {}".format(", ".join(
                                     kwargs.keys())))

            self._dbgroup = DbGroup(name=name,
                                    description=description,
                                    user=user,
                                    type=group_type)
Example #21
    def computer_list(self, *args):
        """
        List available computers
        """
        import argparse

        if not is_dbenv_loaded():
            load_dbenv()
        from aiida.orm.computer import Computer as AiiDAOrmComputer
        from aiida.backends.utils import get_automatic_user

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='List the computers in the database.')
        # The default states are those that are shown if no option is given
        parser.add_argument(
            '-C',
            '--color',
            action='store_true',
            help="Use colors to help visualizing the different categories",
        )
        parser.add_argument(
            '-o',
            '--only-usable',
            action='store_true',
            help="Show only computers that are usable (i.e., "
            "configured for the given user and enabled)",
        )
        parser.add_argument(
            '-p',
            '--parsable',
            action='store_true',
            help="Show only the computer names, one per line, "
            "without any other information or string.",
        )
        parser.add_argument(
            '-a',
            '--all',
            action='store_true',
            help="Show also disabled or unconfigured computers",
        )
        parser.set_defaults(also_disabled=False)
        parsed_args = parser.parse_args(args)
        use_colors = parsed_args.color
        only_usable = parsed_args.only_usable
        parsable = parsed_args.parsable
        all_comps = parsed_args.all

        computer_names = self.get_computer_names()

        if use_colors:
            color_id = 90  # Dark gray
            color_id = None  # Default color
            if color_id is not None:
                start_color = "\x1b[{}m".format(color_id)
                end_color = "\x1b[0m"
            else:
                start_color = ""
                end_color = ""
        else:
            start_color = ""
            end_color = ""

        if not parsable:
            print "{}# List of configured computers:{}".format(
                start_color, end_color)
            print(
                "{}# (use 'verdi computer show COMPUTERNAME' "
                "to see the details){}".format(start_color, end_color))
        if computer_names:
            for name in sorted(computer_names):
                computer = AiiDAOrmComputer.get(name)

                # color_id = 90 # Dark gray
                # color_id = 34 # Blue

                is_configured = computer.is_user_configured(
                    get_automatic_user())
                is_user_enabled = computer.is_user_enabled(
                    get_automatic_user())

                is_usable = False  # True if both enabled and configured

                if not all_comps:
                    if not is_configured or not is_user_enabled or not computer.is_enabled(
                    ):
                        continue

                if computer.is_enabled():
                    if is_configured:
                        configured_str = ""
                        if is_user_enabled:
                            symbol = "*"
                            color_id = None
                            enabled_str = ""
                            is_usable = True
                        else:
                            symbol = "x"
                            color_id = 31  # Red
                            enabled_str = "[DISABLED for this user]"
                    else:
                        symbol = "x"
                        color_id = 90  # Dark gray
                        enabled_str = ""
                        configured_str = " [unconfigured]"
                else:  # GLOBALLY DISABLED
                    symbol = "x"
                    color_id = 31  # Red
                    if is_configured and not is_user_enabled:
                        enabled_str = " [DISABLED globally AND for this user]"
                    else:
                        enabled_str = " [DISABLED globally]"
                    if is_configured:
                        configured_str = ""
                    else:
                        configured_str = " [unconfigured]"

                if use_colors:
                    if color_id is not None:
                        start_color = "\x1b[{}m".format(color_id)
                        bold_sequence = "\x1b[1;{}m".format(color_id)
                        nobold_sequence = "\x1b[0;{}m".format(color_id)
                    else:
                        start_color = "\x1b[0m"
                        bold_sequence = "\x1b[1m"
                        nobold_sequence = "\x1b[0m"
                    end_color = "\x1b[0m"
                else:
                    start_color = ""
                    end_color = ""
                    bold_sequence = ""
                    nobold_sequence = ""

                if parsable:
                    print "{}{}{}".format(start_color, name, end_color)
                else:
                    if (not only_usable) or is_usable:
                        print "{}{} {}{}{} {}{}{}".format(
                            start_color, symbol, bold_sequence, name,
                            nobold_sequence, enabled_str, configured_str,
                            end_color)

        else:
            print "# No computers configured yet. Use 'verdi computer setup'"
Example #22
    def computer_configure(self, *args):
        """
        Configure the authentication information for a given computer
        """
        if not is_dbenv_loaded():
            load_dbenv()

        import readline
        import inspect

        from django.core.exceptions import ObjectDoesNotExist

        from aiida.common.exceptions import (NotExistent, ValidationError)
        from aiida.backends.utils import get_automatic_user
        from aiida.common.utils import get_configured_user_email
        from aiida.backends.settings import BACKEND
        from aiida.backends.profile import BACKEND_SQLA, BACKEND_DJANGO

        import argparse

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Configure a computer for a given AiiDA user.')
        # The default states are those that are shown if no option is given
        parser.add_argument(
            '-u',
            '--user',
            type=str,
            metavar='EMAIL',
            help=
            "Configure the computer for the given AiiDA user (otherwise, configure the current default user)",
        )
        parser.add_argument(
            'computer',
            type=str,
            help="The name of the computer that you want to configure")

        parsed_args = parser.parse_args(args)

        user_email = parsed_args.user
        computername = parsed_args.computer

        try:
            computer = self.get_computer(name=computername)
        except NotExistent:
            print >> sys.stderr, "No computer exists with name '{}'".format(
                computername)
            sys.exit(1)
        if user_email is None:
            user = get_automatic_user()
        else:
            from aiida.orm.querybuilder import QueryBuilder
            qb = QueryBuilder()
            qb.append(type="user", filters={'email': user_email})
            user = qb.first()
            if user is None:
                print >> sys.stderr, ("No user with email '{}' in the "
                                      "database.".format(user_email))
                sys.exit(1)

        if BACKEND == BACKEND_DJANGO:
            from aiida.backends.djsite.db.models import DbAuthInfo

            try:
                authinfo = DbAuthInfo.objects.get(
                    dbcomputer=computer.dbcomputer, aiidauser=user)

                old_authparams = authinfo.get_auth_params()
            except ObjectDoesNotExist:
                authinfo = DbAuthInfo(dbcomputer=computer.dbcomputer,
                                      aiidauser=user)
                old_authparams = {}

        elif BACKEND == BACKEND_SQLA:
            from aiida.backends.sqlalchemy.models.authinfo import DbAuthInfo
            from aiida.backends.sqlalchemy import session

            authinfo = session.query(DbAuthInfo).filter(
                DbAuthInfo.dbcomputer == computer.dbcomputer).filter(
                    DbAuthInfo.aiidauser == user).first()
            if authinfo is None:
                authinfo = DbAuthInfo(dbcomputer=computer.dbcomputer,
                                      aiidauser=user)
                old_authparams = {}
            else:
                old_authparams = authinfo.get_auth_params()
        else:
            raise Exception("Unknown backend {}".format(BACKEND))
        Transport = computer.get_transport_class()

        print("Configuring computer '{}' for the AiiDA user '{}'".format(
            computername, user.email))

        print "Computer {} has transport of type {}".format(
            computername, computer.get_transport_type())

        if user.email != get_configured_user_email():
            print "*" * 72
            print "** {:66s} **".format("WARNING!")
            print "** {:66s} **".format(
                "  You are configuring a different user.")
            print "** {:66s} **".format(
                "  Note that the default suggestions are taken from your")
            print "** {:66s} **".format(
                "  local configuration files, so they may be incorrect.")
            print "*" * 72

        valid_keys = Transport.get_valid_auth_params()

        default_authparams = {}
        for k in valid_keys:
            if k in old_authparams:
                default_authparams[k] = old_authparams.pop(k)
        if old_authparams:
            print(
                "WARNING: the following keys were previously in the "
                "authorization parameters,")
            print "but have not been recognized and have been deleted:"
            print ", ".join(old_authparams.keys())

        if not valid_keys:
            print "There are no special keys to be configured. Configuration completed."
            authinfo.set_auth_params({})
            authinfo.save()
            return

        print ""
        print "Note: to leave a field unconfigured, leave it empty and press [Enter]"

        # I strip out the old auth_params that are not among the valid keys

        new_authparams = {}

        for k in valid_keys:
            key_set = False
            while not key_set:
                try:
                    converter_name = '_convert_{}_fromstring'.format(k)
                    try:
                        converter = dict(
                            inspect.getmembers(Transport))[converter_name]
                    except KeyError:
                        print >> sys.stderr, (
                            "Internal error! "
                            "No {} defined in Transport {}".format(
                                converter_name, computer.get_transport_type()))
                        sys.exit(1)

                    if k in default_authparams:
                        readline.set_startup_hook(lambda: readline.insert_text(
                            str(default_authparams[k])))
                    else:
                        # Use suggestion only if parameters were not already set
                        suggester_name = '_get_{}_suggestion_string'.format(k)
                        try:
                            suggester = dict(
                                inspect.getmembers(Transport))[suggester_name]
                            suggestion = suggester(computer)
                            readline.set_startup_hook(
                                lambda: readline.insert_text(suggestion))
                        except KeyError:
                            readline.set_startup_hook()

                    txtval = raw_input("=> {} = ".format(k))
                    if txtval:
                        new_authparams[k] = converter(txtval)
                    key_set = True
                except ValidationError as e:
                    print "Error in the inserted value: {}".format(e.message)

        authinfo.set_auth_params(new_authparams)
        authinfo.save()
        print "Configuration stored for your user on computer '{}'.".format(
            computername)
Example #23
    def get_bands_and_parents_structure(self, args):
        """
        Search for bands and return the bands together with the closest structure that is a parent of each instance.
        This is the backend-independent way; it can be overridden for performance reasons.

        :returns:
            A list of sublists, each containing (in order):
                the pk as a string, the formula as a string, the creation date, and the BandsData label
        """

        import datetime
        from aiida.utils import timezone
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.backends.utils import get_automatic_user
        from aiida.orm.implementation import User
        from aiida.orm.implementation import Group
        from aiida.orm.data.structure import (get_formula, get_symbols_string)
        from aiida.orm.data.array.bands import BandsData
        from aiida.orm.data.structure import StructureData

        qb = QueryBuilder()
        if args.all_users is False:
            au = get_automatic_user()
            user = User(dbuser=au)
            qb.append(User, tag="creator", filters={"email": user.email})
        else:
            qb.append(User, tag="creator")

        bdata_filters = {}
        if args.past_days is not None:
            now = timezone.now()
            n_days_ago = now - datetime.timedelta(days=args.past_days)
            bdata_filters.update({"ctime": {'>=': n_days_ago}})

        qb.append(BandsData,
                  tag="bdata",
                  created_by="creator",
                  filters=bdata_filters,
                  project=["id", "label", "ctime"])

        group_filters = {}

        if args.group_name is not None:
            group_filters.update({"name": {"in": args.group_name}})
        if args.group_pk is not None:
            group_filters.update({"id": {"in": args.group_pk}})
        if group_filters:
            qb.append(Group,
                      tag="group",
                      filters=group_filters,
                      group_of="bdata")

        qb.append(
            StructureData,
            tag="sdata",
            ancestor_of="bdata",
            # We don't care about the creator of StructureData
            project=["id", "attributes.kinds", "attributes.sites"])

        qb.order_by({StructureData: {'ctime': 'desc'}})

        list_data = qb.distinct()

        entry_list = []
        already_visited_bdata = set()

        for [bid, blabel, bdate, sid, akinds, asites] in list_data.all():

            # We process only one StructureData per BandsData.
            # We want to process the closest StructureData to
            # every BandsData.
            # We hope that the StructureData with the latest
            # creation time is the closest one.
            # This will be updated when the QueryBuilder supports
            # order_by by the distance of two nodes.
            if bid in already_visited_bdata:
                continue
            already_visited_bdata.add(bid)

            if args.element is not None:
                all_symbols = [_["symbols"][0] for _ in akinds]
                if not any([s in args.element for s in all_symbols]):
                    continue

            if args.element_only is not None:
                all_symbols = [_["symbols"][0] for _ in akinds]
                if not all([s in all_symbols for s in args.element_only]):
                    continue

            # We want only the StructureData that have attributes
            if akinds is None or asites is None:
                continue

            symbol_dict = {}
            for k in akinds:
                symbols = k['symbols']
                weights = k['weights']
                symbol_dict[k['name']] = get_symbols_string(symbols, weights)

            try:
                symbol_list = []
                for s in asites:
                    symbol_list.append(symbol_dict[s['kind_name']])
                formula = get_formula(symbol_list, mode=args.formulamode)
            # If for some reason there is no kind with the name
            # referenced by the site
            except KeyError:
                formula = "<<UNKNOWN>>"
            entry_list.append(
                [str(bid),
                 str(formula),
                 bdate.strftime('%d %b %Y'), blabel])

        return entry_list
Example #24
    def workflow_list(self, *args):
        """
        Return a list of workflows on screen
        """
        from aiida.backends.utils import load_dbenv, is_dbenv_loaded

        if not is_dbenv_loaded():
            load_dbenv()

        from aiida.backends.utils import get_workflow_list, get_automatic_user
        from aiida.orm.workflow import get_workflow_info
        from aiida.orm import User

        import argparse

        parser = argparse.ArgumentParser(prog=self.get_full_command_name(),
                                         description='List AiiDA workflows.')
        parser.add_argument('-s',
                            '--short',
                            action='store_true',
                            help="show shorter output "
                            "(only subworkflows and steps, no calculations)")
        parser.add_argument(
            '-a',
            '--all-states',
            action='store_true',
            help="show all existing "
            "AiiDA workflows, not only running ones",
        )
        parser.add_argument(
            '-d',
            '--depth',
            metavar='M',
            action='store',
            type=int,
            default=16,
            help="add a filter "
            "to show only steps down to a depth of M levels in "
            "subworkflows (0 means only the parent "
            "workflows are shown)")
        parser.add_argument(
            '-p',
            '--past-days',
            metavar='N',
            action='store',
            type=int,
            help="add a "
            "filter to show only workflows created in the past N days")
        parser.add_argument(
            'pks',
            type=int,
            nargs='*',
            help="a list of workflows to show. If empty, "
            "all running workflows are shown. If non-empty, "
            "automatically sets --all and ignores the -p option.")

        tab_size = 2  # how many spaces to use for indentation of subworkflows

        args = list(args)
        parsed_args = parser.parse_args(args)

        workflows = get_workflow_list(parsed_args.pks,
                                      user=User(dbuser=get_automatic_user()),
                                      all_states=parsed_args.all_states,
                                      n_days_ago=parsed_args.past_days)

        for w in workflows:
            if not w.is_subworkflow() or w.pk in parsed_args.pks:
                print "\n".join(
                    get_workflow_info(w,
                                      tab_size=tab_size,
                                      short=parsed_args.short,
                                      depth=parsed_args.depth))
        if not workflows:
            if parsed_args.all_states:
                print "# No workflows found"
            else:
                print "# No running workflows found"
Example #25
    def group_list(self, *args):
        """
        Print a list of groups in the DB.
        """
        if not is_dbenv_loaded():
            load_dbenv()

        import datetime
        from aiida.utils import timezone
        from aiida.orm.group import get_group_type_mapping
        from aiida.backends.utils import get_automatic_user
        from tabulate import tabulate

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='List AiiDA user-defined groups.')
        exclusive_group = parser.add_mutually_exclusive_group()
        exclusive_group.add_argument('-A', '--all-users',
                                     dest='all_users', action='store_true',
                                     help="Show groups for all users, rather than only for the current user")
        exclusive_group.add_argument('-u', '--user', metavar='USER_EMAIL',
                                     help="Add a filter to show only groups belonging to a specific user",
                                     action='store', type=str)
        parser.add_argument('-t', '--type', metavar='TYPE',
                            help="Show groups of a specific type, instead of user-defined groups",
                            action='store', type=str)
        parser.add_argument('-d', '--with-description',
                            dest='with_description', action='store_true',
                            help="Show also the group description")
        parser.add_argument('-p', '--past-days', metavar='N',
                            help="add a filter to show only groups created in the past N days",
                            action='store', type=int)
        parser.add_argument('-s', '--startswith', metavar='STRING',
                            default=None,
                            help="add a filter to show only groups for which the name begins with STRING",
                            action='store', type=str)
        parser.add_argument('-e', '--endswith', metavar='STRING', default=None,
                            help="add a filter to show only groups for which the name ends with STRING",
                            action='store', type=str)
        parser.add_argument('-c', '--contains', metavar='STRING', default=None,
                            help="add a filter to show only groups for which the name contains STRING",
                            action='store', type=str)
        parser.add_argument('-n', '--node', metavar='PK', default=None,
                            help="Show only the groups that contain the node specified by PK",
                            action='store', type=int)
        parser.set_defaults(all_users=False)
        parser.set_defaults(with_description=False)

        args = list(args)
        parsed_args = parser.parse_args(args)

        if parsed_args.all_users:
            user = None
        else:
            if parsed_args.user:
                user = parsed_args.user
            else:
                # By default: only groups of this user
                user = get_automatic_user()

        type_string = ""
        if parsed_args.type is not None:
            try:
                type_string = get_group_type_mapping()[parsed_args.type]
            except KeyError:
                print >> sys.stderr, "Invalid group type. Valid group types are:"
                print >> sys.stderr, ",".join(sorted(
                    get_group_type_mapping().keys()))
                sys.exit(1)

        name_filters = dict((k, getattr(parsed_args, k))
                            for k in ['startswith', 'endswith', 'contains'])

        n_days_ago = None
        if parsed_args.past_days:
            n_days_ago = (timezone.now() -
                          datetime.timedelta(days=parsed_args.past_days))

        # Depending on --nodes option use or not key "nodes"
        from aiida.orm.implementation import Group
        from aiida.orm import load_node

        node_pk = parsed_args.node
        if node_pk is not None:
            try:
                node = load_node(node_pk)
            except NotExistent as e:
                print >> sys.stderr, "Error: {}.".format(e.message)
                sys.exit(1)
            res = Group.query(user=user, type_string=type_string, nodes=node,
                              past_days=n_days_ago, name_filters=name_filters)
        else:
            res = Group.query(user=user, type_string=type_string,
                              past_days=n_days_ago, name_filters=name_filters)

        groups = tuple([(str(g.pk), g.name, len(g.nodes), g.user.email.strip(),
                         g.description) for g in res])


        table = []
        if parsed_args.with_description:
            table_header = \
                ["PK", "GroupName", "NumNodes", "User", "Description"]
            for pk, nam, nod, usr, desc in groups:
                table.append([pk, nam, nod, usr, desc])

        else:
            table_header = ["PK", "GroupName", "NumNodes", "User"]
            for pk, nam, nod, usr, _ in groups:
                table.append([pk, nam, nod, usr])
        print(tabulate(table, headers=table_header))
Example #26
    def calculation_cleanworkdir(self, *args):
        """
        Clean the working directory of calculations by removing all the content of the
        associated RemoteFolder node. Calculations can be identified by pk with the -k flag
        or by specifying limits on the modification times with -p/-o flags
        """
        import argparse

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description="""
                Clean all content of all output remote folders of calculations,
                passed as a list of pks, or identified by modification time.

                If a list of calculation PKs is not passed with the -k option, one or both
                of the -p and -o options has to be specified. If both are specified, a logical
                AND is done between the two, i.e. the calculations that will be cleaned have been
                modified AFTER [-p option] days from now but BEFORE [-o option] days from now.
                Passing the -f option will prevent the confirmation dialog from being prompted.
                """
        )
        parser.add_argument(
            '-k', '--pk', metavar='PK', type=int, nargs='+', dest='pk',
            help='The principal key (PK) of the calculations of which to clean the work directory'
        )
        parser.add_argument(
            '-f', '--force', action='store_true',
            help='Force the cleaning (no prompt)'
        )
        parser.add_argument(
            '-p', '--past-days', metavar='N', type=int, action='store', dest='past_days',
            help='Include calculations that have been modified within the last N days', 
        )
        parser.add_argument(
            '-o', '--older-than', metavar='N', type=int, action='store', dest='older_than',
            help='Include calculations that have been modified more than N days ago',
        )
        parser.add_argument(
            '-c', '--computers', metavar='label', nargs='+', type=str, action='store', dest='computer',
            help='Include only calculations that were run on these computers'
        )

        if not is_dbenv_loaded():
            load_dbenv()

        from aiida.backends.utils import get_automatic_user
        from aiida.backends.utils import get_authinfo
        from aiida.common.utils import query_yes_no
        from aiida.orm.computer import Computer as OrmComputer
        from aiida.orm.user import User as OrmUser
        from aiida.orm.calculation import Calculation as OrmCalculation
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.utils import timezone
        import datetime

        parsed_args = parser.parse_args(args)

        # If a pk is given then the -o & -p options should not be specified
        if parsed_args.pk is not None:
            if (parsed_args.past_days is not None or parsed_args.older_than is not None):
                print("You cannot specify both a list of calculation pks and the -p or -o options")
                return

        # If no pk is given then at least one of the -o & -p options should be specified
        else:
            if (parsed_args.past_days is None and parsed_args.older_than is None):
                print("You should specify at least a list of calculations or the -p, -o options")
                return

        qb_user_filters = dict()
        user = OrmUser(dbuser=get_automatic_user())
        qb_user_filters["email"] = user.email

        qb_computer_filters = dict()
        if parsed_args.computer is not None:
            qb_computer_filters["name"] = {"in": parsed_args.computer}

        qb_calc_filters = dict()
        if parsed_args.past_days is not None:
            pd_ts = timezone.now() - datetime.timedelta(days=parsed_args.past_days)
            qb_calc_filters["mtime"] = {">": pd_ts}
        if parsed_args.older_than is not None:
            ot_ts = timezone.now() - datetime.timedelta(days=parsed_args.older_than)
            qb_calc_filters["mtime"] = {"<": ot_ts}
        if parsed_args.pk is not None:
            print("parsed_args.pk: ", parsed_args.pk)
            qb_calc_filters["id"] = {"in": parsed_args.pk}

        qb = QueryBuilder()
        qb.append(OrmCalculation, tag="calc",
                  filters=qb_calc_filters,
                  project=["id", "uuid", "attributes.remote_workdir"])
        qb.append(OrmComputer, computer_of="calc", tag="computer",
                  project=["*"],
                  filters=qb_computer_filters)
        qb.append(OrmUser, creator_of="calc", tag="user",
                  project=["*"],
                  filters=qb_user_filters)

        no_of_calcs = qb.count()
        if no_of_calcs == 0:
            print("No calculations found with the given criteria.")
            return

        print("Found {} calculations with the given criteria.".format(
            no_of_calcs))

        if not parsed_args.force:
            if not query_yes_no("Are you sure you want to clean the work "
                                "directory?", "no"):
                return

        # get the uuids of all calculations matching the filters
        calc_list_data = qb.dict()

        # get all computers associated to the calc uuids above, and load them
        # we group them by uuid to avoid computer duplicates
        comp_uuid_to_computers = {_["computer"]["*"].uuid: _["computer"]["*"] for _ in calc_list_data}

        # now build a dictionary with the info of folders to delete
        remotes = {}
        for computer in comp_uuid_to_computers.values():
            # initialize a key of info for a given computer
            remotes[computer.name] = {'transport': get_authinfo(
                computer=computer, aiidauser=user._dbuser).get_transport(),
                                      'computer': computer,
            }

            # select the calc pks done on this computer
            this_calc_pks = [_["calc"]["id"] for _ in calc_list_data
                             if _["computer"]["*"].id == computer.id]

            this_calc_uuids = [unicode(_["calc"]["uuid"])
                               for _ in calc_list_data
                               if _["computer"]["*"].id == computer.id]

            remote_workdirs = [_["calc"]["attributes.remote_workdir"]
                               for _ in calc_list_data
                               if _["calc"]["id"] in this_calc_pks
                               if _["calc"]["attributes.remote_workdir"]
                               is not None]

            remotes[computer.name]['remotes'] = remote_workdirs
            remotes[computer.name]['uuids'] = this_calc_uuids

        # now proceed to cleaning
        for computer, dic in remotes.iteritems():
            print("Cleaning the work directory on computer {}.".format(computer))
            counter = 0
            t = dic['transport']
            with t:
                remote_user = t.whoami()
                aiida_workdir = dic['computer'].get_workdir().format(
                    username=remote_user)

                t.chdir(aiida_workdir)
                # Hardcoding the sharding equal to 3 parts!
                existing_folders = t.glob('*/*/*')

                folders_to_delete = [i for i in existing_folders if
                                     i.replace("/", "") in dic['uuids']]

                for folder in folders_to_delete:
                    t.rmtree(folder)
                    counter += 1
                    if counter % 20 == 0 and counter > 0:
                        print("Deleted work directories: {}".format(counter))

            print("{} remote folder(s) cleaned.".format(counter))
Example #27
0
# Initialize the pw_job2 calculation node.
calc2 = PwimmigrantCalculation(computer=computer,
                               resources=resources,
                               remote_workdir='/scratch/',
                               input_file_name='pw_job2.in',
                               output_file_name='pw_job2.out')

# Link the code that was used to run the calculations.
calc1.use_code(code)
calc2.use_code(code)

# Get the computer's transport and create an instance.
from aiida.backends.utils import get_authinfo, get_automatic_user

authinfo = get_authinfo(computer=computer, aiidauser=get_automatic_user())
transport = authinfo.get_transport()

# Open the transport for the duration of the immigrations, so it's not
# reopened for each one. This is best performed using the transport's
# context guard through the ``with`` statement.
with transport as open_transport:

    # Parse the calculations' input files to automatically generate and link the
    # calculations' input nodes.
    calc1.create_input_nodes(open_transport)
    calc2.create_input_nodes(open_transport)

    # Store the calculations and their input nodes and tell the daemon that the output
    # is ready to be retrieved and parsed.
    calc1.prepare_for_retrieval_and_parsing(open_transport)
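    # A hedged sketch of the objects this snippet assumes were defined earlier
    # (labels and resource values are illustrative, not part of the original example):
    #
    #   from aiida.orm import Code, Computer
    #   computer = Computer.get('my_cluster')
    #   code = Code.get_from_string('pw@my_cluster')
    #   resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 8}
    #   calc1 = PwimmigrantCalculation(computer=computer, resources=resources,
    #                                  remote_workdir='/scratch/',
    #                                  input_file_name='pw_job1.in',
    #                                  output_file_name='pw_job1.out')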
Example #28
0
    def next(self, next_method):
        """
        Adds a new step to be called after the completion of the caller method's calculations and subworkflows.

        This method must be called inside a Workflow step, otherwise an error is thrown. The
        code finds the caller method and stores the given next_method in the database as the
        next method to be called. At this point nothing is executed; only the configuration
        in the database is updated.

        If during the execution of the caller method the user launched calculations or subworkflows,
        this method will add them to the database, making them available to the workflow manager to
        be launched. In fact, all calculation and subworkflow submissions are lazy; they are actually
        executed by this call.

        :param next_method: a Workflow step method to execute after the caller method
        :raise: AiidaException: in case the caller method cannot be found or validated
        :return: the wrapped methods, decorated with the correct step name
        """

        md5 = self.dbworkflowinstance.script_md5
        script_path = self.dbworkflowinstance.script_path

        # TODO: in principle, the file containing the workflow description
        # should be copied into a repository, and the workflow should check
        # against that copy to be sure it is loading the same description of
        # the workflow. At the moment this is not done: the check is made
        # against the source in aiida/workflows/..., which essentially makes it
        # impossible to develop a workflow without rendering most of the trial
        # runs inaccessible. These lines are therefore commented out for now.

        # if md5 != md5_file(script_path):
        #    raise ValidationError("Unable to load the original workflow module from {}, MD5 has changed".format(script_path))

        # ATTENTION: Do not move this code outside or encapsulate it in a function
        curframe = inspect.currentframe()
        calframe = inspect.getouterframes(curframe, 2)
        caller_method = calframe[1][3]

        if next_method is None:
            raise AiidaException(
                "The next method is None, probably you passed a method with parenthesis ??"
            )

        if not self.has_step(caller_method):
            raise AiidaException(
                "The caller method is either not a step or has not been registered as one"
            )

        if not next_method.__name__ == wf_exit_call:
            # getattr with a default never raises AttributeError, so the original
            # try/except was dead code; check the decorator flag directly instead.
            if not getattr(next_method, "is_wf_step", False):
                raise AiidaException(
                    "Cannot add as next call a method not decorated as Workflow method"
                )

        # TODO SP: abstract this, as it depends on the DB backend. A better approach
        # would be to add a method to the DbWorkflow of both SQLA and Django to get
        # steps with particular filters, in order to avoid repeating all the code
        # around this.

        # Retrieve the caller method
        method_step = self.dbworkflowinstance.steps.get(
            name=caller_method, user=get_automatic_user())

        # Attach calculations
        if caller_method in self.attach_calc_lazy_storage:
            for c in self.attach_calc_lazy_storage[caller_method]:
                method_step.add_calculation(c)

        # Attach sub-workflows
        if caller_method in self.attach_subwf_lazy_storage:
            for w in self.attach_subwf_lazy_storage[caller_method]:
                method_step.add_sub_workflow(w)

        # Set the next method
        if not next_method.__name__ == wf_exit_call:
            next_method_name = next_method.wf_step_name
        else:
            next_method_name = wf_exit_call

        # logger.info("Adding step {0} after {1} in {2}".format(next_method_name, caller_method, self.uuid))
        method_step.set_nextcall(next_method_name)
        #
        self.dbworkflowinstance.set_state(wf_states.RUNNING)
        method_step.set_state(wf_states.RUNNING)
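        # A hedged usage sketch showing how ``next`` is typically called from inside
        # a decorated workflow step (class and helper names are illustrative, not
        # part of the original source):
        #
        #   class MyWorkflow(Workflow):
        #
        #       @Workflow.step
        #       def start(self):
        #           calc = self.build_my_calculation()  # hypothetical helper
        #           self.attach_calculation(calc)       # stored lazily, attached by ``next``
        #           self.next(self.analyze)             # pass the method itself, no parentheses
        #
        #       @Workflow.step
        #       def analyze(self):
        #           self.next(self.exit)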
Example #29
0
    def computer_test(self, *args):
        """
        Test the connection to a computer.

        It tries to connect, to get the list of calculations on the queue and
        to perform other tests.
        """
        import argparse
        import traceback

        if not is_dbenv_loaded():
            load_dbenv()

        from django.core.exceptions import ObjectDoesNotExist
        from aiida.common.exceptions import NotExistent
        from aiida.orm.user import User
        from aiida.backends.utils import get_automatic_user
        from aiida.orm.computer import Computer as OrmComputer

        parser = argparse.ArgumentParser(prog=self.get_full_command_name(),
                                         description='Test a remote computer')
        # The default states are those that are shown if no option is given
        parser.add_argument(
            '-u',
            '--user',
            type=str,
            metavar='EMAIL',
            dest='user_email',
            help="Test the connection for a given AiiDA user."
            "If not specified, uses the current "
            "default user.",
        )
        parser.add_argument(
            '-t',
            '--traceback',
            action='store_true',
            help="Print the full traceback in case an exception "
            "is raised",
        )
        parser.add_argument('computer',
                            type=str,
                            help="The name of the computer that you "
                            "want to test")

        parsed_args = parser.parse_args(args)

        user_email = parsed_args.user_email
        computername = parsed_args.computer
        print_traceback = parsed_args.traceback

        try:
            computer = self.get_computer(name=computername)
        except NotExistent:
            print >> sys.stderr, "No computer exists with name '{}'".format(
                computername)
            sys.exit(1)

        if user_email is None:
            user = User(dbuser=get_automatic_user())
        else:
            user_list = User.search_for_users(email=user_email)
            # If no user is found
            if not user_list:
                print >> sys.stderr, ("No user with email '{}' in the "
                                      "database.".format(user_email))
                sys.exit(1)
            user = user_list[0]

        print "Testing computer '{}' for user {}...".format(
            computername, user.email)
        try:
            dbauthinfo = computer.get_dbauthinfo(user._dbuser)
        except NotExistent:
            print >> sys.stderr, ("User with email '{}' is not yet configured "
                                  "for computer '{}' yet.".format(
                                      user.email, computername))
            sys.exit(1)

        warning_string = None
        if not dbauthinfo.enabled:
            warning_string = (
                "** NOTE! Computer is disabled for the "
                "specified user!\n   Do you really want to test it? [y/N] ")
        if not computer.is_enabled():
            warning_string = ("** NOTE! Computer is disabled!\n"
                              "   Do you really want to test it? [y/N] ")
        if warning_string:
            answer = raw_input(warning_string)
            if not (answer == 'y' or answer == 'Y'):
                sys.exit(0)

        s = OrmComputer(dbcomputer=dbauthinfo.dbcomputer).get_scheduler()
        t = dbauthinfo.get_transport()

        ## STARTING TESTS HERE
        num_failures = 0
        num_tests = 0

        try:
            print "> Testing connection..."
            with t:
                s.set_transport(t)
                num_tests += 1
                for test in [
                        self._computer_test_get_jobs,
                        self._computer_create_temp_file
                ]:
                    num_tests += 1
                    try:
                        succeeded = test(transport=t,
                                         scheduler=s,
                                         dbauthinfo=dbauthinfo)
                    except Exception as e:
                        print "* The test raised an exception!"
                        if print_traceback:
                            print "** Full traceback:"
                            # Indent
                            print "\n".join([
                                "   {}".format(l)
                                for l in traceback.format_exc().splitlines()
                            ])
                        else:
                            print "** {}: {}".format(e.__class__.__name__,
                                                     e.message)
                            print(
                                "** (use the --traceback option to see the "
                                "full traceback)")
                        succeeded = False

                    if not succeeded:
                        num_failures += 1

            if num_failures:
                print "Some tests failed! ({} out of {} failed)".format(
                    num_failures, num_tests)
            else:
                print "Test completed (all {} tests succeeded)".format(
                    num_tests)
        except Exception as e:
            print "** Error while trying to connect to the computer! I cannot "
            print "   perform following tests, so I stop."
            if print_traceback:
                print "** Full traceback:"
                # Indent
                print "\n".join([
                    "   {}".format(l)
                    for l in traceback.format_exc().splitlines()
                ])
            else:
                print "{}: {}".format(e.__class__.__name__, e.message)
                print(
                    "(use the --traceback option to see the "
                    "full traceback)")
            succeeded = False
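        # A hedged sketch of the contract each test callable must satisfy, derived
        # from the loop above (this helper is illustrative; it is not the actual
        # ``_computer_test_get_jobs`` or ``_computer_create_temp_file`` implementation):
        #
        #   def _computer_test_whoami(self, transport=None, scheduler=None, dbauthinfo=None):
        #       """Return True on success, False on failure; exceptions are caught by the caller."""
        #       print "> Checking the remote username..."
        #       print "  %s" % transport.whoami()
        #       return True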
Example #30
0
    def calculation_cleanworkdir(self, *args):
        """
        Clean all the content of all the output remote folders of calculations,
        passed as a list of pks, or identified by modification time.

        If a list of calculation PKs is not passed with the -k option, one of
        the -p or -o options has to be specified; in this version the two
        cannot be combined, so you clean out either calculations modified
        within the last [-p option] days or calculations modified more than
        [-o option] days ago.
        If you also pass the -f option, no confirmation will be asked.
        """
        import argparse

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description="Clean work directory (i.e. remote folder) of AiiDA "
            "calculations.")
        parser.add_argument("-k",
                            "--pk",
                            metavar="PK",
                            type=int,
                            nargs="+",
                            help="The principal key (PK) of the calculations "
                            "to clean the workdir of",
                            dest="pk")
        parser.add_argument("-f",
                            "--force",
                            action="store_true",
                            help="Force the cleaning (no prompt)")
        parser.add_argument("-p",
                            "--past-days",
                            metavar="N",
                            help="Add a filter to clean workdir of "
                            "calculations modified during the past N "
                            "days",
                            type=int,
                            action="store",
                            dest="past_days")
        parser.add_argument("-o",
                            "--older-than",
                            metavar="N",
                            help="Add a filter to clean workdir of "
                            "calculations that have been modified on a "
                            "date before N days ago",
                            type=int,
                            action="store",
                            dest="older_than")
        parser.add_argument("-c",
                            "--computers",
                            metavar="label",
                            nargs="+",
                            help="Add a filter to clean workdir of "
                            "calculations on this computer(s) only",
                            type=str,
                            action="store",
                            dest="computer")

        if not is_dbenv_loaded():
            load_dbenv()

        from aiida.backends.utils import get_automatic_user
        from aiida.backends.utils import get_authinfo
        from aiida.common.utils import query_yes_no
        from aiida.orm.computer import Computer as OrmComputer
        from aiida.orm.user import User as OrmUser
        from aiida.orm.calculation import Calculation as OrmCalculation
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.utils import timezone
        import datetime

        parsed_args = parser.parse_args(args)

        # If a pk is given then the -o & -p options should not be specified
        if parsed_args.pk is not None:
            if ((parsed_args.past_days is not None)
                    or (parsed_args.older_than is not None)):
                print(
                    "You cannot specify both a list of calculation pks and "
                    "the -p or -o options")
                return
        # If no pk is given then at least one of the -o & -p options should be
        # specified
        else:
            if ((parsed_args.past_days is None)
                    and (parsed_args.older_than is None)):
                print(
                    "You should specify at least a list of calculations or "
                    "the -p, -o options")
                return

        # At this point we know that either the pk or the -p -o options are
        # specified

        # We also check that not both -o & -p options are specified
        if ((parsed_args.past_days is not None)
                and (parsed_args.older_than is not None)):
            print(
                "The -p and -o options cannot both be specified at the "
                "same time")
            return

        qb_user_filters = dict()
        user = OrmUser(dbuser=get_automatic_user())
        qb_user_filters["email"] = user.email

        qb_computer_filters = dict()
        if parsed_args.computer is not None:
            qb_computer_filters["name"] = {"in": parsed_args.computer}

        qb_calc_filters = dict()
        if parsed_args.past_days is not None:
            pd_ts = timezone.now() - datetime.timedelta(
                days=parsed_args.past_days)
            qb_calc_filters["mtime"] = {">": pd_ts}
        if parsed_args.older_than is not None:
            ot_ts = timezone.now() - datetime.timedelta(
                days=parsed_args.older_than)
            qb_calc_filters["mtime"] = {"<": ot_ts}
        if parsed_args.pk is not None:
            print("parsed_args.pk: ", parsed_args.pk)
            qb_calc_filters["id"] = {"in": parsed_args.pk}

        qb = QueryBuilder()
        qb.append(OrmCalculation,
                  tag="calc",
                  filters=qb_calc_filters,
                  project=["id", "uuid", "attributes.remote_workdir"])
        qb.append(OrmComputer,
                  computer_of="calc",
                  project=["*"],
                  filters=qb_computer_filters)
        qb.append(OrmUser,
                  creator_of="calc",
                  project=["*"],
                  filters=qb_user_filters)

        no_of_calcs = qb.count()
        if no_of_calcs == 0:
            print("No calculations found with the given criteria.")
            return

        print("Found {} calculations with the given criteria.".format(
            no_of_calcs))

        if not parsed_args.force:
            if not query_yes_no(
                    "Are you sure you want to clean the work "
                    "directory?", "no"):
                return

        # get the uuids of all calculations matching the filters
        calc_list_data = qb.dict()

        # get all computers associated to the calc uuids above, and load them
        # we group them by uuid to avoid computer duplicates
        comp_uuid_to_computers = {
            _["computer"]["*"].uuid: _["computer"]["*"]
            for _ in calc_list_data
        }

        # now build a dictionary with the info of folders to delete
        remotes = {}
        for computer in comp_uuid_to_computers.values():
            # initialize a key of info for a given computer
            remotes[computer.name] = {
                'transport':
                get_authinfo(computer=computer,
                             aiidauser=user._dbuser).get_transport(),
                'computer':
                computer,
            }

            # select the calc pks done on this computer
            this_calc_pks = [
                _["calc"]["id"] for _ in calc_list_data
                if _["computer"]["*"].id == computer.id
            ]

            this_calc_uuids = [
                unicode(_["calc"]["uuid"]) for _ in calc_list_data
                if _["computer"]["*"].id == computer.id
            ]

            remote_workdirs = [
                _["calc"]["attributes.remote_workdir"] for _ in calc_list_data
                if _["calc"]["id"] in this_calc_pks
                if _["calc"]["attributes.remote_workdir"] is not None
            ]

            remotes[computer.name]['remotes'] = remote_workdirs
            remotes[computer.name]['uuids'] = this_calc_uuids

        # now proceed to cleaning
        for computer, dic in remotes.iteritems():
            print(
                "Cleaning the work directory on computer {}.".format(computer))
            counter = 0
            t = dic['transport']
            with t:
                remote_user = t.whoami()
                aiida_workdir = dic['computer'].get_workdir().format(
                    username=remote_user)

                t.chdir(aiida_workdir)
                # Hardcoding the sharding equal to 3 parts!
                existing_folders = t.glob('*/*/*')

                folders_to_delete = [
                    i for i in existing_folders
                    if i.replace("/", "") in dic['uuids']
                ]

                for folder in folders_to_delete:
                    t.rmtree(folder)
                    counter += 1
                    if counter % 20 == 0 and counter > 0:
                        print("Deleted work directories: {}".format(counter))

            print("{} remote folder(s) cleaned.".format(counter))