def get_step(self, step_method):
    """
    Retrieve a step of the Workflow by name or by decorated method.

    :param step_method: a string with the name of the step to retrieve, or a
        method decorated as a Workflow step.
    :raise: AiidaException: if ``step_method`` is a method that is not
        decorated as a Workflow step.
    :raise: InternalError: if the requested step name is the reserved
        exit-call name.
    :return: a DbWorkflowStep object, or None if no step with that name
        exists for the current user.
    """
    if isinstance(step_method, basestring):
        step_method_name = step_method
    else:
        # Use a default of False: a plain getattr would raise AttributeError
        # for undecorated methods instead of the intended AiidaException.
        if not getattr(step_method, "is_wf_step", False):
            raise AiidaException("Cannot get step calculations from a method not decorated as Workflow method")

        step_method_name = step_method.wf_step_name

    if (step_method_name == wf_exit_call):
        raise InternalError("Cannot query a step with name {0}, reserved string".format(step_method_name))

    try:
        step = self.dbworkflowinstance.steps.get(name=step_method_name,
                                                 user=get_automatic_user())
        return step
    except ObjectDoesNotExist:
        return None
def __init__(self, **kwargs):
    """
    Create a new Group wrapper.

    Either pass an existing ``dbgroup`` (a DbGroup instance or its pk) and
    nothing else, or pass the fields of a new group: ``name`` (mandatory),
    and optionally ``type_string``, ``user`` and ``description``.

    :raise NotExistent: if a pk is given and no DbGroup with that pk exists.
    :raise TypeError: if ``dbgroup`` is neither an integer nor a DbGroup.
    :raise ValueError: if an invalid combination of parameters is passed.
    """
    from aiida.backends.djsite.db.models import DbGroup

    dbgroup = kwargs.pop('dbgroup', None)

    if dbgroup is not None:
        # An integer is interpreted as the pk of an existing DbGroup
        if isinstance(dbgroup, (int, long)):
            try:
                dbgroup = DbGroup.objects.get(pk=dbgroup)
            except ObjectDoesNotExist:
                raise NotExistent(
                    "Group with pk={} does not exist".format(dbgroup))
        if not isinstance(dbgroup, DbGroup):
            raise TypeError("dbgroup is not a DbGroup instance, it is "
                            "instead {}".format(str(type(dbgroup))))
        if kwargs:
            # Fixed message wording: the parameter is called 'dbgroup'
            raise ValueError("If you pass a dbgroup, you cannot pass any "
                             "further parameter")

        self._dbgroup = dbgroup
    else:
        name = kwargs.pop('name', None)
        if name is None:
            raise ValueError("You have to specify a group name")
        group_type = kwargs.pop('type_string', "")  # By default, a user group
        user = kwargs.pop('user', get_automatic_user())
        description = kwargs.pop('description', "")
        self._dbgroup = DbGroup(name=name, description=description,
                                user=user, type=group_type)
        if kwargs:
            raise ValueError("Too many parameters passed to Group, the "
                             "unknown parameters are: {}".format(", ".join(
                                 kwargs.keys())))
def kill_all():
    """
    Kill every unfinished workflow belonging to the current user.

    Selects all DbWorkflow rows of the automatic user whose state is not
    FINISHED and invokes ``kill()`` on the corresponding Workflow subclass
    instance of each.
    """
    from aiida.backends.djsite.db.models import DbWorkflow

    # Build the filter: workflows of this user that are not yet finished
    unfinished_filter = Q(user=get_automatic_user())
    unfinished_filter.add(~Q(state=wf_states.FINISHED), Q.AND)

    for running_wf in DbWorkflow.objects.filter(unfinished_filter):
        Workflow.get_subclass_from_uuid(running_wf.uuid).kill()
def get_or_create_famgroup(cls, famname):
    """
    Return a PAW family group with the given name, creating it if it does
    not exist yet.

    :param famname: the name of the PAW family group.
    :raise UniquenessError: if a group with that name already exists but
        belongs to a different user.
    :return: a tuple ``(group, group_created)`` where ``group_created`` is
        True if the group was newly created (and not yet stored).
    """
    from aiida.orm import Group
    from aiida.backends.djsite.utils import get_automatic_user

    # TODO: maybe replace with Group.get_or_create?
    try:
        group = Group.get(name=famname, type_string=cls.group_type)
        group_created = False
    except NotExistent:
        group = Group(name=famname, type_string=cls.group_type,
                      user=get_automatic_user())
        group_created = True

    if group.user != get_automatic_user():
        # Fixed message: this function manages PAW families, not UPF families
        raise UniquenessError("There is already a PAW family group "
                              "with name {}, but it belongs to user {},"
                              " therefore you cannot modify it".format(
                                  famname, group.user.email))
    return group, group_created
def get_paw_groups(cls, elements=None, symbols=None, user=None):
    """
    Return all PAW family groups matching the given filters, ordered by
    group name.

    :param elements: a set of element names to filter on
        (default: no element filter).
    :param symbols: a set of symbols to filter on
        (default: no symbol filter).
    :param user: if given, only return groups of that user; otherwise the
        automatic (current) user is used.
    :return: a list of Group objects, sorted by group name.
    """
    from aiida.orm import Group
    from aiida.backends.djsite.utils import get_automatic_user

    # Avoid mutable default arguments: create fresh empty sets per call.
    if elements is None:
        elements = set()
    if symbols is None:
        symbols = set()

    params = {
        'type_string': cls.group_type,
        'node_attributes': {
            'element': elements,
            'symbol': symbols
        }
    }
    if user:
        params['user'] = user
    else:
        params['user'] = get_automatic_user()

    res = Group.query(**params)

    # Sort by group name only; an explicit key avoids falling back to
    # comparing Group objects themselves on name ties.
    groups = sorted(((g.name, g) for g in res), key=lambda pair: pair[0])

    # Return the groups, without their names
    return [pair[1] for pair in groups]
def test_query(self):
    """
    Test if queries are working
    """
    from aiida.orm.group import Group
    from aiida.common.exceptions import NotExistent, MultipleObjectsError
    from aiida.backends.djsite.db.models import DbUser
    from aiida.backends.djsite.utils import get_automatic_user

    # Two groups owned by the automatic user
    g1 = Group(name='testquery1').store()
    g2 = Group(name='testquery2').store()

    n1 = Node().store()
    n2 = Node().store()
    n3 = Node().store()
    n4 = Node().store()

    # n1 belongs to both groups; n2 only to g1, n3 only to g2, n4 to none
    g1.add_nodes([n1, n2])
    g2.add_nodes([n1, n3])

    # A third group owned by a different (freshly created) user
    newuser = DbUser.objects.create_user(email='*****@*****.**', password='')
    g3 = Group(name='testquery3', user=newuser).store()

    # I should find it
    g1copy = Group.get(uuid=g1.uuid)
    self.assertEquals(g1.pk, g1copy.pk)

    # Try queries filtering groups by the nodes they contain
    res = Group.query(nodes=n4)
    self.assertEquals([_.pk for _ in res], [])

    res = Group.query(nodes=n1)
    self.assertEquals([_.pk for _ in res], [_.pk for _ in [g1, g2]])

    res = Group.query(nodes=n2)
    self.assertEquals([_.pk for _ in res], [_.pk for _ in [g1]])

    # I try to use 'get' with zero or multiple results
    with self.assertRaises(NotExistent):
        Group.get(nodes=n4)
    with self.assertRaises(MultipleObjectsError):
        Group.get(nodes=n1)

    # 'get' with exactly one result returns that group
    self.assertEquals(Group.get(nodes=n2).pk, g1.pk)

    # Query by user (DbUser object)
    res = Group.query(user=newuser)
    self.assertEquals(set(_.pk for _ in res), set(_.pk for _ in [g3]))

    # Same query, but using a string (the username=email) instead of
    # a DbUser object
    res = Group.query(user=newuser.email)
    self.assertEquals(set(_.pk for _ in res), set(_.pk for _ in [g3]))

    res = Group.query(user=get_automatic_user())
    self.assertEquals(set(_.pk for _ in res), set(_.pk for _ in [g1, g2]))

    # Final cleanup
    g1.delete()
    g2.delete()
    newuser.delete()
def __init__(self, **kwargs):
    """
    Initialize a Node wrapper.

    If ``dbnode`` is passed (an already-stored Django DbNode), the existing
    database row is wrapped and no other keyword is accepted. Otherwise a
    new, unstored DbNode is created for the automatic user and the remaining
    keyword arguments are applied via ``_set_with_defaults``.

    :param dbnode: an already-stored DbNode instance to wrap (optional).
    :raise TypeError: if ``dbnode`` is not a DbNode instance.
    :raise ValueError: if ``dbnode`` is unsaved, or if further keywords are
        passed together with ``dbnode``.
    """
    from aiida.backends.djsite.db.models import DbNode

    super(Node, self).__init__()
    self._temp_folder = None

    dbnode = kwargs.pop('dbnode', None)

    # Set the internal parameters
    # Can be redefined in the subclasses
    self._init_internal_params()

    if dbnode is not None:
        if not isinstance(dbnode, DbNode):
            raise TypeError("dbnode is not a DbNode instance")
        if dbnode.pk is None:
            # Fixed message wording ("If cannot load" -> "Cannot load")
            raise ValueError("Cannot load an aiida.orm.Node instance "
                             "from an unsaved Django DbNode object.")
        if kwargs:
            raise ValueError("If you pass a dbnode, you cannot pass any "
                             "further parameter")

        # If I am loading, I cannot modify it
        self._to_be_stored = False

        self._dbnode = dbnode

        # If this is changed, fix also the importer
        self._repo_folder = RepositoryFolder(section=self._section_name,
                                             uuid=self._dbnode.uuid)

        # NO VALIDATION ON __init__ BY DEFAULT, IT IS TOO SLOW SINCE IT OFTEN
        # REQUIRES MULTIPLE DB HITS (the validation typically needs to load
        # and cache attributes).
    else:
        # TODO: allow to get the user from the parameters
        user = get_automatic_user()

        self._dbnode = DbNode(user=user,
                              uuid=get_new_uuid(),
                              type=self._plugin_type_string)

        self._to_be_stored = True

        # As creating the temp folder may require some time on slow
        # filesystems, we defer its creation
        self._temp_folder = None
        # Used only before the first save
        self._attrs_cache = {}
        # If this is changed, fix also the importer
        self._repo_folder = RepositoryFolder(section=self._section_name,
                                             uuid=self.uuid)

    # Automatically set all *other* attributes, if possible, otherwise
    # stop
    self._set_with_defaults(**kwargs)
def __init__(self, **kwargs):
    """
    Initializes the Workflow super class, store the instance in the DB and in
    case stores the starting parameters.

    If initialized with an uuid the Workflow is loaded from the DB, if not a
    new workflow is generated and added to the DB following the stack
    frameworks. This means that only modules inside aiida.workflows are
    allowed to implements the workflow super calls and be stored. The caller
    names, modules and files are retrieved from the stack.

    :param uuid: a string with the uuid of the object to be loaded.
    :param params: a dictionary of storable objects to initialize the
                   specific workflow
    :raise: NotExistent: if there is no entry of the desired workflow kind
                         with the given uuid.
    """
    from aiida.backends.djsite.db.models import DbWorkflow

    self._to_be_stored = True

    self._logger = logger.getChild(self.__class__.__name__)

    uuid = kwargs.pop('uuid', None)

    if uuid is not None:
        # Loading an existing workflow: no other keyword is accepted
        self._to_be_stored = False
        if kwargs:
            raise ValueError("If you pass a UUID, you cannot pass any further parameter")

        try:
            self._dbworkflowinstance = DbWorkflow.objects.get(uuid=uuid)

            # self.logger.info("Workflow found in the database, now retrieved")
            self._repo_folder = RepositoryFolder(section=self._section_name, uuid=self.uuid)

        except ObjectDoesNotExist:
            raise NotExistent("No entry with the UUID {} found".format(uuid))

    else:
        # ATTENTION: Do not move this code outside or encapsulate it in a
        # function: the stack offset below (stack[1]) assumes this frame is
        # the direct callee of the subclass __init__.
        import inspect

        stack = inspect.stack()

        # cur_fr = inspect.currentframe()
        # call_fr = inspect.getouterframes(cur_fr, 2)

        # Get all the caller data: stack[1] is the frame of the subclass
        # __init__ that invoked this superclass constructor.
        caller_frame = stack[1][0]
        caller_file = stack[1][1]
        caller_funct = stack[1][3]

        caller_module = inspect.getmodule(caller_frame)
        # NOTE(review): if 'self' is not among the caller's locals this raises
        # AttributeError on None; presumably guaranteed by the "__init__"
        # check just below — confirm.
        caller_module_class = caller_frame.f_locals.get('self', None).__class__

        if not caller_funct == "__init__":
            raise SystemError("A workflow must implement the __init__ class explicitly")

        # Test if the launcher is another workflow

        # print "caller_module", caller_module
        # print "caller_module_class", caller_module_class
        # print "caller_file", caller_file
        # print "caller_funct", caller_funct

        # Accept only the aiida.workflows packages
        if caller_module == None or not caller_module.__name__.startswith("aiida.workflows"):
            raise SystemError("The superclass can't be called directly")

        self.caller_module = caller_module.__name__
        self.caller_module_class = caller_module_class.__name__
        self.caller_file = caller_file
        self.caller_funct = caller_funct

        # A sandbox folder holding a copy of the launching script
        self._temp_folder = SandboxFolder()
        self.current_folder.insert_path(self.caller_file, self.caller_module_class)
        # self.store()

        # Test if there are parameters as input
        params = kwargs.pop('params', None)

        if params is not None:
            if type(params) is dict:
                self.set_params(params)

        # This stores the MD5 as well, to test in case the workflow has
        # been modified after the launch
        self._dbworkflowinstance = DbWorkflow(user=get_automatic_user(),
                                              module=self.caller_module,
                                              module_class=self.caller_module_class,
                                              script_path=self.caller_file,
                                              script_md5=md5_file(self.caller_file))

    self.attach_calc_lazy_storage = {}
    self.attach_subwf_lazy_storage = {}
def _list_calculations_old(cls, states=None, past_days=None, group=None,
                           group_pk=None, all_users=False, pks=None,
                           relative_ctime=True):
    """
    Return a string with a description of the AiiDA calculations.

    .. todo:: does not support the query for the IMPORTED state (since it
      checks the state in the Attributes, not in the DbCalcState table).
      Decide which is the correct logic and implement the correct query.

    :param states: a list of string with states. If set, print only the
        calculations in the states "states", otherwise shows all.
        Default = None.
    :param past_days: If specified, show only calculations that were
        created in the given number of past days.
    :param group: If specified, show only calculations belonging to a
        user-defined group with the given name. Can use colons to separate
        the group name from the type, as specified in
        :py:meth:`aiida.orm.group.Group.get_from_string` method.
    :param group_pk: If specified, show only calculations belonging to a
        user-defined group with the given PK.
    :param pks: if specified, must be a list of integers, and only
        calculations within that list are shown. Otherwise, all
        calculations are shown. If specified, sets state to None and
        ignores the value of the ``past_days`` option. Default = None
        (fixed from the previous mutable ``[]`` default).
    :param relative_ctime: if true, prints the creation time relative from
        now. (like 2days ago). Default = True
    :param all_users: if True, list calculation belonging to all users.
        Default = False

    :return: a string with description of calculations.
    """
    # I assume that calc_states are strings. If this changes in the future,
    # update the filter below from dbattributes__tval to the correct field.
    from aiida.backends.djsite.db.models import DbAuthInfo, DbAttribute
    from aiida.daemon.timestamps import get_last_daemon_timestamp

    if states:
        for state in states:
            if state not in calc_states:
                return "Invalid state provided: {}.".format(state)

    warnings_list = []

    now = timezone.now()

    if pks:
        q_object = Q(pk__in=pks)
    else:
        q_object = Q()

        if group is not None:
            g_pk = Group.get_from_string(group).pk

            q_object.add(Q(dbgroups__pk=g_pk), Q.AND)

        if group_pk is not None:
            q_object.add(Q(dbgroups__pk=group_pk), Q.AND)

        if not all_users:
            q_object.add(Q(user=get_automatic_user()), Q.AND)

        if states is not None:
            q_object.add(Q(dbattributes__key='state',
                           dbattributes__tval__in=states, ), Q.AND)

        if past_days is not None:
            now = timezone.now()
            n_days_ago = now - datetime.timedelta(days=past_days)
            q_object.add(Q(ctime__gte=n_days_ago), Q.AND)

    calc_list_pk = list(
        cls.query(q_object).distinct().values_list('pk', flat=True))

    calc_list = cls.query(pk__in=calc_list_pk).order_by('ctime')

    scheduler_states = dict(
        DbAttribute.objects.filter(dbnode__pk__in=calc_list_pk,
                                   key='scheduler_state').values_list(
            'dbnode__pk', 'tval'))

    # I do the query now, so that the list of pks gets cached
    calc_list_data = list(
        calc_list.filter(
            # dbcomputer__dbauthinfo__aiidauser=F('user')
        ).distinct().order_by('ctime').values(
            'pk', 'dbcomputer__name', 'ctime',
            'type', 'dbcomputer__enabled',
            'dbcomputer__pk',
            'user__pk'))

    list_comp_pk = [i['dbcomputer__pk'] for i in calc_list_data]
    list_aiduser_pk = [i['user__pk'] for i in calc_list_data]
    enabled_data = DbAuthInfo.objects.filter(
        dbcomputer__pk__in=list_comp_pk, aiidauser__pk__in=list_aiduser_pk
    ).values_list('dbcomputer__pk', 'aiidauser__pk', 'enabled')

    enabled_auth_dict = {(i[0], i[1]): i[2] for i in enabled_data}

    states = {c.pk: c._get_state_string() for c in calc_list}

    scheduler_lastcheck = dict(DbAttribute.objects.filter(
        dbnode__in=calc_list,
        key='scheduler_lastchecktime').values_list('dbnode__pk', 'dval'))

    ## Get the last daemon check
    try:
        last_daemon_check = get_last_daemon_timestamp('updater', when='stop')
    except ValueError:
        last_check_string = ("# Last daemon state_updater check: "
                             "(Error while retrieving the information)")
    else:
        if last_daemon_check is None:
            last_check_string = "# Last daemon state_updater check: (Never)"
        else:
            last_check_string = ("# Last daemon state_updater check: "
                                 "{} ({})".format(
                str_timedelta(now - last_daemon_check,
                              negative_to_zero=True),
                timezone.localtime(last_daemon_check).strftime(
                    "at %H:%M:%S on %Y-%m-%d")))

    # States in which the computer's disabled flag is not shown
    disabled_ignorant_states = [
        None, calc_states.FINISHED, calc_states.SUBMISSIONFAILED,
        calc_states.RETRIEVALFAILED, calc_states.PARSINGFAILED,
        calc_states.FAILED
    ]

    if not calc_list:
        return last_check_string
    else:
        # first save a matrix of results to be printed
        res_str_list = [last_check_string]
        str_matrix = []
        title = ['# Pk', 'State', 'Creation',
                 'Sched. state', 'Computer', 'Type']
        str_matrix.append(title)
        len_title = [len(i) for i in title]

        for calcdata in calc_list_data:
            remote_state = "None"

            calc_state = states[calcdata['pk']]
            remote_computer = calcdata['dbcomputer__name']
            try:
                sched_state = scheduler_states.get(calcdata['pk'], None)
                if sched_state is None:
                    remote_state = "(unknown)"
                else:
                    remote_state = '{}'.format(sched_state)
                    if calc_state == calc_states.WITHSCHEDULER:
                        last_check = scheduler_lastcheck.get(calcdata['pk'],
                                                             None)
                        if last_check is not None:
                            when_string = " {}".format(
                                str_timedelta(now - last_check, short=True,
                                              negative_to_zero=True))
                            verb_string = "was "
                        else:
                            when_string = ""
                            verb_string = ""
                        remote_state = "{}{}{}".format(verb_string,
                                                       sched_state,
                                                       when_string)
            except ValueError:
                raise

            calc_module = \
                from_type_to_pluginclassname(calcdata['type']).rsplit(".", 1)[0]
            prefix = 'calculation.job.'
            prefix_len = len(prefix)
            if calc_module.startswith(prefix):
                calc_module = calc_module[prefix_len:].strip()

            if relative_ctime:
                calc_ctime = str_timedelta(now - calcdata['ctime'],
                                           negative_to_zero=True,
                                           max_num_fields=1)
            else:
                calc_ctime = " ".join([timezone.localtime(
                    calcdata['ctime']).isoformat().split('T')[0],
                                       timezone.localtime(calcdata[
                                           'ctime']).isoformat().split(
                                           'T')[1].split('.')[
                                           0].rsplit(":", 1)[0]])

            the_state = states[calcdata['pk']]

            # decide if it is needed to print enabled/disabled information
            # By default, if the computer is not configured for the
            # given user, assume it is user_enabled
            user_enabled = enabled_auth_dict.get(
                (calcdata['dbcomputer__pk'], calcdata['user__pk']), True)
            global_enabled = calcdata["dbcomputer__enabled"]

            enabled = "" if (user_enabled and global_enabled or
                             the_state in disabled_ignorant_states) else " [Disabled]"

            str_matrix.append([calcdata['pk'],
                               the_state,
                               calc_ctime,
                               remote_state,
                               remote_computer + "{}".format(enabled),
                               calc_module
                               ])

        # prepare a formatted text of minimal row length (to fit in terminals!)
        rows = []
        for j in range(len(str_matrix[0])):
            rows.append([len(str(i[j])) for i in str_matrix])
        line_lengths = [str(max(max(rows[i]), len_title[i])) for i in
                        range(len(rows))]
        fmt_string = "{:<" + "}|{:<".join(line_lengths) + "}"
        for row in str_matrix:
            res_str_list.append(fmt_string.format(*[str(i) for i in row]))

        res_str_list += ["# {}".format(_) for _ in warnings_list]

        return "\n".join(res_str_list)