def query_jobcalculations_by_computer_user_state(
        self, state, computer=None, user=None,
        only_computer_user_pairs=False, only_enabled=True, limit=None):
    """
    Filter all calculations with a given state.

    Issue a warning if the state is not in the list of valid states.

    :param string state: The state to be used to filter (should be a string
        among those defined in aiida.common.datastructures.calc_states)
    :param computer: a Django DbComputer entry, or a Computer object, of a
        computer in the DbComputer table. A string for the hostname is also
        valid.
    :param user: a Django entry (or its pk) of a user in the DbUser table;
        if present, the results are restricted to calculations of that
        specific user
    :param bool only_computer_user_pairs: if False (default) return a
        queryset where each element is a suitable instance of Node (it
        should be an instance of Calculation, if everything goes right!)
        If True, return only a list of tuples, where each tuple is in the
        format ('dbcomputer__id', 'user__id') [where the IDs are the IDs of
        the respective tables]
    :param bool only_enabled: if True (default) restrict the results to
        calculations on enabled computers; if False, include calculations
        on disabled computers as well.
    :param int limit: Limit the number of rows returned
    :return: a list of calculation objects matching the filters.
    :raise InputValidationError: if ``state`` is not a valid calculation
        state, or ``computer``/``user`` cannot be interpreted.
    """
    # I assume that calc_states are strings. If this changes in the future,
    # update the filter below from dbattributes__tval to the correct field.
    from aiida.orm.computer import Computer
    from aiida.orm.calculation.job import JobCalculation
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.common.exceptions import InputValidationError
    from aiida.common.datastructures import calc_states

    if state not in calc_states:
        raise InputValidationError(
            "querying for calculation state='{}', but it "
            "is not a valid calculation state".format(state))

    calcfilter = {'state': {'==': state}}
    # Bug fix: the 'enabled' filter used to be applied unconditionally,
    # making the only_enabled argument a no-op.
    computerfilter = {'enabled': {'==': True}} if only_enabled else {}
    userfilter = {}

    if computer is None:
        pass
    elif isinstance(computer, int):
        computerfilter.update({'id': {'==': computer}})
    elif isinstance(computer, Computer):
        computerfilter.update({'id': {'==': computer.pk}})
    else:
        try:
            computerfilter.update({'id': {'==': computer.id}})
        except AttributeError as e:
            raise InputValidationError(
                "{} is not a valid computer\n{}".format(computer, e))

    if user is None:
        pass
    elif isinstance(user, int):
        userfilter.update({'id': {'==': user}})
    else:
        try:
            userfilter.update({'id': {'==': int(user.id)}})
        # Narrowed from a bare except: only the failures that mean
        # "this is not a valid user object" are caught here.
        except (AttributeError, TypeError, ValueError):
            raise InputValidationError("{} is not a valid user".format(user))

    qb = QueryBuilder()
    qb.append(type="computer", tag='computer', filters=computerfilter)
    qb.append(JobCalculation, filters=calcfilter, tag='calc',
              has_computer='computer')
    qb.append(type="user", tag='user', filters=userfilter,
              creator_of="calc")

    if only_computer_user_pairs:
        qb.add_projection("computer", "*")
        qb.add_projection("user", "*")
        returnresult = qb.distinct().all()
    else:
        qb.add_projection("calc", "*")
        if limit is not None:
            qb.limit(limit)
        returnresult = qb.all()
        # Each row is a 1-tuple (the projected calculation); unwrap them.
        # Guard against an empty result: zip(*[])[0] raises IndexError.
        returnresult = zip(*returnresult)[0] if returnresult else []
    return returnresult
class UpdateRule(Operation):
    """
    An Operation that updates a set of entities by traversing one step of
    a QueryBuilder path: the entities at the first tag of the path are used
    as input (walkers), and the entities at the last tag are the output.
    """

    def __init__(self, querybuilder, mode=MODES.APPEND, max_iterations=1,
                 track_edges=False, track_visits=True):
        """
        :param querybuilder: the QueryBuilder instance defining the
            traversal; its first and last path entries define the source
            and target entity kinds.
        :param mode: the set-operation mode (see MODES).
        :param int max_iterations: maximum number of rule iterations.
        :param bool track_edges: whether to also collect the traversed edges.
        :param bool track_visits: whether to track visited entities.
        """
        def get_spec_from_path(queryhelp, idx):
            # Map the ORM type string at position idx of the path to the
            # corresponding entity-set key ('nodes' or 'groups').
            if (queryhelp['path'][idx]['type'].startswith('node')
                    or queryhelp['path'][idx]['type'].startswith('data')
                    or queryhelp['path'][idx]['type'] == ''):
                return 'nodes'
            elif queryhelp['path'][idx]['type'] == 'group':
                return 'groups'
            else:
                # Bug fix: report the entity actually being inspected (idx),
                # not always the one at position 0.
                raise Exception("not understood entity from ( {} )".format(
                    queryhelp['path'][idx]['type']))

        queryhelp = querybuilder.get_json_compatible_queryhelp()
        # Normalize empty type specifications to the generic Node type so
        # that get_spec_from_path below classifies them as 'nodes'.
        for pathspec in queryhelp['path']:
            if not pathspec['type']:
                pathspec['type'] = 'node.Node.'
        self._querybuilder = QueryBuilder(**queryhelp)
        # Re-read the queryhelp after construction so that tags generated
        # by the QueryBuilder itself are included.
        queryhelp = self._querybuilder.get_json_compatible_queryhelp()
        self._first_tag = queryhelp['path'][0]['tag']
        self._last_tag = queryhelp['path'][-1]['tag']
        self._entity_from = get_spec_from_path(queryhelp, 0)
        self._entity_to = get_spec_from_path(queryhelp, -1)
        super(UpdateRule, self).__init__(mode, max_iterations,
                                         track_edges=track_edges,
                                         track_visits=track_visits)

    def _init_run(self, entity_set):
        """
        Prepare the QueryBuilder projections and edge bookkeeping for a run.

        :param entity_set: the aggregate of entity sets this rule operates
            on; indexed by entity kind ('nodes'/'groups') and, for edges,
            by '<from>_<to>' keys.
        :raise KeyError: if the edge projection cannot be added (e.g. the
            two path ends are not actually connected).
        """
        # Removing all other projections in the QueryBuilder instance:
        for tag in self._querybuilder._projections.keys():
            self._querybuilder._projections[tag] = []
        # priming querybuilder to add projection on the key I need:
        self._querybuilder.add_projection(
            self._last_tag, entity_set[self._entity_to].identifier)
        self._entity_to_identifier = entity_set[self._entity_to].identifier
        if self._track_edges:
            # Bug fix: the first tag is the *source* end of the edge, so it
            # must be projected with the source entity's identifier (this
            # used to project the target identifier, breaking the edge-key
            # tuple built below).
            self._querybuilder.add_projection(
                self._first_tag, entity_set[self._entity_from].identifier)
            edge_set = entity_set._dict['{}_{}'.format(self._entity_from,
                                                       self._entity_to)]
            self._edge_label = '{}--{}'.format(self._first_tag,
                                               self._last_tag)
            # The ordered (tag, key) pairs used to extract an edge tuple
            # from each query-result row.
            self._edge_keys = tuple(
                [(self._first_tag, entity_set[self._entity_from].identifier),
                 (self._last_tag, entity_set[self._entity_to].identifier)] +
                [(self._edge_label, identifier)
                 for identifier in edge_set._additional_identifiers])
            try:
                self._querybuilder.add_projection(
                    self._edge_label, edge_set._additional_identifiers)
            except InputValidationError as e:
                # Preserve the original error message instead of dropping it.
                raise KeyError(
                    "The key for the edge is invalid.\n"
                    "Are the entities really connected, or have you "
                    "overwritten the edge-tag?\n{}".format(e))

    def _load_results(self, target_set, operational_set):
        """
        :param target_set: The set to load the results into
        :param operational_set: Where the results originate from (walkers)
        """
        # I check that I have primary keys
        primkeys = operational_set[self._entity_from].get_keys()
        # Empty the target set, so that only these results are inside
        target_set.empty()
        if primkeys:
            self._querybuilder.add_filter(self._first_tag, {
                operational_set[self._entity_from].identifier: {
                    'in': primkeys
                }
            })
            qres = self._querybuilder.dict()
            # These are the new results returned by the query
            target_set[self._entity_to].add_entities([
                item[self._last_tag][self._entity_to_identifier]
                for item in qres
            ])
            if self._track_edges:
                # Bug fix: the edge set key is '<from>_<to>' (consistent
                # with _init_run); it used to be '<to>_<to>'.
                target_set['{}_{}'.format(
                    self._entity_from, self._entity_to)].add_entities([
                        tuple(item[key1][key2]
                              for (key1, key2) in self._edge_keys)
                        for item in qres
                    ])
def test_query_path(self):
    """
    Test ancestor/descendant path queries on a small hand-built graph,
    including edge-depth filtering and path projection.
    """
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm import Node
    from aiida.common.links import LinkType
    from aiida.backends.utils import QueryFactory

    q = QueryFactory()()

    n1 = Node()
    n1.label = 'n1'
    n1.store()
    n2 = Node()
    n2.label = 'n2'
    n2.store()
    n3 = Node()
    n3.label = 'n3'
    n3.store()
    n4 = Node()
    n4.label = 'n4'
    n4.store()
    n5 = Node()
    n5.label = 'n5'
    n5.store()
    n6 = Node()
    n6.label = 'n6'
    n6.store()
    n7 = Node()
    n7.label = 'n7'
    n7.store()
    n8 = Node()
    n8.label = 'n8'
    n8.store()
    n9 = Node()
    n9.label = 'n9'
    n9.store()

    # I create a strange graph, inserting links in a order
    # such that I often have to create the transitive closure
    # between two graphs
    # I set everything as an INPUT-links now, because the QueryBuilder path
    # query or our custom queries don't follow other links than CREATE or
    # INPUT
    n3.add_link_from(n2, link_type=LinkType.INPUT)
    n2.add_link_from(n1, link_type=LinkType.INPUT)
    n5.add_link_from(n3, link_type=LinkType.INPUT)
    n5.add_link_from(n4, link_type=LinkType.INPUT)
    n4.add_link_from(n2, link_type=LinkType.INPUT)
    n7.add_link_from(n6, link_type=LinkType.INPUT)
    n8.add_link_from(n7, link_type=LinkType.INPUT)

    # There are no parents to n9, checking that
    self.assertEqual(set([]), set(q.get_all_parents([n9.pk])))
    # There is one parent to n7 (namely n6)
    self.assertEqual(set([(_, ) for _ in (n6.pk, )]),
                     set([tuple(_) for _ in q.get_all_parents([n7.pk])]))
    # There are several parents to n4
    self.assertEqual(set([(_.pk, ) for _ in (n1, n2)]),
                     set([tuple(_) for _ in q.get_all_parents([n4.pk])]))
    # There are several parents to n5
    self.assertEqual(set([(_.pk, ) for _ in (n1, n2, n3, n4)]),
                     set([tuple(_) for _ in q.get_all_parents([n5.pk])]))

    # Yet, no links from 1 to 8
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n1.pk
        }, tag='anc').append(Node, descendant_of='anc', filters={
            'id': n8.pk
        }).count(), 0)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(Node, ancestor_of='desc', filters={
            'id': n1.pk
        }).count(), 0)

    n6.add_link_from(n5, link_type=LinkType.INPUT)
    # Yet, now 2 links from 1 to 8
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n1.pk
        }, tag='anc').append(Node, descendant_of='anc', filters={
            'id': n8.pk
        }).count(), 2)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(Node, ancestor_of='desc', filters={
            'id': n1.pk
        }).count(), 2)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(
            Node,
            ancestor_of='desc',
            filters={'id': n1.pk},
            edge_filters={'depth': {
                '<': 6
            }},
        ).count(), 2)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(
            Node,
            ancestor_of='desc',
            filters={'id': n1.pk},
            edge_filters={'depth': 5},
        ).count(), 2)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(
            Node,
            ancestor_of='desc',
            filters={'id': n1.pk},
            edge_filters={'depth': {
                '<': 5
            }},
        ).count(), 0)

    # TODO write a query that can filter certain paths by traversed ID
    qb = QueryBuilder().append(
        Node,
        filters={'id': n8.pk},
        tag='desc',
    ).append(Node, ancestor_of='desc', edge_project='path',
             filters={'id': n1.pk})
    queried_path_set = set([frozenset(p) for p, in qb.all()])
    paths_there_should_be = set([
        frozenset([n1.pk, n2.pk, n3.pk, n5.pk, n6.pk, n7.pk, n8.pk]),
        frozenset([n1.pk, n2.pk, n4.pk, n5.pk, n6.pk, n7.pk, n8.pk])
    ])
    self.assertTrue(queried_path_set == paths_there_should_be)

    qb = QueryBuilder().append(Node, filters={
        'id': n1.pk
    }, tag='anc').append(Node, descendant_of='anc', filters={'id': n8.pk},
                         edge_project='path')
    self.assertTrue(
        set([frozenset(p) for p, in qb.all()]) == set([
            frozenset([n1.pk, n2.pk, n3.pk, n5.pk, n6.pk, n7.pk, n8.pk]),
            frozenset([n1.pk, n2.pk, n4.pk, n5.pk, n6.pk, n7.pk, n8.pk])
        ]))

    n7.add_link_from(n9, link_type=LinkType.INPUT)
    # Still two links...
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n1.pk
        }, tag='anc').append(Node, descendant_of='anc', filters={
            'id': n8.pk
        }).count(), 2)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(Node, ancestor_of='desc', filters={
            'id': n1.pk
        }).count(), 2)

    n9.add_link_from(n6, link_type=LinkType.INPUT)
    # And now there should be 4 nodes
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n1.pk
        }, tag='anc').append(Node, descendant_of='anc', filters={
            'id': n8.pk
        }).count(), 4)
    self.assertEqual(
        QueryBuilder().append(Node, filters={
            'id': n8.pk
        }, tag='desc').append(Node, ancestor_of='desc', filters={
            'id': n1.pk
        }).count(), 4)

    qb = QueryBuilder().append(Node, filters={
        'id': n1.pk
    }, tag='anc').append(Node, descendant_of='anc', filters={'id': n8.pk},
                         edge_tag='edge')
    qb.add_projection('edge', 'depth')
    # Bug fix: these used to call assertTrue(set_a, set_b), where set_b was
    # silently interpreted as the failure *message* and nothing was compared.
    self.assertEqual(set(zip(*qb.all())[0]), set([5, 6]))
    qb.add_filter('edge', {'depth': 6})
    self.assertEqual(set(zip(*qb.all())[0]), set([6]))
def test_query_path(self):
    """
    Test ancestor/descendant queries on a small hand-built graph, for both
    the DbPath-backed (with_dbpath=True) and recursive (with_dbpath=False)
    implementations.
    """
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm import Node

    n1 = Node()
    n1.label = 'n1'
    n1.store()
    n2 = Node()
    n2.label = 'n2'
    n2.store()
    n3 = Node()
    n3.label = 'n3'
    n3.store()
    n4 = Node()
    n4.label = 'n4'
    n4.store()
    n5 = Node()
    n5.label = 'n5'
    n5.store()
    n6 = Node()
    n6.label = 'n6'
    n6.store()
    n7 = Node()
    n7.label = 'n7'
    n7.store()
    n8 = Node()
    n8.label = 'n8'
    n8.store()
    n9 = Node()
    n9.label = 'n9'
    n9.store()

    # I create a strange graph, inserting links in a order
    # such that I often have to create the transitive closure
    # between two graphs
    n3.add_link_from(n2)
    n2.add_link_from(n1)
    n5.add_link_from(n3)
    n5.add_link_from(n4)
    n4.add_link_from(n2)
    n7.add_link_from(n6)
    n8.add_link_from(n7)

    for with_dbpath in (True, False):
        # Yet, no links from 1 to 8
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n1.pk}, tag='anc'
            ).append(Node, descendant_of='anc', filters={'id': n8.pk}
                     ).count(), 0)
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n8.pk}, tag='desc'
            ).append(Node, ancestor_of='desc', filters={'id': n1.pk}
                     ).count(), 0)

    n6.add_link_from(n5)
    # Yet, now 2 links from 1 to 8
    for with_dbpath in (True, False):
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n1.pk}, tag='anc'
            ).append(Node, descendant_of='anc', filters={'id': n8.pk}
                     ).count(), 2)
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n8.pk}, tag='desc'
            ).append(Node, ancestor_of='desc', filters={'id': n1.pk}
                     ).count(), 2)

    qb = QueryBuilder(with_dbpath=False, expand_path=True).append(
        Node, filters={'id': n8.pk}, tag='desc',
    ).append(Node, ancestor_of='desc', edge_project='path',
             filters={'id': n1.pk})
    queried_path_set = set([frozenset(p) for p, in qb.all()])
    paths_there_should_be = set([
        frozenset([n1.pk, n2.pk, n3.pk, n5.pk, n6.pk, n7.pk, n8.pk]),
        frozenset([n1.pk, n2.pk, n4.pk, n5.pk, n6.pk, n7.pk, n8.pk])
    ])
    self.assertTrue(queried_path_set == paths_there_should_be)

    qb = QueryBuilder(with_dbpath=False, expand_path=True).append(
        Node, filters={'id': n1.pk}, tag='anc'
    ).append(
        Node, descendant_of='anc', filters={'id': n8.pk},
        edge_project='path'
    )
    self.assertTrue(
        set([frozenset(p) for p, in qb.all()]) == set([
            frozenset([n1.pk, n2.pk, n3.pk, n5.pk, n6.pk, n7.pk, n8.pk]),
            frozenset([n1.pk, n2.pk, n4.pk, n5.pk, n6.pk, n7.pk, n8.pk])
        ]))

    n7.add_link_from(n9)
    # Still two links...
    for with_dbpath in (True, False):
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n1.pk}, tag='anc'
            ).append(Node, descendant_of='anc', filters={'id': n8.pk}
                     ).count(), 2)
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n8.pk}, tag='desc'
            ).append(Node, ancestor_of='desc', filters={'id': n1.pk}
                     ).count(), 2)

    n9.add_link_from(n6)
    # And now there should be 4 nodes
    for with_dbpath in (True, False):
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n1.pk}, tag='anc'
            ).append(Node, descendant_of='anc', filters={'id': n8.pk}
                     ).count(), 4)
        self.assertEqual(
            QueryBuilder(with_dbpath=with_dbpath).append(
                Node, filters={'id': n8.pk}, tag='desc'
            ).append(Node, ancestor_of='desc', filters={'id': n1.pk}
                     ).count(), 4)

    for with_dbpath in (True, False):
        # Bug fix: the loop variable was ignored here and with_dbpath=True
        # was hard-coded, so the recursive implementation was never tested.
        qb = QueryBuilder(with_dbpath=with_dbpath).append(
            Node, filters={'id': n1.pk}, tag='anc'
        ).append(
            Node, descendant_of='anc', filters={'id': n8.pk},
            edge_tag='edge'
        )
        qb.add_projection('edge', 'depth')
        # Bug fix: these used to call assertTrue(set_a, set_b), where set_b
        # was taken as the failure *message* and nothing was compared.
        self.assertEqual(set(zip(*qb.all())[0]), set([5, 6]))
        qb.add_filter('edge', {'depth': 6})
        self.assertEqual(set(zip(*qb.all())[0]), set([6]))
#This code works just in this case and in this moment in time when #I don't have any other calculation in the database. #I will be more systematic with labels, description and extras in the future #in order to make the querying easier. from aiida.orm.querybuilder import QueryBuilder from aiida.orm.calculation.work import WorkCalculation from common_wf import fit_birch_murnaghan_params, birch_murnaghan q = QueryBuilder() q.append(WorkCalculation, tag="wc", filters={'attributes._process_label': 'EquationOfStatesHe'}) q.add_projection('wc', "id") for i in q.iterall(): p = load_node(i[0]) if i[0] == 507: print i[0] else: print i[0], p.inp.pseudo_path #, p.out.result.get_attr("eos_data") k = 0 for i in p.out.initial_structure.sites: k = k + 1 vol = [] en = [] for s in range(5): # print float(p.out.result.get_attr("eos_data")[s][0]/k), float(p.out.result.get_attr("eos_data")[s][1]/k) vol.append(float(p.out.result.get_attr("eos_data")[s][0]) / k) en.append(float(p.out.result.get_attr("eos_data")[s][1]) / k) #print vol, en