def test_ordering_limits_offsets_of_results_for_SQLA(self):
    """Test ordering, offset and limit of SQLA query results.

    Creates 10 nodes with an integer-castable ``foo`` attribute, then checks
    that ordering by that attribute, applying an offset and finally a limit
    each return the expected slice of values.
    """
    from aiida.orm import Node
    from aiida.orm.querybuilder import QueryBuilder

    # Creating 10 nodes with an attribute that can be ordered
    for i in range(10):
        n = Node()
        n._set_attr('foo', i)
        n.store()

    qb = QueryBuilder().append(
        Node, project='attributes.foo'
    ).order_by(
        {Node: {'attributes.foo': {'cast': 'i'}}}
    )
    # On Python 3 zip() returns an iterator, so it cannot be indexed
    # directly; materialize it first.  Likewise range() must be converted
    # to a list before comparing against the list of results.
    res = list(list(zip(*qb.all()))[0])
    self.assertEqual(res, list(range(10)))

    # Now applying an offset:
    qb.offset(5)
    res = list(list(zip(*qb.all()))[0])
    self.assertEqual(res, list(range(5, 10)))

    # Now also applying a limit:
    qb.limit(3)
    res = list(list(zip(*qb.all()))[0])
    self.assertEqual(res, list(range(5, 8)))
def test_ordering_limits_offsets_sqla(self):
    """Test ordering limits offsets of SQLA query results."""
    from aiida.orm import Node, Data
    from aiida.orm.querybuilder import QueryBuilder

    # Store ten Data nodes carrying an orderable integer attribute.
    for value in range(10):
        data_node = Data()
        data_node.set_attribute('foo', value)
        data_node.store()

    builder = QueryBuilder()
    builder.append(Node, project='attributes.foo')
    builder.order_by({Node: {'attributes.foo': {'cast': 'i'}}})

    result = next(zip(*builder.all()))
    self.assertEqual(result, tuple(range(10)))

    # Skip the first five results.
    builder.offset(5)
    result = next(zip(*builder.all()))
    self.assertEqual(result, tuple(range(5, 10)))

    # Additionally cap the number of results at three.
    builder.limit(3)
    result = next(zip(*builder.all()))
    self.assertEqual(result, tuple(range(5, 8)))
def get_mat_dict(mat_id):
    """Given a curated-cof label, query the group and return a dictionary
    with tags as keys and nodes as values.

    If multiple versions are available, ``qb.all()[0][0]`` should take the
    last one computed.  Group-level metadata (``mat_id``,
    ``name_conventional``, ``doi_ref`` and ``workflow_version``) is copied
    from the group extras into the returned dictionary.
    """
    mat_dict = {}

    # Get all nodes belonging to the group and index them by their tag extra.
    qb = QueryBuilder()
    qb.append(Group, filters={'label': {'like': GROUP_DIR + mat_id}}, tag='group')
    qb.append(Node, project=['extras.{}'.format(TAG_KEY), '*'], with_group='group')
    for tag, node in qb.all():
        mat_dict[tag] = node

    # Get extra info from the group itself.
    qb = QueryBuilder()
    qb.append(Group, filters={'label': {'like': GROUP_DIR + mat_id}}, project=['*'])
    group = qb.all()[0][0]
    # Copy the group-level metadata keys verbatim.
    for key in ('mat_id', 'name_conventional', 'doi_ref', 'workflow_version'):
        mat_dict[key] = group.extras[key]

    return mat_dict
def delete_groups():
    """Delete all groups whose label starts with ``GROUP_DIR``, clearing their nodes first."""
    qb = QueryBuilder()
    qb.append(Group, filters={'label': {'like': GROUP_DIR + "%"}})
    # Run the query once instead of twice (the original called qb.all()
    # both for the emptiness check and for iteration).
    results = qb.all()
    if results:
        print("Groups '{}' found, deleting...".format(GROUP_DIR))
        for row in results:
            group = row[0]
            group.clear()  # detach all nodes before deleting the group itself
            Group.objects.delete(group.pk)
    else:
        print("No previous Group {} found to delete.".format(GROUP_DIR))
def test_queryhelp(self):
    """
    Here I test the queryhelp by seeing whether results are the
    same as using the append method. I also check passing of tuples.
    """
    from aiida.orm.data.structure import StructureData
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.data import Data
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.group import Group
    from aiida.orm.computer import Computer
    g = Group(name='helloworld').store()
    # Store one node of each class, each carrying the same attribute,
    # and collect them all in the group.
    for cls in (StructureData, ParameterData, Data):
        obj = cls()
        obj._set_attr('foo-qh2', 'bar')
        obj.store()
        g.add_nodes(obj)
    # Each entry is (class or tuple of classes, expected match count,
    # whether subclasses should be matched as well).
    for cls, expected_count, subclassing in (
            (StructureData, 1, True),
            (ParameterData, 1, True),
            (Data, 3, True),
            (Data, 1, False),
            ((ParameterData, StructureData), 2, True),
            ((ParameterData, StructureData), 2, False),
            ((ParameterData, Data), 2, False),
            ((ParameterData, Data), 3, True),
            ((ParameterData, Data, StructureData), 3, False),
    ):
        qb = QueryBuilder()
        qb.append(cls, filters={'attributes.foo-qh2': 'bar'},
                  subclassing=subclassing, project='uuid')
        self.assertEqual(qb.count(), expected_count)

        # A QueryBuilder rebuilt from the serialized queryhelp must give
        # exactly the same count and the same set of uuids.
        qh = qb.get_json_compatible_queryhelp()
        qb_new = QueryBuilder(**qh)
        self.assertEqual(qb_new.count(), expected_count)
        self.assertEqual(
            sorted([uuid for uuid, in qb.all()]),
            sorted([uuid for uuid, in qb_new.all()]))

    # Appending a class directly or wrapped in a one-element tuple
    # must be equivalent.
    qb = QueryBuilder().append(Group, filters={'name': 'helloworld'})
    self.assertEqual(qb.count(), 1)

    qb = QueryBuilder().append((Group, ), filters={'name': 'helloworld'})
    self.assertEqual(qb.count(), 1)

    qb = QueryBuilder().append(Computer, )
    self.assertEqual(qb.count(), 1)

    qb = QueryBuilder().append(cls=(Computer, ))
    self.assertEqual(qb.count(), 1)
def test_and_get_codenode(codenode, expected_code_type, use_exceptions=False):
    """
    Pass a code node and an expected code (plugin) type. Check that the
    code exists, is unique, and return the Code object.

    :param codenode: the name of the code to load (in the form label@machine)
    :param expected_code_type: a string with the plugin that is expected to
      be loaded. In case no plugins exist with the given name, show all existing
      plugins of that type
    :param use_exceptions: if True, raise a ValueError exception instead of
      calling sys.exit(1)
    :return: a Code object
    """
    import sys
    from aiida.common.exceptions import NotExistent
    from aiida.orm import Code

    def _fail(msg):
        # Either raise or write to stderr and abort, depending on the flag.
        if use_exceptions:
            raise ValueError(msg)
        # sys.stderr.write works on both Python 2 and 3, unlike the
        # Python-2-only 'print >> sys.stderr' statement it replaces.
        sys.stderr.write(msg + '\n')
        sys.exit(1)

    try:
        if codenode is None:
            raise ValueError
        code = codenode
        if code.get_input_plugin_name() != expected_code_type:
            raise ValueError
    except (NotExistent, ValueError):
        from aiida.orm.querybuilder import QueryBuilder
        qb = QueryBuilder()
        qb.append(
            Code,
            filters={'attributes.input_plugin': {'==': expected_code_type}},
            project='*')

        valid_code_labels = [
            "{}@{}".format(c.label, c.get_computer().name)
            for [c] in qb.all()
        ]

        if valid_code_labels:
            msg = ("Pass as further parameter a valid code label.\n"
                   "Valid labels with a {} executable are:\n".format(
                       expected_code_type))
            msg += "\n".join("* {}".format(label) for label in valid_code_labels)
            _fail(msg)
        else:
            msg = ("Code not valid, and no valid codes for {}.\n"
                   "Configure at least one first using\n"
                   " verdi code setup".format(expected_code_type))
            _fail(msg)
    return code
def get_upf_groups(cls, filter_elements=None, user=None):
    """Return all names of groups of type UpfFamily, possibly with some filters.

    :param filter_elements: A string or a list of strings.
        If present, returns only the groups that contains one UPF for every
        element present in the list. The default is `None`, meaning that all
        families are returned.
    :param user: if None (default), return the groups for all users.
        If defined, it should be either a `User` instance or the user email.
    :return: list of `Group` entities of type UPF.
    """
    from aiida.orm import Group
    from aiida.orm import QueryBuilder
    from aiida.orm import User

    builder = QueryBuilder()
    builder.append(Group, filters={'type_string': {'==': cls.upffamily_type_string}}, tag='group', project='*')

    if user:
        # The docstring allows either a `User` instance or an email string;
        # normalize to the email before filtering, otherwise a `User`
        # instance would never match the string-valued `email` column.
        email = user.email if isinstance(user, User) else user
        builder.append(User, filters={'email': {'==': email}}, with_group='group')

    if isinstance(filter_elements, str):
        filter_elements = [filter_elements]

    if filter_elements is not None:
        builder.append(UpfData, filters={'attributes.element': {'in': filter_elements}}, with_group='group')

    builder.order_by({Group: {'id': 'asc'}})

    return [group for group, in builder.all()]
def _rehash_cmd(all, class_name, pks):
    """Re-hash nodes of `class_name`, selected by `pks` or, with `all`, every one."""
    try_load_dbenv()
    from aiida.orm.querybuilder import QueryBuilder

    # Resolve the node class to match; abort on an unknown class name.
    try:
        node_class = load_class(class_name)
    except ClassNotFoundException:
        click.echo("Could not load class '{}'.\nAborted!".format(class_name))
        sys.exit(1)

    # Build the query, restricting to the given PKs when provided.
    qb = QueryBuilder()
    qb.append(node_class, tag='node')
    if pks:
        qb.add_filter('node', {'id': {'in': pks}})
    elif not all:
        click.echo(
            "Nothing specified, nothing re-hashed.\nExplicitly specify the PK of the nodes, or use '--all'."
        )
        return

    if not qb.count():
        click.echo('No matching nodes found.')
        return

    # Re-hash every matching node, printing a progress dot every hundred.
    num_rehashed = 0
    for index, (node,) in enumerate(qb.all()):
        if index % 100 == 0:
            click.echo('.', nl=False)
        node.rehash()
        num_rehashed = index + 1
    click.echo('\nAll done! {} node(s) re-hashed.'.format(num_rehashed))
def rehash(nodes, entry_point):
    """Recompute the hash for nodes in the database

    The set of nodes that will be rehashed can be filtered by their identifier
    and/or based on their class.
    """
    from aiida.orm.querybuilder import QueryBuilder

    if nodes:
        # Explicit nodes given: keep only those of the requested class,
        # wrapped in 1-tuples to mirror the QueryBuilder row shape.
        to_hash = [(node,) for node in nodes if isinstance(node, entry_point)]
    else:
        # No explicit selection: query for every node of the class.
        query = QueryBuilder()
        query.append(entry_point, tag='node')
        to_hash = query.all()

    if not to_hash:
        echo.echo_critical('no matching nodes found')

    count = 0
    for index, (node,) in enumerate(to_hash):
        # Emit a progress dot every hundred nodes.
        if index % 100 == 0:
            echo.echo('.', nl=False)
        node.rehash()
        count += 1

    echo.echo('')
    echo.echo_success('{} nodes re-hashed'.format(count))
def get_data_aiida(inp_list):
    """Query the AiiDA database: find info in the README.

    :param inp_list: list of quantity names to project; entries containing
        'henry_coefficient_average' are projected without their suffix.
    :return: list of query result rows (cif label followed by the
        projected attribute values).
    """
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm import Node, Dict, Group, CifData

    # NOTE: the unused local `filters = {}` of the original was removed.
    qb = QueryBuilder()
    qb.append(Group, filters={'label': {'like': 'group_%'}}, tag='group')
    qb.append(CifData, with_group='group', filters={'extras.group_tag': 'orig_cif'}, project=['label'])

    for inp in inp_list:
        if 'henry_coefficient_average' in inp:
            proj = 'henry_coefficient_average'  # take out _co2, _n2, _ht
        else:
            proj = inp
        qb.append(Dict,
                  with_group='group',
                  filters={'extras.group_tag': get_tag[inp]},
                  project=['attributes.{}'.format(proj)])
    return qb.all()
def get_db_nodes_dict():
    """Return a dictionary with all the curated materials, having the material
    label as key and a dict of curated nodes as value.

    IMPROVED FOR SPEED!
    """
    qb = QueryBuilder()
    qb.append(Group, filters={'label': {'like': GROUP_DIR + "%"}}, tag='g', project=['extras'])
    qb.append(Node,
              filters={'extras': {'has_key': TAG_KEY}},
              with_group='g',
              project=[f'extras.{TAG_KEY}', 'uuid'])

    db_nodes_dict = {}
    # Each row projects exactly three values: [group-extras, node-tag, node-uuid]
    # (the original comment wrongly claimed a fourth, group-label, column).
    for group_extras, node_tag, node_uuid in qb.all():
        mat_id = group_extras['mat_id']
        if mat_id not in db_nodes_dict:
            db_nodes_dict[mat_id] = {
                'name_conventional': group_extras['name_conventional'],
                'doi_ref': group_extras['doi_ref'],
                'workflow_version': group_extras['workflow_version'],
            }
        db_nodes_dict[mat_id][node_tag] = node_uuid
    return db_nodes_dict
def get_code_helper(cls, label, machinename=None):
    """
    :param label: the code label identifying the code to load
    :param machinename: the machine name where code is setup
    :raise aiida.common.NotExistent: if no code identified by the given string is found
    :raise aiida.common.MultipleObjectsError: if the string cannot identify uniquely a code
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.computers import Computer

    query = QueryBuilder()
    query.append(cls, filters={'label': {'==': label}}, project=['*'], tag='code')
    if machinename:
        # Restrict to codes configured on the given computer.
        query.append(Computer, filters={'name': {'==': machinename}}, with_node='code')

    num_matches = query.count()
    if num_matches == 0:
        raise NotExistent("'{}' is not a valid code name.".format(label))
    if num_matches > 1:
        codes = query.all(flat=True)
        retstr = ("There are multiple codes with label '{}', having IDs: ".format(label))
        retstr += ', '.join(sorted([str(code.pk) for code in codes])) + '.\n'
        retstr += ('Relabel them (using their ID), or refer to them with their ID.')
        raise MultipleObjectsError(retstr)
    return query.first()[0]
def get_psf_groups(cls, filter_elements=None, user=None):
    """
    Return all names of groups of type PsfFamily, possibly with some filters.

    :param filter_elements: A string or a list of strings.
        If present, returns only the groups that contains one Psf for
        every element present in the list. Default=None, meaning that
        all families are returned.
    :param user: if None (default), return the groups for all users.
        If defined, it should be either a DbUser instance, or a string
        for the username (that is, the user email).
    """
    from aiida.orm import QueryBuilder
    from aiida.orm import User
    from aiida_siesta.groups.pseudos import PsfFamily

    builder = QueryBuilder()
    builder.append(PsfFamily, tag='group', project='*')

    if user is not None:
        builder.append(User, filters={'email': {'==': user}}, with_group='group')

    # Accept a bare string as a one-element list of elements.
    if isinstance(filter_elements, str):
        filter_elements = [filter_elements]
    if filter_elements is not None:
        builder.append(
            PsfData,
            filters={'attributes.element': {'in': filter_elements}},
            with_group='group')

    builder.order_by({PsfFamily: {'id': 'asc'}})
    return [group for group, in builder.all()]
def delete_trash():
    """
    This method deletes all AiiDA nodes in the DB, which have a extra trash=True
    And all their children. Could be advanced to a garbage collector.

    Be careful to use it.
    """
    # Query the DB for nodes marked as trash.
    query = QueryBuilder()
    query.append(Node, filters={'extras.trash': {'==': True}})

    nodes_to_delete_pks = []
    for [node] in query.all():
        nodes_to_delete_pks.append(node.dbnode.pk)
        print('pk {}, extras {}'.format(node.dbnode.pk, node.get_extras()))

    # Delete the trash nodes.
    print('deleting nodes {}'.format(nodes_to_delete_pks))
    delete_nodes(nodes_to_delete_pks)
def mock_vasp(fresh_aiida_env, localhost):
    """Points to a mock-up of a VASP executable."""
    from aiida.orm import Code
    from aiida.orm.querybuilder import QueryBuilder

    builder = QueryBuilder()
    builder.append(Code, tag='code')
    builder.add_filter('code', {'label': {'==': 'mock-vasp'}})
    matches = builder.all()

    if matches:
        # Reuse the already-stored code node.
        return matches[0][0]

    # No existing code node: set up a new one pointing at the mock-vasp
    # executable found on the PATH.
    env = os.environ.copy()
    if not localhost.pk:
        localhost.store()
    # returns unicode
    mock_vasp_path = sp.check_output(['which', 'mock-vasp'], env=env, universal_newlines=True).strip()

    code = Code()
    code.label = 'mock-vasp'
    code.description = 'Mock VASP for tests'
    code.set_remote_computer_exec((localhost, mock_vasp_path))
    code.set_input_plugin_name('vasp.vasp')
    aiidapath = py_path.local(fresh_aiida_env._manager.root_dir).join('.aiida')
    code.set_prepend_text('export AIIDA_PATH={}'.format(aiidapath))
    return code
def _get_codes(self, input_plugin):
    """Return a dict mapping 'label@computer' to non-hidden Code nodes with
    the given input plugin, on enabled computers configured for the current user."""
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm import Code, Computer
    from aiida.backends.utils import get_automatic_user

    current_user = get_automatic_user()

    builder = QueryBuilder()
    builder.append(Computer, filters={'enabled': True}, project=['*'], tag='computer')
    builder.append(
        Code,
        filters={
            'attributes.input_plugin': {'==': input_plugin},
            'extras.hidden': {"~==": True}
        },
        project=['*'],
        has_computer='computer')

    # Only codes on computers configured for the current user.
    configured = [
        (computer, code) for computer, code in builder.all()
        if computer.is_user_configured(current_user)
    ]
    return {
        "{}@{}".format(code.label, computer.name): code
        for computer, code in configured
    }
def get_input_folder():
    """
    Gets or creates the input folder containing the Wannier90 output.
    """
    folder_description = u'Bi Wannier90 output'

    query_builder = QueryBuilder()
    query_builder.append(
        FolderData,
        filters={'description': {'==': folder_description}}
    )
    matches = query_builder.all()

    if not matches:
        # Nothing stored yet: build the folder from the reference files on disk.
        folder = FolderData()
        input_folder = './reference_input'
        for filename in os.listdir(input_folder):
            folder.put_object_from_file(
                os.path.abspath(os.path.join(input_folder, filename)),
                filename
            )
        folder.description = folder_description
        folder.store()
        return folder

    if len(matches) > 1:
        raise ValueError(
            'Query returned more than one matching FolderData instance.'
        )

    return matches[0][0]
def test_attribute_existence(self):
    """Test the `!has_key` operator combined with a value comparison.

    A node whose `whatever` attribute exists and is >= 1. must not be
    matched by a query asking for nodes where `whatever` is missing OR
    smaller than 1.
    """
    from aiida.orm.node import Node
    from aiida.orm.querybuilder import QueryBuilder

    val = 1.
    res_uuids = set()

    # I'm storing a value under key whatever:
    n1 = Node()
    n1._set_attr("whatever", 3.)
    n1._set_attr("test_case", "test_attribute_existence")
    n1.store()

    # I want all the nodes where whatever is smaller than 1. or there is no such value.
    # Restrict the query to nodes created by this test case, otherwise any
    # pre-existing node in the database without a `whatever` attribute would
    # match and break the empty-result assertion below.
    qb = QueryBuilder()
    qb.append(Node,
              filters={
                  'or': [{
                      'attributes': {'!has_key': 'whatever'}
                  }, {
                      'attributes.whatever': {'<': val}
                  }],
                  'attributes.test_case': {'==': 'test_attribute_existence'},
              },
              project='uuid')
    res_query = set([str(_[0]) for _ in qb.all()])
    self.assertEqual(res_query, res_uuids)
def get_data_aiida(projections, sliders_dict, quantities, plot_info):
    """Query the AiiDA database.

    :param projections: names of the attributes to project (x, y, color, name)
    :param sliders_dict: slider widgets keyed by quantity name
    :param quantities: quantity metadata (including the full 'range')
    :param plot_info: widget whose ``text`` is updated with progress
    :return: dict of plot columns (x, y, uuid, color, name)
    """
    from aiida import load_dbenv, is_dbenv_loaded
    from aiida.backends import settings
    if not is_dbenv_loaded():
        load_dbenv(profile=settings.AIIDADB_PROFILE)
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.data.parameter import ParameterData

    filters = {}

    def add_range_filter(bounds, label):
        # a bit of cheating until this is resolved
        # https://github.com/aiidateam/aiida_core/issues/1389
        # filters['attributes.'+label] = {'>=':bounds[0]}
        filters["attributes." + label] = {
            "and": [{">=": bounds[0]}, {"<": bounds[1]}]
        }

    for k, v in sliders_dict.items():
        # Note: filtering is costly, avoid if possible
        if not v.value == quantities[k]["range"]:
            add_range_filter(v.value, k)

    qb = QueryBuilder()
    qb.append(
        ParameterData,
        filters=filters,
        project=["attributes." + p for p in projections] + ["uuid", "extras.cif_uuid"],
    )

    nresults = qb.count()
    if nresults == 0:
        plot_info.text = "No matching frameworks found."
        return data_empty

    plot_info.text = "{} frameworks found. Plotting...".format(nresults)
    # x,y position
    x, y, clrs, uuids, names, cif_uuids = zip(*qb.all())
    plot_info.text = "{} frameworks queried".format(nresults)

    # On Python 3 map() returns a lazy iterator, which the returned plot
    # data dict cannot consume; materialize everything into lists instead.
    x = [float(i) for i in x]
    y = [float(i) for i in y]
    cif_uuids = [str(i) for i in cif_uuids]
    uuids = [str(i) for i in uuids]

    if projections[2] == "group":
        # clrs = map(lambda clr: bondtypes.index(clr), clrs)
        clrs = [str(i) for i in clrs]
    else:
        clrs = [float(i) for i in clrs]

    return dict(x=x, y=y, uuid=cif_uuids, color=clrs, name=names)
def get_upf_family_names(self):
    """Get the list of all upf family names to which the pseudo belongs."""
    from aiida.orm import Group
    from aiida.orm import QueryBuilder

    builder = QueryBuilder()
    # Project only the group label of every UPF family containing this node.
    builder.append(
        Group,
        filters={'type_string': {'==': self.upffamily_type_string}},
        tag='group',
        project='label')
    builder.append(UpfData, filters={'id': {'==': self.id}}, with_group='group')
    return [row[0] for row in builder.all()]
def _get(cls, name=None):
    """Return all stored instances of `cls`, optionally restricted to a given `name` attribute."""
    # check if we can find the basis family
    from aiida.orm.querybuilder import QueryBuilder

    builder = QueryBuilder()
    if name is None:
        builder.append(cls, filters={})
    else:
        builder.append(cls, filters={'attributes.name': {'==': name}})
    return [entry for [entry] in builder.all()]
def get_computer_names():
    """
    Retrieve the list of computers in the DB.
    """
    from aiida.orm.querybuilder import QueryBuilder

    builder = QueryBuilder()
    builder.append(entity_type='computer', project=['name'])
    if builder.count() == 0:
        return []
    # Each row is a one-element list; transpose and take the name column.
    return next(zip(*builder.all()))
def from_md5(cls, md5):
    """
    Return a list of all Basis Sets that match a given MD5 hash.

    Note that the hash has to be stored in a _md5 attribute, otherwise
    the basis will not be found.
    """
    from aiida.orm.querybuilder import QueryBuilder

    builder = QueryBuilder()
    builder.append(cls, filters={'attributes.md5': {'==': md5}})
    return [entry for [entry] in builder.all()]
def preprocess(self):
    """Set the 'formula' extra on every CifData node that does not have it yet."""
    qb = QueryBuilder()
    filters = {'extras': {'!has_key': 'formula'}}
    #filters['or'] = [{'type':CifData._plugin_type_string},{'type':StructureData._plugin_type_string}]
    qb.append(CifData, filters=filters)
    for [node] in qb.all():  # iterall() would interfere with set_extra()
        try:
            formula = node.get_formula()
        # Narrowed from a bare `except:`, which would also swallow
        # KeyboardInterrupt and SystemExit.
        except Exception:
            # Fall back to the first entry of get_formulae().
            formula = node.get_formulae()[0]
        node.set_extra("formula", formula)
def test_db_flushed(configure):
    """Check that a Str node stored by a previous run is absent, then store one."""
    from aiida.orm.data.base import Str
    from aiida.orm.querybuilder import QueryBuilder

    test_string = 'this string should not be present when the test run starts'
    tag = 'Test string tag'

    # The database must not contain the tagged string from a previous run.
    builder = QueryBuilder()
    builder.append(Str, filters={'label': {'==': tag}})
    assert not builder.all()

    # Store it now, so a non-flushed database would fail the next run.
    str_obj = Str(test_string)
    str_obj.label = tag
    str_obj.store()
def from_md5(cls, md5):
    """
    Return a list of all CIF files that match a given MD5 hash.

    .. note:: the hash has to be stored in a ``_md5`` attribute,
        otherwise the CIF file will not be found.
    """
    from aiida.orm.querybuilder import QueryBuilder

    query = QueryBuilder()
    query.append(cls, filters={'attributes.md5': {'==': md5}})
    return query.all(flat=True)
def request_calcjobs():
    ''' Returns the total number of running CalcJobs '''
    qb = QueryBuilder()
    qb.append(
        CalcJobNode,  # I am appending a CalcJobNode
        filters={  # Specifying the filters:
            # keep only calculations that are still active
            'attributes.process_state': {'in': ['created', 'running', 'waiting']},
        },
    )
    # count() lets the database do the counting, instead of loading every
    # matching node into memory via len(qb.all()).
    return qb.count()
def get_all_parents(self, node_pks, return_values=None):
    """
    Get all the parents of given nodes

    :param node_pks: one node pk or an iterable of node pks
    :param return_values: entity properties to project for the ancestors
        (defaults to ['id'])
    :return: a list of aiida objects with all the parents of the nodes
    """
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm import Node

    # Avoid a mutable default argument; fall back to projecting the id.
    if return_values is None:
        return_values = ['id']

    qb = QueryBuilder()
    qb.append(Node, tag='low_node', filters={'id': {'in': node_pks}})
    qb.append(Node, ancestor_of='low_node', project=return_values)
    return qb.all()
def get_psf_family_names(self):
    """
    Get the list of all psf family names to which the pseudo belongs
    """
    from aiida.orm import QueryBuilder
    from aiida_siesta.groups.pseudos import PsfFamily

    builder = QueryBuilder()
    # Project only the family label of every PsfFamily containing this node.
    builder.append(PsfFamily, tag='group', project='label')
    builder.append(PsfData, filters={'id': {'==': self.id}}, with_group='group')
    return [label for label, in builder.all()]
def from_md5(cls, md5):
    """Return a list of all `UpfData` that match the given md5 hash.

    .. note:: assumes hash of stored `UpfData` nodes is stored in the `md5` attribute

    :param md5: the file hash
    :return: list of existing `UpfData` nodes that have the same md5 hash
    """
    from aiida.orm.querybuilder import QueryBuilder

    query = QueryBuilder()
    query.append(cls, filters={'attributes.md5': {'==': md5}})
    return [node for [node] in query.all()]
# For queries: example from the tutorial.
from aiida.orm.querybuilder import QueryBuilder
from aiida.orm.data.remote import RemoteData

StructureData = DataFactory("structure")
ParameterData = DataFactory("parameter")

# Chain: RemoteData nodes belonging to one of the tutorial groups,
# projecting the smearing energy of the linked ParameterData.
qb = QueryBuilder()
qb.append(RemoteData, tag="remote", project=["*"])
qb.append(Group, group_of="remote",
          filters={"name": {"in": ["tutorial_pbesol", "tutorial_lda", "tutorial_pbe"]}})
qb.append(ParameterData, project=["attributes.energy_smearing"]
          #qb.append(ParameterData, project=["attributes.energy_smearing"],
          #          filters={"id":{"==":1}}
          )
#qb.append(ParameterData, project=["attributes.energy_smearing"]
qb.all()
from aiida.orm.data.remote import RemoteData

qb = QueryBuilder()
#qb.append(Node, project=["id"])
StructureData = DataFactory("structure")
ParameterData = DataFactory("parameter")

#enumerate the <pk> for each query key
#for node, in qb.iterall():
#    print(node)
#print()
#print("Number of species " + str(qb.count()))
#qb.append(StructureData, project=["id", "uuid"],
#          filters={"or":[
#              {"id":{"==":285}}, {"id":{"==":3512}} ] })

# To establish links between states
qb.append(RemoteData, tag="remote", project=["*"])
qb.append(Group, group_of="remote")
#qb.append(ParameterData, project=["attributes.energy_smearing"]) #, filters=)
#qb.append(ParameterData, project=["attributes.element"])
#for i in qb.iterall():
#    print(i)

# print(x) with a single argument behaves identically on Python 2 and 3,
# unlike the Python-2-only 'print x' statement it replaces.
print(qb.all())