def get_dbauthinfo(self, user):
    """Return the DbAuthInfo row linking *user* to this computer.

    :param user: the AiiDA user whose authorization info is requested
    :return: the matching DbAuthInfo instance
    :raise NotExistent: if the user is not configured for this computer
    """
    authinfo = DbAuthInfo.query.filter_by(
        dbcomputer=self.dbcomputer, aiidauser=user
    ).first()
    if authinfo is None:
        raise NotExistent("The user '{}' is not configured for "
                          "computer '{}'".format(user.email, self.name))
    return authinfo
def __init__(self, **kwargs):
    """Construct a Group wrapper.

    Either pass ``dbgroup`` (a DbGroup instance or its integer pk) to wrap
    an existing row, or pass ``name`` (required) plus the optional
    ``type_string``, ``user`` and ``description`` keywords to create a new,
    unstored DbGroup.

    :raise NotExistent: if ``dbgroup`` is a pk with no matching row
    :raise TypeError: if ``dbgroup`` is neither an int/long nor a DbGroup
    :raise ValueError: if ``dbgroup`` is combined with other keywords, if
        ``name`` is missing, or if unknown keywords remain
    """
    from aiida.backends.djsite.db.models import DbGroup

    dbgroup = kwargs.pop('dbgroup', None)

    if dbgroup is not None:
        # NOTE(review): `long` exists only on Python 2, so this branch is
        # Python-2-specific code.
        if isinstance(dbgroup, (int, long)):
            try:
                dbgroup = DbGroup.objects.get(pk=dbgroup)
            except ObjectDoesNotExist:
                raise NotExistent(
                    "Group with pk={} does not exist".format(dbgroup))
        if not isinstance(dbgroup, DbGroup):
            raise TypeError("dbgroup is not a DbGroup instance, it is "
                            "instead {}".format(str(type(dbgroup))))
        # Passing a dbgroup is exclusive: any other keyword is an error.
        if kwargs:
            raise ValueError("If you pass a dbgroups, you cannot pass any "
                             "further parameter")
        self._dbgroup = dbgroup
    else:
        name = kwargs.pop('name', None)
        if name is None:
            raise ValueError("You have to specify a group name")
        group_type = kwargs.pop('type_string', "")  # By default, an user group
        user = kwargs.pop('user', get_automatic_user())
        description = kwargs.pop('description', "")
        # The DbGroup is only created in memory here; it is not saved.
        self._dbgroup = DbGroup(name=name, description=description,
                                user=user, type=group_type)
        # Any keyword not consumed above is unknown.
        if kwargs:
            raise ValueError("Too many parameters passed to Group, the "
                             "unknown parameters are: {}".format(
                                 ", ".join(kwargs.keys())))
def get_value_of_sub_field(key, original_get_value):
    """
    Get the value that corresponds to sub-fields of dictionaries stored in
    a JSON. For example, if there is a dictionary {'b': 'c'} stored as
    value of the key 'a'
    value 'a'
    :param key: The key that can be simple, a string, or complex, a set of
    keys separated by the separator value.
    :param original_get_value: The function that should be called to get
    the original value (which can be a dictionary too).
    :return: The value that correspond to the complex (or not) key.
    :raise NotExistent: If the key doesn't correspond to a value
    """
    # Split a composite key like 'a<sep>b<sep>c' into its components; a
    # simple key yields a single-element list.
    keys = key.split(_sep) if _sep in key else [key]

    if len(keys) == 1:
        return original_get_value(keys[0])

    try:
        # Fetch the top-level value, then descend one sub-key at a time.
        curr_val = original_get_value(keys[0])
        for sub_key in keys[1:]:
            curr_val = curr_val[sub_key]
        return curr_val
    except (TypeError, KeyError):
        # Bug fix: the original `except TypeError, KeyError:` is Python 2
        # syntax that BINDS the exception to the name `KeyError` instead of
        # catching KeyError; a parenthesized tuple catches both types.
        raise NotExistent("The sub-field {} doesn't correspond "
                          "to a value.".format(key))
def get_pseudos_from_structure(structure, family_name):
    """Map every kind name of *structure* to a pseudopotential node.

    The family is a Siesta pseudo group in the DB, possibly holding a mix
    of psf and psml pseudopotentials; each kind name is associated with
    the PsfData or PsmlData node for its element.

    :raise MultipleObjectsError: if more than one pseudo for the same
        element is found in the group.
    :raise NotExistent: if no pseudo for an element in the group is found
        in the group.
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError

    # Index the family's pseudopotentials by chemical element, refusing
    # duplicates.
    pseudos_by_element = {}
    group = Group.get(label=family_name)
    for member in group.nodes:
        if not isinstance(member, (PsfData, PsmlData)):
            continue
        if member.element in pseudos_by_element:
            raise MultipleObjectsError(
                "More than one pseudo for element {} found in "
                "family {}".format(member.element, family_name)
            )
        pseudos_by_element[member.element] = member

    # Resolve each structure kind through its element symbol.
    mapping = {}
    for kind in structure.kinds:
        try:
            mapping[kind.name] = pseudos_by_element[kind.symbol]
        except KeyError:
            raise NotExistent("No pseudo for element {} found in family {}".format(kind.symbol, family_name))

    return mapping
def __init__(self, **kwargs):
    """Construct a Computer wrapper.

    Accepts either a ``uuid`` keyword (load an existing row), a
    ``dbcomputer`` keyword (wrap an existing DbComputer instance), or
    arbitrary keyword parameters used to populate a brand-new DbComputer
    via ``self.set``.

    :raise NotExistent: if a ``uuid`` is given but no row matches
    :raise TypeError: if ``dbcomputer`` is not a DbComputer
    :raise ValueError: if ``uuid`` or ``dbcomputer`` is combined with
        further keywords
    """
    super(Computer, self).__init__()

    uuid = kwargs.pop('uuid', None)
    if uuid is not None:
        # Loading by UUID is exclusive: nothing else may be passed.
        if kwargs:
            raise ValueError("If you pass a uuid, you cannot pass any "
                             "further parameter")
        row = DbComputer.query.filter_by(uuid=uuid).first()
        if not row:
            raise NotExistent("No entry with UUID={} found".format(uuid))
        self._dbcomputer = row
    elif 'dbcomputer' in kwargs:
        row = kwargs.pop('dbcomputer')
        if not isinstance(row, DbComputer):
            raise TypeError("dbcomputer must be of type DbComputer")
        self._dbcomputer = row
        if kwargs:
            raise ValueError("If you pass a dbcomputer parameter, "
                             "you cannot pass any further parameter")
    else:
        self._dbcomputer = DbComputer()

    # Set all remaining parameters, stop if unknown
    self.set(**kwargs)
def get_json_files(archive, silent=True, filepath=None, external_module=None):
    """Get metadata.json and data.json from an exported AiiDA archive

    :param archive: the relative filename of the archive
    :param silent: Whether or not the extraction should be silent
    :param filepath: str of directories of where to find archive (starting "/"s are irrelevant)
    :param external_module: string with name of external module, where archive can be found
    :return: tuple of the parsed ``metadata.json`` and ``data.json`` dicts
    :raise NotExistent: if the archive lacks ``data.json`` or ``metadata.json``
    :raise ValueError: if the archive is neither a zip archive nor a tarball
    """
    # Get archive
    dirpath_archive = get_archive_file(archive, filepath=filepath, external_module=external_module)

    # Unpack archive
    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(dirpath_archive):
            extract_zip(dirpath_archive, folder, silent=silent)
        elif tarfile.is_tarfile(dirpath_archive):
            extract_tar(dirpath_archive, folder, silent=silent)
        else:
            raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

        # Bug fix: the original handler referenced `fhandle.filename`, but
        # (a) file objects expose `.name`, not `.filename`, and (b) if the
        # first open failed `fhandle` was unbound, so the handler itself
        # raised. Track the filename being read explicitly instead.
        contents = {}
        for filename in ('data.json', 'metadata.json'):
            try:
                with io.open(folder.get_abs_path(filename), 'r', encoding='utf8') as fhandle:
                    contents[filename] = json.load(fhandle)
            except IOError:
                raise NotExistent('export archive does not contain the required file {}'.format(filename))

    # Return metadata.json and data.json
    return contents['metadata.json'], contents['data.json']
def get_result_parameterdata_node(self):
    """
    Return the parameterdata node.

    :raise UniquenessError: if the node is not unique
    :raise NotExistent: if the node does not exist
    """
    from aiida.orm.data.parameter import ParameterData
    from aiida.common.exceptions import NotExistent

    outputs = self._calc.get_outputs(node_type=ParameterData, also_labels=True)
    expected_label = self.get_linkname_outparams()
    candidates = [node for label, node in outputs if label == expected_label]

    if not candidates:
        raise NotExistent("No output .res ParameterData node found")
    if len(candidates) > 1:
        from aiida.common.exceptions import UniquenessError
        raise UniquenessError("Output ParameterData should be found once, "
                              "found it instead {} times".format(len(candidates)))
    return candidates[0]
def load_entity(cls, identifier, identifier_type=None, sub_classes=None, query_with_dashes=True):
    """
    Load an entity that uniquely corresponds to the provided identifier of the identifier type.

    :param identifier: the identifier
    :param identifier_type: the type of the identifier
    :param sub_classes: an optional tuple of orm classes, that should each be strict sub classes of
        the base orm class of the loader, that will narrow the queryset
    :returns: the loaded entity
    :raises aiida.common.MultipleObjectsError: if the identifier maps onto multiple entities
    :raises aiida.common.NotExistent: if the identifier maps onto not a single entity
    """
    builder, query_parameters = cls.get_query_builder(identifier, identifier_type, sub_classes, query_with_dashes)
    # Two rows are enough to detect non-uniqueness.
    builder.limit(2)

    classes = ' or '.join(sub_class.__name__ for sub_class in query_parameters['classes'])
    identifier = query_parameters['identifier']
    identifier_type = query_parameters['identifier_type'].value

    try:
        return builder.one()[0]
    except MultipleObjectsError:
        raise MultipleObjectsError(
            'multiple {} entries found with {}<{}>'.format(classes, identifier_type, identifier))
    except NotExistent as exception:
        raise NotExistent(
            'no {} found with {}<{}>: {}'.format(classes, identifier_type, identifier, exception))
def get_from_string(cls, code_string):
    """
    Return the Code matching an identifier string in the format
    label@machinename (the computer part is optional).

    .. note:: the (leftmost) '@' symbol is always used to split code and
        computername. Therefore do not use '@' in the code name if you
        want to use this function ('@' in the computer name are instead
        valid).

    :param code_string: the code string identifying the code to load
    :raise aiida.common.NotExistent: if no code identified by the given string is found
    :raise aiida.common.MultipleObjectsError: if the string cannot identify uniquely a code
    :raise aiida.common.InputValidationError: if code_string is not of string type
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError, InputValidationError

    try:
        label, _, machinename = code_string.partition('@')
    except AttributeError:
        # Non-string inputs have no .partition method.
        raise InputValidationError('the provided code_string is not of valid string type')

    try:
        return cls.get_code_helper(label, machinename)
    except NotExistent:
        raise NotExistent('{} could not be resolved to a valid code label'.format(code_string))
    except MultipleObjectsError:
        raise MultipleObjectsError('{} could not be uniquely resolved'.format(code_string))
def find(cls, **kwargs):
    """
    Extend :py:meth:`PotcarMetadataMixin.find` with filtering by POTCAR family.

    If no POTCAR is found, raise a ``NotExistent`` exception. If multiple
    POTCAR are found, sort them by:

     * POTCARS belonging to the active user first
     * oldest first
    """
    family = kwargs.pop('family', None)
    if not family:
        # No family filter requested: defer entirely to the mixin.
        return super(PotcarData, cls).find(**kwargs)

    query = cls.query_by_attrs(**kwargs)
    query.append(
        Group,
        tag='family',
        filters={'label': {'==': family}, 'type_string': {'==': cls.potcar_family_type_string}},
        with_node=cls._query_label,
    )
    query.add_projection(cls._query_label, '*')

    if not query.count():
        raise NotExistent()

    from functools import cmp_to_key
    matches = [row[0] for row in query.all()]
    matches.sort(key=cmp_to_key(by_older))
    return matches
def get_pseudos_from_structure(structure, family_name):
    """
    Map every kind name in *structure* onto the UpfData node of its
    element, taken from the UpfFamily group *family_name*.

    :raise MultipleObjectsError: if more than one UPF for the same element
        is found in the group.
    :raise NotExistent: if no UPF for an element in the group is found in
        the group.
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError

    # Index the family's UPF nodes by element, refusing duplicates.
    group = UpfData.get_upf_group(family_name)
    element_to_pseudo = {}
    for member in group.nodes:
        if not isinstance(member, UpfData):
            continue
        if member.element in element_to_pseudo:
            raise MultipleObjectsError(
                "More than one UPF for element {} found in "
                "family {}".format(member.element, family_name))
        element_to_pseudo[member.element] = member

    # Resolve each structure kind through its element symbol.
    kind_to_pseudo = {}
    for kind in structure.kinds:
        if kind.symbol not in element_to_pseudo:
            raise NotExistent(
                "No UPF for element {} found in family {}".format(
                    kind.symbol, family_name))
        kind_to_pseudo[kind.name] = element_to_pseudo[kind.symbol]

    return kind_to_pseudo
def get_code_helper(cls, label, machinename=None):
    """
    Return the single code with the given label (optionally restricted to
    one computer).

    :param label: the code label identifying the code to load
    :param machinename: the machine name where code is setup
    :raise aiida.common.NotExistent: if no code identified by the given string is found
    :raise aiida.common.MultipleObjectsError: if the string cannot identify uniquely a code
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.computers import Computer

    query = QueryBuilder()
    query.append(cls, filters={'label': {'==': label}}, project=['*'], tag='code')
    if machinename:
        query.append(Computer, filters={'name': {'==': machinename}}, with_node='code')

    count = query.count()
    if count == 0:
        raise NotExistent("'{}' is not a valid code name.".format(label))
    if count > 1:
        codes = [row[0] for row in query.all()]
        message = "There are multiple codes with label '{}', having IDs: ".format(label)
        message += ', '.join(sorted(str(code.pk) for code in codes)) + '.\n'
        message += 'Relabel them (using their ID), or refer to them with their ID.'
        raise MultipleObjectsError(message)
    return query.first()[0]
def find(cls, **kwargs):
    """Extend :py:meth:`PotcarMetadataMixin.find` with filtering by POTCAR family.

    :param kwargs: POTCAR metadata attributes to filter by, plus the
        optional ``family`` keyword restricting results to one family group
    :raise NotExistent: if no POTCAR matches
    """
    from functools import cmp_to_key

    family = kwargs.pop('family', None)
    if not family:
        return super(PotcarData, cls).find(**kwargs)

    query = cls.query_by_attrs(**kwargs)
    group_filters = {
        'name': {
            '==': family
        },
        'type': {
            '==': cls.potcar_family_type_string
        }
    }
    query.append(Group, tag='family', filters=group_filters, group_of=cls._query_label)
    query.add_projection(cls._query_label, '*')

    if not query.count():
        raise NotExistent()

    results = [result[0] for result in query.all()]
    # Bug fix: `list.sort(comparator)` is Python 2 only; wrapping the
    # comparators with `cmp_to_key` keeps the identical ordering (stable
    # sorts, by_older then by_user) and also works on Python 3.
    results.sort(key=cmp_to_key(by_older))
    results.sort(key=cmp_to_key(by_user))
    return results
def get_pseudos_from_structure(structure, family_name):
    """Return a dictionary mapping each kind name of the structure to corresponding `UpfData` from given family.

    :param structure: a `StructureData`
    :param family_name: the name of a UPF family group
    :return: dictionary mapping each structure kind name onto `UpfData` of corresponding element
    :raise aiida.common.MultipleObjectsError: if more than one UPF for the same element is
        found in the group.
    :raise aiida.common.NotExistent: if no UPF for an element in the group is found in the group.
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError

    pseudo_list = {}
    family_pseudos = {}
    family = UpfData.get_upf_group(family_name)

    # Index the family's UPF nodes by element, refusing duplicates.
    for node in family.nodes:
        if isinstance(node, UpfData):
            if node.element in family_pseudos:
                raise MultipleObjectsError(
                    f'More than one UPF for element {node.element} found in family {family_name}'
                )
            family_pseudos[node.element] = node

    for kind in structure.kinds:
        try:
            pseudo_list[kind.name] = family_pseudos[kind.symbol]
        except KeyError as exception:
            # Chain the original KeyError so tracebacks show it as the
            # cause instead of "during handling of the above exception".
            raise NotExistent(
                f'No UPF for element {kind.symbol} found in family {family_name}'
            ) from exception

    return pseudo_list
def set_reference_bandsdata(self, value):
    """
    Sets a reference bandsdata, creates a uuid link between this data
    object and a bandsdata object, must be set before any projection arrays

    :param value: a BandsData instance, a uuid or a pk
    :raise: NotExistent if there was no BandsData associated with uuid or pk
    """
    if isinstance(value, BandsData):
        uuid = value.uuid
    else:
        try:
            # First interpretation: the value is a pk.
            pk = int(value)
            bands = load_node(pk=pk, type=BandsData)
            uuid = bands.uuid
        except ValueError:
            # Not an integer: interpret the value as a uuid instead.
            uuid = str(value)
            try:
                bands = load_node(uuid=uuid, type=BandsData)
                uuid = bands.uuid
            except Exception:
                # Bug fix: the original bare `except:` also swallowed
                # SystemExit/KeyboardInterrupt; catch Exception so only
                # real lookup failures become NotExistent.
                raise NotExistent("The value passed to "
                                  "set_reference_bandsdata was not "
                                  "associated to any bandsdata")
    self._set_attr('reference_bandsdata_uuid', uuid)
def get_subclass_from_uuid(cls, uuid):
    """Return the workflow subclass instance for the given UUID.

    :raise NotExistent: if no workflow row with that UUID exists
    """
    row = DbWorkflow.query.filter_by(uuid=uuid).first()
    if row is None:
        raise NotExistent("No entry with the UUID {} found".format(uuid))
    return cls.get_subclass_from_dbnode(row)
def get_subclass_from_pk(cls, pk):
    """Return the workflow subclass instance for the given primary key.

    :raise NotExistent: if no workflow row with that pk exists
    """
    row = DbWorkflow.query.filter_by(pk=pk).first()
    if row is None:
        raise NotExistent("No entry with pk= {} found".format(pk))
    return cls.get_subclass_from_dbnode(row)
def get_dbauthinfo(self, user):
    """Return the DbAuthInfo linking *user* to this computer.

    :param user: the AiiDA user whose authorization info is requested
    :raise NotExistent: if the user is not configured for this computer
    """
    from aiida.backends.djsite.db.models import DbAuthInfo

    try:
        authinfo = DbAuthInfo.objects.get(dbcomputer=self.dbcomputer,
                                          aiidauser=user)
    except ObjectDoesNotExist:
        raise NotExistent("The user '{}' is not configured for "
                          "computer '{}'".format(user.email, self.name))
    return authinfo
def get_subclass_from_pk(cls, pk):
    """Return the workflow subclass instance for the given primary key.

    :param pk: the primary key of the DbWorkflow row to load
    :raise NotExistent: if no workflow row with the given pk exists
    """
    from aiida.backends.djsite.db.models import DbWorkflow
    try:
        dbworkflowinstance = DbWorkflow.objects.get(pk=pk)
    except ObjectDoesNotExist:
        raise NotExistent("No entry with pk= {} found".format(pk))
    # Keep the conversion outside the try block so an ObjectDoesNotExist
    # raised inside get_subclass_from_dbnode is not misreported as a
    # missing workflow row.
    return cls.get_subclass_from_dbnode(dbworkflowinstance)
def get_from_string(cls, code_string):
    """
    Get a Computer object with given identifier string, that can either be
    the numeric ID (pk), or the label (if unique); the label can either
    be simply the label, or in the format label@machinename. See the note
    below for details on the string detection algorithm.

    .. note:: If a string that can be converted to an integer is given,
        the numeric ID is verified first (therefore, is a code A with a
        label equal to the ID of another code B is present, code A cannot
        be referenced by label). Similarly, the (leftmost) '@' symbol is
        always used to split code and computername. Therefore do not use
        '@' in the code name if you want to use this function ('@' in the
        computer name are instead valid).

    :param code_string: the code string identifying the code to load
    :raise NotExistent: if no code identified by the given string is found
    :raise MultipleObjectsError: if the string cannot identify uniquely a code
    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError
    from aiida.orm.utils import load_node

    try:
        code_int = int(code_string)
        try:
            return load_node(code_int, parent_class=cls)
        except NotExistent:
            # Deliberate exception-as-goto: raising ValueError here jumps
            # to the outer `except ValueError` section below, to check if
            # a code with the given label exists.
            raise ValueError()
        except MultipleObjectsError:
            raise MultipleObjectsError("More than one code in the DB "
                                       "with pk='{}'!".format(code_string))
    except ValueError:
        # Reached either because code_string is not an integer, or via the
        # ValueError raised above when no node has that pk.
        # Before dying, try to see if the user passed a (unique) label.
        # split with the leftmost '@' symbol (i.e. code names cannot
        # contain '@' symbols, computer names can)
        codename, sep, computername = code_string.partition('@')
        if sep:
            codes = cls.query(label=codename, dbcomputer__name=computername)
        else:
            codes = cls.query(label=codename)

        if len(codes) == 0:
            raise NotExistent("'{}' is not a valid code "
                              "ID or label.".format(code_string))
        elif len(codes) > 1:
            retstr = ("There are multiple codes with label '{}', having IDs: "
                      "".format(code_string))
            retstr += ", ".join(sorted([str(c.pk) for c in codes])) + ".\n"
            retstr += ("Relabel them (using their ID), or refer to them "
                       "with their ID.")
            raise MultipleObjectsError(retstr)
        else:
            return codes[0]
def test_migrate_v3_to_v4(self):
    """Test function migrate_v3_to_v4"""
    from aiida import get_version

    # Get metadata.json and data.json as dicts from v0.4 file archive
    metadata_v4, data_v4 = get_json_files('export_v0.4_simple.aiida', **self.core_archive)
    verify_metadata_version(metadata_v4, version='0.4')

    # Get metadata.json and data.json as dicts from v0.3 file archive
    # Cannot use 'get_json_files' for 'export_v0.3_simple.aiida',
    # because we need to pass the SandboxFolder to 'migrate_v3_to_v4'
    dirpath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive)

    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(dirpath_archive):
            extract_zip(dirpath_archive, folder, silent=True)
        elif tarfile.is_tarfile(dirpath_archive):
            extract_tar(dirpath_archive, folder, silent=True)
        else:
            raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

        # Bug fix: the original handler used `fhandle.filename`, which does
        # not exist on file objects (and `fhandle` is unbound if the first
        # open fails); track the current filename explicitly instead.
        current_file = 'data.json'
        try:
            with io.open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data_v3 = jsonload(fhandle)
            current_file = 'metadata.json'
            with io.open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata_v3 = jsonload(fhandle)
        except IOError:
            raise NotExistent('export archive does not contain the required file {}'.format(current_file))

        verify_metadata_version(metadata_v3, version='0.3')

        # Migrate to v0.4
        migrate_v3_to_v4(metadata_v3, data_v3, folder)
        verify_metadata_version(metadata_v3, version='0.4')

    # Remove AiiDA version, since this may change irregardless of the migration function
    metadata_v3.pop('aiida_version')
    metadata_v4.pop('aiida_version')

    # Assert conversion message in `metadata.json` is correct and then remove it for later assertions
    self.maxDiff = None  # pylint: disable=invalid-name
    conversion_message = 'Converted from version 0.3 to 0.4 with AiiDA v{}'.format(get_version())
    self.assertEqual(
        metadata_v3.pop('conversion_info')[-1],
        conversion_message,
        msg='The conversion message after migration is wrong'
    )
    metadata_v4.pop('conversion_info')

    # Assert changes were performed correctly
    self.assertDictEqual(
        metadata_v3,
        metadata_v4,
        msg='After migration, metadata.json should equal intended metadata.json from archives'
    )
    self.assertDictEqual(
        data_v3,
        data_v4,
        msg='After migration, data.json should equal intended data.json from archives'
    )
def get_dbcomputer(cls, computer):
    """
    Return a DbComputer from its name (or from another Computer or DbComputer
    instance)

    :param computer: a computer name (string), an integer pk, a DbComputer
        instance or a Computer instance
    :return: the corresponding stored DbComputer
    :raise NotExistent: if a name or pk matches no stored computer
    :raise DbContentError: if more than one computer shares the given name
    :raise ValueError: if the passed (Db)Computer instance is not stored yet
    :raise TypeError: if ``computer`` is of an unsupported type
    """
    from aiida.orm.computer import Computer

    # NOTE(review): `basestring` exists only on Python 2, so this function
    # is Python-2-specific code.
    if isinstance(computer, basestring):
        try:
            dbcomputer = cls.session.query(cls).filter(
                cls.name == computer).one()
        except NoResultFound:
            raise NotExistent(
                "No computer found in the table of computers with "
                "the given name '{}'".format(computer))
        except MultipleResultsFound:
            raise DbContentError(
                "There is more than one computer with name '{}', "
                "pass a Computer instance".format(computer))
    elif isinstance(computer, int):
        try:
            dbcomputer = cls.session.query(cls).filter(
                cls.id == computer).one()
        except NoResultFound:
            raise NotExistent(
                "No computer found in the table of computers with "
                "the given pk '{}'".format(computer))
        # No MultipleResultsFound handler here: pk lookups cannot match
        # more than one row.
    elif isinstance(computer, DbComputer):
        # An unstored DbComputer has no id yet and cannot be referenced.
        if computer.id is None:
            raise ValueError(
                "The computer instance you are passing has not been stored yet"
            )
        dbcomputer = computer
    elif isinstance(computer, Computer):
        if computer.dbcomputer.id is None:
            raise ValueError(
                "The computer instance you are passing has not been stored yet"
            )
        dbcomputer = computer.dbcomputer
    else:
        raise TypeError(
            "Pass either a computer name, a DbComputer django instance or a Computer object"
        )
    return dbcomputer
def get_subclass_from_uuid(cls, uuid):
    """Return the node (an instance of *cls*) with the given UUID.

    :param uuid: the UUID to look up
    :raise NotExistent: if no row with the UUID exists, or the stored node
        is not an instance of ``cls``
    :raise ValueError: if the database reports an error (e.g. malformed UUID)
    """
    from aiida.orm.querybuilder import QueryBuilder
    from sqlalchemy.exc import DatabaseError
    try:
        qb = QueryBuilder()
        qb.append(cls, filters={'uuid': {'==': str(uuid)}})

        if qb.count() == 0:
            raise NotExistent("No entry with UUID={} found".format(uuid))

        node = qb.first()[0]

        if not isinstance(node, cls):
            raise NotExistent("UUID={} is not an instance of {}".format(
                uuid, cls.__name__))
        return node
    except DatabaseError as de:
        # Bug fix: `de.message` only exists on Python 2 exceptions;
        # `str(de)` yields the same text and works on Python 3 too.
        raise ValueError(str(de))
def migrate_archive(input_file, output_file, silent=True):
    """Migrate contents using `migrate_recursively`
    This is essentially similar to `verdi export migrate`.
    However, since this command may be disabled, this function simulates it and keeps the tests working.

    :param input_file: filename with full path for archive to be migrated
    :param output_file: filename with full path for archive to be created after migration
    :raise NotExistent: if the archive lacks data.json or metadata.json
    :raise ValueError: if the archive is neither a zip archive nor a tarball
    """
    from aiida.tools.importexport.migration import migrate_recursively

    # Unpack archive, migrate, and re-pack archive
    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(input_file):
            extract_zip(input_file, folder, silent=silent)
        elif tarfile.is_tarfile(input_file):
            extract_tar(input_file, folder, silent=silent)
        else:
            raise ValueError(
                'invalid file format, expected either a zip archive or gzipped tarball'
            )

        # Bug fix: the original handler referenced `fhandle.filename` (file
        # objects expose `.name`, and `fhandle` is unbound if the first open
        # fails); track the filename being read explicitly instead.
        current_file = 'data.json'
        try:
            with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data = json.load(fhandle)
            current_file = 'metadata.json'
            with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
        except IOError:
            raise NotExistent(
                'export archive does not contain the required file {}'.format(current_file))

        # Migrate
        migrate_recursively(metadata, data, folder)

        # Write json files
        # Bug fix: json.dump writes `str`, so the files must be opened in
        # text mode ('w'); the original binary mode ('wb') raises TypeError.
        with open(folder.get_abs_path('data.json'), 'w', encoding='utf8') as fhandle:
            json.dump(data, fhandle, indent=4)

        with open(folder.get_abs_path('metadata.json'), 'w', encoding='utf8') as fhandle:
            json.dump(metadata, fhandle, indent=4)

        # Pack archive
        compression = zipfile.ZIP_DEFLATED
        with zipfile.ZipFile(output_file, mode='w', compression=compression, allowZip64=True) as archive:
            src = folder.abspath
            for dirpath, dirnames, filenames in os.walk(src):
                relpath = os.path.relpath(dirpath, src)
                for filename in dirnames + filenames:
                    real_src = os.path.join(dirpath, filename)
                    real_dest = os.path.join(relpath, filename)
                    archive.write(real_src, real_dest)
def get_subclass_from_uuid(cls, uuid):
    """Return the workflow subclass instance for the given UUID.

    :param uuid: the UUID of the DbWorkflow row to load
    :raise NotExistent: if no workflow row with the given UUID exists
    """
    from aiida.backends.djsite.db.models import DbWorkflow
    try:
        dbworkflowinstance = DbWorkflow.objects.get(uuid=uuid)
    except ObjectDoesNotExist:
        raise NotExistent("No entry with the UUID {} found".format(uuid))
    # Keep the conversion outside the try block so an ObjectDoesNotExist
    # raised inside get_subclass_from_dbnode is not misreported as a
    # missing workflow row.
    return cls.get_subclass_from_dbnode(dbworkflowinstance)
def find(cls, **kwargs):
    """Find nodes by POTCAR metadata attributes given in kwargs.

    :param kwargs: POTCAR metadata attributes to filter by
    :raise NotExistent: if no node matches
    """
    from functools import cmp_to_key

    query_builder = cls.query_by_attrs(**kwargs)
    if not query_builder.count():
        raise NotExistent()

    results = [result[0] for result in query_builder.all()]
    # Bug fix: `list.sort(comparator)` is Python 2 only; wrapping the
    # comparators with `cmp_to_key` keeps the identical ordering (stable
    # sorts, by_older then by_user) and also works on Python 3.
    results.sort(key=cmp_to_key(by_older))
    results.sort(key=cmp_to_key(by_user))
    return results
def find(cls, **kwargs):
    """Find nodes by POTCAR metadata attributes given in kwargs."""
    from functools import cmp_to_key

    query_builder = cls.query_by_attrs(**kwargs)
    if query_builder.count() == 0:
        raise NotExistent()

    matches = [row[0] for row in query_builder.all()]
    matches.sort(key=cmp_to_key(by_older))
    return matches
def __init__(self, **kwargs):
    """Construct a Comment wrapper.

    With no arguments, wrap a brand-new (unstored) DbComment. With the
    single ``dbcomment`` keyword, wrap the given DbComment. Otherwise,
    look up an existing DbComment filtered by any combination of ``id``
    (or ``pk``), ``user`` and ``dbnode``.

    :raise ValueError: if ``dbcomment`` is combined with other keywords,
        or is not a DbComment instance
    :raise MultipleObjectsError: if the lookup matches more than one comment
    :raise NotExistent: if the lookup matches no comment
    """
    # If no arguments are passed, then create a new DbComment
    if not kwargs:
        self.dbcomment = DbComment()
    # If a DbComment is passed as argument. Just use it and
    # wrap it with a Comment object
    elif 'dbcomment' in kwargs:
        # When a dbcomment is passed as argument, then no other arguments
        # should be passed.
        if len(kwargs) > 1:
            raise ValueError("When a DbComment is passed as argument, no"
                             "further arguments are accepted.")
        dbcomment = kwargs.pop('dbcomment')
        if not isinstance(dbcomment, DbComment):
            raise ValueError("Expected a DbComment. Object of a different"
                             "class was given as argument.")
        self.dbcomment = dbcomment
    else:
        # 'pk' is accepted as an alias for 'id'.
        id = kwargs.pop('id', None)
        if id is None:
            id = kwargs.pop('pk', None)
        user = kwargs.pop('user', None)
        dbnode = kwargs.pop('dbnode', None)

        # Constructing the default query
        import operator
        from django.db.models import Q
        query_list = []
        # If an id is specified then we add it to the query
        if id is not None:
            query_list.append(Q(pk=id))
        # If a user is specified then we add it to the query
        if user is not None:
            query_list.append(Q(user=user))
        # If a dbnode is specified then we add it to the query
        if dbnode is not None:
            query_list.append(Q(dbnode=dbnode))

        # NOTE(review): `reduce` is a builtin only on Python 2; on Python 3
        # it lives in functools. Also, if none of id/user/dbnode was given,
        # query_list is empty and reduce() raises TypeError — confirm
        # callers always pass at least one filter.
        res = DbComment.objects.filter(reduce(operator.and_, query_list))
        ccount = len(res)
        if ccount > 1:
            raise MultipleObjectsError(
                "The arguments that you specified were too vague. More "
                "than one comments with this data were found in the "
                "database")
        elif ccount == 0:
            raise NotExistent("No comments were found with the given "
                              "arguments")
        self.dbcomment = res[0]
def get(cls, *args, **kwargs):
    """Return the single Group matching the query.

    :raise NotExistent: if no group matches
    :raise MultipleObjectsError: if more than one group matches
    """
    matches = cls.query(*args, **kwargs)
    if not matches:
        raise NotExistent("No Group matching the query found")
    if len(matches) > 1:
        raise MultipleObjectsError("More than one Group found -- "
                                   "I found {}".format(len(matches)))
    return matches[0]
def get(cls, element, name=None, version="latest", match_aliases=True, group_label=None, n_el=None):
    """Return the newest BasisSet matching the given criteria.

    :param element: chemical element symbol to match
    :param name: optional basis-set name (or alias, if ``match_aliases``)
    :param version: an explicit version, or "latest" to pick the newest
    :param match_aliases: whether ``name`` is matched against the aliases list
    :param group_label: optionally restrict the search to one group
    :param n_el: optionally filter by the ``n_el`` attribute
    :raise NotExistent: if nothing matches
    :raise MultipleObjectsError: if differently named basis sets match
    """
    from aiida.orm.querybuilder import QueryBuilder

    query = QueryBuilder()
    if group_label:
        query.append(Group, filters={"label": group_label}, tag="group")
        query.append(BasisSet, with_group="group")
    else:
        query.append(BasisSet)

    filters = {"attributes.element": {"==": element}}
    if version != "latest":
        filters["attributes.version"] = {"==": version}
    if name:
        if match_aliases:
            filters["attributes.aliases"] = {"contains": [name]}
        else:
            filters["attributes.name"] = {"==": name}
    if n_el:
        filters["attributes.n_el"] = {"==": n_el}
    query.add_filter(BasisSet, filters)

    # Sort newest-version-first in Python, since the QueryBuilder cannot
    # cast the version attribute for ordering here.
    candidates = sorted(query.iterall(), key=lambda row: row[0].version, reverse=True)

    if not candidates:
        raise NotExistent(
            f"No Gaussian Basis Set found for element={element}, name={name}, version={version}"
        )

    # if we get different names there is no well ordering, sorting by version only works if they have the same name
    if len({row[0].name for row in candidates}) > 1:
        raise MultipleObjectsError(
            f"Multiple Gaussian Basis Set found for element={element}, name={name}, version={version}"
        )

    return candidates[0][0]