def test_no_autogroup(self):
    """Verify that ``verdi run`` does not create an autogroup unless explicitly asked to."""
    from aiida.orm import QueryBuilder, Node, AutoGroup, load_node

    script_content = textwrap.dedent("""\
        from aiida.orm import Data
        node = Data().store()
        print(node.pk)
        """)

    with tempfile.NamedTemporaryFile(mode='w+') as script_file:
        script_file.write(script_content)
        script_file.flush()

        # No autogroup flag: nothing should be stored by default
        run_options = [script_file.name]
        result = self.cli_runner.invoke(cmd_run.run, run_options)
        self.assertClickResultNoException(result)

        node_pk = int(result.output)
        _ = load_node(node_pk)  # Check if the node can be loaded

        group_query = QueryBuilder().append(Node, filters={'id': node_pk}, tag='node')
        group_query.append(AutoGroup, with_node='node', project='*')
        matched_groups = group_query.all()
        self.assertEqual(len(matched_groups), 0, 'There should be no autogroup generated')
def get_data_aiida(cif_uuid, plot_info):
    """Query the AiiDA database

    Find the ``Dict`` descendant of the ``CifData`` node with the given uuid.

    :param cif_uuid: uuid of the CifData node to start from
    :param plot_info: widget whose ``text`` is updated when nothing matches
    :return: the single matching row, or None when there is no match
    """
    from figure.aiida import load_profile
    from aiida.orm import QueryBuilder, Dict, CifData

    load_profile()

    query = QueryBuilder()
    query.append(CifData, filters={'uuid': {'==': cif_uuid}}, tag='cifs', project='*')
    query.append(Dict, descendant_of='cifs', project='*')

    if query.count() == 0:
        plot_info.text = 'No matching COF found.'
        return None
    return query.one()
def reuse_kpoints_grid(grid, lowest_pk=False):
    """
    Retrieve a previously stored kpoints mesh data node.

    If there is no such ``KpointsData`` in the database, a new (unstored)
    node is created. By default the matching node with the highest pk is
    returned.

    :param grid: Grid to be retrieved
    :param bool lowest_pk: If set to True will return the node with lowest pk
    :returns: A KpointsData node representing the grid requested
    """
    from aiida.orm import QueryBuilder, KpointsData

    query = QueryBuilder()
    query.append(
        KpointsData,
        tag="kpoints",
        filters={
            "attributes.mesh.0": grid[0],
            "attributes.mesh.1": grid[1],
            "attributes.mesh.2": grid[2],
        },
    )
    order = "asc" if lowest_pk else "desc"
    query.order_by({"kpoints": [{"id": {"order": order}}]})

    if query.count() >= 1:
        return query.first()[0]

    # Nothing stored yet for this mesh - build a fresh node
    kpoints = KpointsData()
    kpoints.set_kpoints_mesh(grid)
    return kpoints
def link_triple_exists(source, target, link_type, link_label):
    """Return whether a link with the given type and label exists between the given source and target node.

    :param source: node from which the link is outgoing
    :param target: node to which the link is incoming
    :param link_type: the link type
    :param link_label: the link label
    :return: boolean, True if the link triple exists, False otherwise
    """
    from aiida.orm import Node, QueryBuilder

    # An unstored target keeps its incoming links in a cache; check that first.
    cache = target._incoming_cache  # pylint: disable=protected-access
    if cache and LinkTriple(source, link_type, link_label) in cache:
        return True

    # A node without a pk is unstored, so the link cannot be in the database.
    if source.pk is None or target.pk is None:
        return False

    # Both nodes are stored: look for the identical link triple in the database.
    # One hit is enough, so cap the query at a single row for efficiency.
    query = QueryBuilder()
    query.append(Node, filters={'id': source.id}, project=['id'])
    query.append(Node, filters={'id': target.id}, edge_filters={'type': link_type.value, 'label': link_label})
    query.limit(1)

    return query.count() > 0
def test_autogroup(self):
    """Verify that ``verdi run --auto-group`` stores exactly one autogroup for the created node."""
    from aiida.orm import QueryBuilder, Node, AutoGroup, load_node

    script_content = textwrap.dedent("""\
        from aiida.orm import Data
        node = Data().store()
        print(node.pk)
        """)

    with tempfile.NamedTemporaryFile(mode='w+') as script_file:
        script_file.write(script_content)
        script_file.flush()

        run_options = ['--auto-group', script_file.name]
        result = self.cli_runner.invoke(cmd_run.run, run_options)
        self.assertClickResultNoException(result)

        node_pk = int(result.output)
        _ = load_node(node_pk)  # Check if the node can be loaded

        group_query = QueryBuilder().append(Node, filters={'id': node_pk}, tag='node')
        group_query.append(AutoGroup, with_node='node', project='*')
        matched_groups = group_query.all()
        self.assertEqual(
            len(matched_groups), 1,
            'There should be only one autogroup associated with the node just created'
        )
def get_pseudo(self, element):
    """Return the pseudo potential for the given element.

    Results are cached in ``self.pseudos`` after the first database lookup.

    :param element: the element for which to return the corresponding pseudo potential.
    :return: pseudo potential instance if it exists
    :raises ValueError: if the family does not contain a pseudo potential for the given element
    """
    if element not in self.pseudos:
        # Not cached yet: look the pseudo up through this family group.
        query = QueryBuilder()
        query.append(self.__class__, filters={'id': self.pk}, tag='group')
        query.append(self._pseudo_types, filters={'attributes.element': element}, with_group='group')

        try:
            match = query.one()[0]
        except exceptions.MultipleObjectsError as exception:
            raise RuntimeError(f'family `{self.label}` contains multiple pseudos for `{element}`') from exception
        except exceptions.NotExistent as exception:
            raise ValueError(
                f'family `{self.label}` does not contain pseudo for element `{element}`'
            ) from exception

        self.pseudos[element] = match

    return self.pseudos[element]
def check_existence_wf(input_nodes, successful=True):
    """
    Check whether a FINISHED calculation node with exactly the given input
    nodes already exists in the database. If so, return the output nodes of
    the most recent match.

    param: input_nodes : List of input nodes

    returns output nodes
    """
    # TODO: some checks and inputnodes could be parsed in different formats
    input_uuids = [node.uuid for node in input_nodes]

    query = QueryBuilder()
    query.append(JobCalculation, tag='calc', project='*', filters={'state': {'==': 'FINISHED'}})
    # Require every given input node to be an input of the candidate calculation
    for index, uuid in enumerate(input_uuids):
        query.append(Node, input_of='calc', filters={'uuid': uuid}, tag='input_{}'.format(index))
    query.order_by({JobCalculation: 'ctime'})

    matches = query.all()
    if not matches:
        return None
    # Rows are ordered by ctime, so the last one is the latest calculation
    return matches[-1][0].get_outputs()
def _find(entity_type: Entity, **kwargs) -> QueryBuilder:
    """Workhorse function to perform AiiDA QueryBuilder query"""
    allowed_keys = {"filters", "order_by", "limit", "project", "offset"}
    for key in kwargs:
        if key not in allowed_keys:
            raise ValueError(
                f"You supplied key {key}. _find() only takes the keys: "
                '"filters", "order_by", "limit", "project", "offset"')

    filters = kwargs.get("filters", {})
    # Fall back to a deterministic ordering by id when none was requested
    requested_order = kwargs.get("order_by", None)
    if requested_order:
        order_by = {entity_type: requested_order}
    else:
        order_by = {entity_type: {"id": "asc"}}
    limit = kwargs.get("limit", None)
    offset = kwargs.get("offset", None)
    project = kwargs.get("project", [])

    query = QueryBuilder(limit=limit, offset=offset)
    query.append(entity_type, project=project, filters=filters)
    query.order_by(order_by)

    return query
def get(cls, **kwargs):
    """
    Custom get for group which can be used to get a group with the given attributes

    :param kwargs: the attributes to match the group to
    :return: the group
    :type nodes: :class:`aiida.orm.Node` or list
    """
    from aiida.orm import QueryBuilder

    # Validate the type_string keyword up front, if provided
    if 'type_string' in kwargs:
        if not isinstance(kwargs['type_string'], six.string_types):
            raise exceptions.ValidationError(
                'type_string must be {}, you provided an object of type '
                '{}'.format(str, type(kwargs['type_string'])))

    # Every keyword argument becomes a filter on the query
    query = QueryBuilder()
    query.append(cls, filters=dict(kwargs))

    results = query.all()
    if len(results) > 1:
        raise exceptions.MultipleObjectsError(
            "Found {} groups matching criteria '{}'".format(
                len(results), kwargs))
    if not results:
        raise exceptions.NotExistent(
            "No group found matching criteria '{}'".format(kwargs))
    return results[0][0]
def get_basis(self, element):
    """Return the basis for the given element.

    Results are cached in ``self.bases`` after the first database lookup.

    :param element: the element for which to return the corresponding basis.
    :return: basis instance if it exists
    :raises ValueError: if the basis set does not contain a basis for the given element
    """
    if element not in self.bases:
        # Not cached yet: look the basis up through this basis-set group.
        query = QueryBuilder()
        query.append(self.__class__, filters={'id': self.pk}, tag='group')
        query.append(self._basis_types, filters={'attributes.element': element}, with_group='group')

        try:
            match = query.one()[0]
        except exceptions.MultipleObjectsError as exception:
            raise RuntimeError(
                f'basis set `{self.label}` contains multiple bases for `{element}`'
            ) from exception
        except exceptions.NotExistent as exception:
            raise ValueError(
                f'basis set `{self.label}` does not contain basis for element `{element}`'
            ) from exception

        self.bases[element] = match

    return self.bases[element]
def find_latest_uuid():
    """Return the UUID of the most recently created ``phonopy.iter_ha`` workflow node.

    :return: the uuid string of the latest IterHarmonicApprox workflow
    :raises TypeError: implicitly, when no such workflow exists (``first()`` returns None)
    """
    IterHarmonicApprox = WorkflowFactory('phonopy.iter_ha')
    qb = QueryBuilder()
    qb.append(IterHarmonicApprox)
    # Newest first, so the first row is the latest workflow
    qb.order_by({IterHarmonicApprox: {'ctime': 'desc'}})
    # Bug fix: the original called qb.first() twice, running the query twice
    # and discarding the first result.
    latest = qb.first()
    return latest[0].uuid
def get_code(entry_point, computer):
    """Get local code.

    Sets up code for given entry point on given computer.

    :param entry_point: Entry point of calculation plugin
    :param computer: (local) AiiDA computer
    :return: The code node
    :rtype: :py:class:`aiida.orm.Code`
    """
    from aiida.orm import Code, QueryBuilder, Computer

    try:
        executable = EXECUTABLE[entry_point]
    except KeyError as exc:
        # Chain the original KeyError so the traceback keeps its cause
        raise KeyError(
            "Entry point '{}' not recognized. Allowed values: {}".format(
                entry_point, list(EXECUTABLE.keys()))) from exc

    # Reuse an existing code node on this computer if one is already set up
    qbuilder = QueryBuilder()
    qbuilder.append(Computer, filters={'id': computer.pk})
    qbuilder.append(Code, with_computer=Computer, filters={'label': executable})
    codes = [row[0] for row in qbuilder.all()]
    if codes:
        return codes[0]

    # Otherwise create and store a new code node pointing at the local executable
    path = get_path_to_executable(executable)
    code = Code(
        input_plugin_name=entry_point,
        remote_computer_exec=[computer, path],
    )
    code.label = executable
    return code.store()
def test_configure_from_file(configure):
    """Check that the user configured from file has the expected first name."""
    from aiida.orm import QueryBuilder
    from aiida.orm import User

    builder = QueryBuilder()
    builder.append(User)
    first_user = builder.first()[0]
    assert first_user.first_name == 'AiiDA'
def get_code(entry_point, executable, computer=aiida_localhost, label=None, prepend_text=None, append_text=None):
    """Get local code.

    Sets up code for given entry point on given computer.

    :param entry_point: Entry point of calculation plugin
    :param executable: name of executable; will be searched for in local system PATH.
    :param computer: (local) AiiDA computer
    :param prepend_text: a string of code that will be put in the scheduler script before the
        execution of the code.
    :param append_text: a string of code that will be put in the scheduler script after the
        execution of the code.
    :return: the `Code` either retrieved from the database or created if it did not yet exist.
    :rtype: :py:class:`aiida.orm.Code`
    """
    from aiida.common import exceptions
    from aiida.orm import Code, Computer, QueryBuilder

    if label is None:
        label = executable

    # Look for an existing code with this label/plugin on the given computer
    query = QueryBuilder().append(Computer, filters={'uuid': computer.uuid}, tag='computer')
    query.append(
        Code,
        filters={'label': label, 'attributes.input_plugin': entry_point},
        with_computer='computer',
    )

    try:
        return query.one()[0]
    except (exceptions.MultipleObjectsError, exceptions.NotExistent):
        pass  # fall through and create a new code node

    executable_path = shutil.which(executable)
    if not executable_path:
        raise ValueError(
            'The executable "{}" was not found in the $PATH.'.format(
                executable))

    code = Code(input_plugin_name=entry_point,
                remote_computer_exec=[computer, executable_path])
    code.label = label
    code.description = label
    if prepend_text is not None:
        code.set_prepend_text(prepend_text)
    if append_text is not None:
        code.set_append_text(append_text)

    return code.store()
def get_basis_family_names(self):
    """Get the list of all basiset family names to which the basis belongs."""
    builder = QueryBuilder()
    builder.append(BasisSetFamily, tag="group", project="label")
    # Restrict the projected families to those containing this very node
    builder.append(
        self.__class__,
        filters={"id": {"==": self.id}},
        with_group="group",
    )
    return builder.all(flat=True)
def query_by_attrs(cls, query=None, **kwargs):
    """Query potentials by attributes, optionally restricted to a potcar family group."""
    family_name = kwargs.pop('family_name', None)
    if family_name:
        # Restrict the query to members of the requested potcar family group
        family_filters = {
            'label': {'==': family_name},
            'type_string': {'==': cls.potcar_family_type_string},
        }
        query = QueryBuilder()
        query.append(Group, tag='family', filters=family_filters)
        query.append(cls, tag=cls._query_label, with_group='family')
    return super(PotcarData, cls).query_by_attrs(query=query, **kwargs)
def test_autogroup_clashing_label(self):
    """Check if the autogroup label is properly (re)generated when it clashes with an existing group name."""
    from aiida.orm import QueryBuilder, Node, AutoGroup, load_node

    script_content = textwrap.dedent("""\
        from aiida.orm import Data
        node = Data().store()
        print(node.pk)
        """)
    autogroup_label = 'SOME_repeated_group_LABEL'

    def run_script_and_get_groups(script_name):
        """Run the script with the clashing label prefix and return the node's autogroups."""
        options = [script_name, '--auto-group', '--auto-group-label-prefix', autogroup_label]
        result = self.cli_runner.invoke(cmd_run.run, options)
        self.assertClickResultNoException(result)
        pk = int(result.output)
        _ = load_node(pk)  # Check if the node can be loaded
        query = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
        query.append(AutoGroup, with_node='node', project='*')
        return query.all()

    with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
        fhandle.write(script_content)
        fhandle.flush()

        # First run: the autogroup label should match the requested prefix exactly
        all_auto_groups = run_script_and_get_groups(fhandle.name)
        self.assertEqual(
            len(all_auto_groups), 1,
            'There should be only one autogroup associated with the node just created'
        )
        self.assertEqual(all_auto_groups[0][0].label, autogroup_label)

        # A few more runs with the same label - it should not crash but append something to the group name
        for _ in range(10):
            all_auto_groups = run_script_and_get_groups(fhandle.name)
            self.assertEqual(
                len(all_auto_groups), 1,
                'There should be only one autogroup associated with the node just created'
            )
            self.assertTrue(
                all_auto_groups[0][0].label.startswith(autogroup_label))
def retrieve_numactive_calculations():
    """Return the number of calculation jobs that are still active.

    A job counts as active when its process state is not one of the terminal
    states ('finished', 'excepted', 'killed').

    :return: number of active ``CalcJobNode`` entries
    """
    qb = QueryBuilder()
    qb.append(
        CalcJobNode,
        filters={
            'attributes.process_state': {
                '!in': ['finished', 'excepted', 'killed']
            }
        },
    )
    # count() lets the database do the counting instead of
    # fetching every matching row just to take len() of the list.
    return qb.count()
def _get_unique_node_property(self, project: str) -> Union[Node, Any]:
    """Return a single projected property of the entity matching this instance's pk.

    :param project: name of the property to project in the query
    :return: the projected value of the unique matching entity
    :raises AiidaEntityNotFound: when there is not exactly one match
    """
    builder = QueryBuilder(limit=1)
    builder.append(self.AIIDA_ENTITY, filters={"id": self._pk}, project=project)

    if builder.count() != 1:
        raise AiidaEntityNotFound(
            f"Could not find {self.AIIDA_ENTITY} with PK {self._pk}.")

    return builder.first()[0]
def get_data_aiida(projections, sliders_dict, quantities, plot_info):
    """Query the AiiDA database"""
    from figure.aiida import load_profile
    from aiida.orm import QueryBuilder, Dict

    load_profile()

    filters = {}

    def add_range_filter(bounds, label):
        # a bit of cheating until this is resolved
        # https://github.com/aiidateam/aiida_core/issues/1389
        #filters['attributes.'+label] = {'>=':bounds[0]}
        filters['attributes.' + label] = {
            'and': [{'>=': bounds[0]}, {'<': bounds[1]}]
        }

    for key, slider in sliders_dict.items():
        # Note: filtering is costly, avoid if possible
        if slider.value != quantities[key]['range']:
            add_range_filter(slider.value, key)

    qb = QueryBuilder()
    qb.append(
        Dict,
        filters=filters,
        project=['attributes.' + p for p in projections] +
        ['uuid', 'extras.cif_uuid'],
    )

    nresults = qb.count()
    if nresults == 0:
        plot_info.text = 'No matching COFs found.'
        return data_empty

    plot_info.text = '{} COFs found. Plotting...'.format(nresults)

    # x,y position
    x, y, clrs, uuids, names, cif_uuids = list(zip(*qb.all()))
    plot_info.text = '{} COFs queried'.format(nresults)

    x = list(map(float, x))
    y = list(map(float, y))
    cif_uuids = list(map(str, cif_uuids))
    uuids = list(map(str, uuids))

    if projections[2] == 'bond_type':
        #clrs = map(lambda clr: bondtypes.index(clr), clrs)
        clrs = list(map(str, clrs))
    else:
        clrs = list(map(float, clrs))

    return dict(x=x, y=y, uuid=cif_uuids, color=clrs, name=names)
def is_loop_finished(self):
    """Decide whether the iteration loop should continue.

    The loop stops when a group labelled with this workchain's uuid exists
    (manual termination) or when the iteration count exceeds the maximum.
    """
    query = QueryBuilder()
    query.append(Group, filters={'label': {'==': self.uuid}})

    # A group labelled with our uuid is the manual-termination signal
    if query.count() == 1:
        self.report("Iteration loop is manually terminated at step %d."
                    % self.ctx.iteration)
        return False

    self.ctx.iteration += 1
    return self.ctx.iteration <= self.inputs.max_iteration.value
def get_all_node_links():
    """ Get all Node links currently in the DB """
    query = QueryBuilder()
    query.append(Node, project='uuid', tag='input')
    # Project the link's label and type alongside the output node's uuid
    query.append(
        Node,
        project='uuid',
        tag='output',
        edge_project=['label', 'type'],
        with_incoming='input',
    )
    return query.all()
def retrieve_numactive_elastic():
    """Return the number of active ElasticWorkChain calculation jobs.

    Active means the process state is not a terminal one
    ('finished', 'excepted', 'killed').

    :return: number of matching ``CalcJobNode`` entries
    """
    from aiida.orm import QueryBuilder
    from aiida.orm import CalcJobNode

    qb = QueryBuilder()
    qb.append(
        CalcJobNode,
        filters={
            'attributes.process_state': {'!in': ['finished', 'excepted', 'killed']},
            'attributes._process_label': 'ElasticWorkChain',
        },
    )
    # count() lets the database do the counting instead of
    # fetching every matching row just to take len() of the list.
    return qb.count()
def get_structure(label):
    """Return a stored structure node with the given label.

    If several structures share the label, an arbitrary one (the first
    returned by the database) is picked.

    :param label: the label to match exactly
    :return: the matching structure node
    :raises IndexError: if no structure with this label exists
    """
    from aiida.orm import QueryBuilder

    qb = QueryBuilder()
    qb.append(DataFactory('structure'),
              filters={'label': {'==': label}},
              tag='structure')
    # Only one row is needed - let the database stop after the first match
    # instead of materializing every structure with this label.
    qb.limit(1)
    return qb.all()[0][0]
def from_md5(cls, md5):
    """
    Return a list of all Basis Sets that match a given MD5 hash.

    Note that the hash has to be stored in a _md5 attribute, otherwise
    the basis will not be found.
    """
    query = QueryBuilder()
    query.append(cls, filters={"attributes.md5": {"==": md5}})
    return query.all(flat=True)
def get_outputcalcs(node):
    """Return the direct WorkChainNode children of *node*, ordered by creation time."""
    query = QueryBuilder()
    query.append(WorkChainNode, filters={"uuid": node.uuid}, tag="worknode")
    query.append(
        WorkChainNode,
        tag="worknode2",
        with_incoming="worknode",
        project=["id", "ctime", "*"],
    )
    query.order_by({"worknode2": "ctime"})
    # The third projected column ('*') is the child node itself
    return [row[2] for row in query.all()]
def test_db_flushed(configure):
    """Assert no marker node survives from a previous run, then store a fresh marker."""
    from aiida.orm import Str
    from aiida.orm import QueryBuilder

    test_string = 'this string should not be present when the test run starts'
    tag = 'Test string tag'

    # The marker from any previous run must have been flushed away
    query = QueryBuilder()
    query.append(Str, filters={'label': {'==': tag}})
    assert not query.all()

    # Leave a marker behind for the next invocation to check against
    marker = Str(test_string)
    marker.label = tag
    marker.store()
def from_tags(cls, name=None, element=None, version=None, functional=None, checksum=None):
    """
    Query database for potentials containing a set of given tags.

    To query the database at least one of the available tags has to be given. If
    multiple tags are defined only potentials matching **all** of the defined
    tags will be returned.

    :param name: fully qualified name of the potential (i.e. Li_sv, Li, Ge_sv_GW, P, ...)
    :type name: str
    :param element: name of the element associated with a given potential
        (i.e. Cl, Li, S, ...)
    :type element: str
    :param version: version (i.e. the creation date) of the potential in
        numerical 8-digit integer YYYYMMDD representation
    :type version: int
    :param functional: functional filter to query only for potentials associated
        with a specific functional. Allowed values are: lda_us, lda, lda_52,
        lda_54, pbe, pbe_52, pbe_54, pw91 and pw91_us
    :type functional: str
    :param checksum: the SHA-256 hash value associated with the contents of a potcar file
    :type hash: str
    :return: a list of :class:`VaspPotcarFile` nodes in the database matching the given tags
    :rtype: list(:class:`VaspPotcarFile`)
    """
    # Map each tag onto its attribute path and skip the undefined ones
    tag_values = {
        'attributes.name': name,
        'attributes.element': element,
        'attributes.version': version,
        'attributes.hash': checksum,
        'attributes.functional': functional,
    }
    filters = {attr: {'==': value} for attr, value in tag_values.items() if value is not None}

    # At least one tag is required to build a meaningful query
    # (message typo fixed: "not tags" --> "no tags")
    if not filters:
        raise VaspPotcarFileError("Database query for potcar file nodes "
                                  "failed because no tags were given")

    database_potential_query = QueryBuilder()
    database_potential_query.append(cls, filters=filters)

    # Unpack the single-column rows returned by the query builder
    return [row[0] for row in database_potential_query.all()]
def query_aiida_1():
    """Statistics query for AiiDA 1.0 and above."""
    from aiida import load_profile
    from aiida.orm import QueryBuilder, Node

    load_profile()

    # Project only the two type columns needed for the statistics
    query = QueryBuilder()
    query.append(Node, project=['node_type', 'process_type'])
    return query.all()
def get_all_parents(node_pks, return_values=('id',)):
    """Get all the parents of given nodes

    :param node_pks: one node pk or an iterable of node pks
    :return: a list of aiida objects with all the parents of the nodes
    """
    from aiida.orm import Node, QueryBuilder

    query = QueryBuilder()
    # Anchor on the given nodes, then walk up to every ancestor
    query.append(Node, tag='low_node', filters={'id': {'in': node_pks}})
    query.append(Node, with_descendants='low_node', project=return_values)
    return query.all()