def _del_db_attr(self, key):
    """
    Delete the attribute `key` directly from the DB, bypassing any cache,
    and bump the node version.

    :param str key: name of the attribute to delete
    :raises AttributeError: if no attribute with that key exists on the node
    """
    from aiida.backends.djsite.db.models import DbAttribute

    # Fail loudly if the attribute is not there, mirroring dict semantics
    if not DbAttribute.has_key(self.dbnode, key):
        raise AttributeError("DbAttribute {} does not exist".format(
            key))

    DbAttribute.del_value_for_node(self.dbnode, key)
    self._increment_version_number_db()
def _set_attr(self, key, value):
    """
    Set an attribute on the node.

    For an unstored node the value is kept in the in-memory cache; for a
    stored node it is written straight to the DB and the node version is
    incremented.

    :param str key: attribute name (validated by DbAttribute.validate_key)
    :param value: the value to store
    """
    from aiida.backends.djsite.db.models import DbAttribute

    DbAttribute.validate_key(key)

    if self._to_be_stored:
        # Deep-copy so later caller-side mutations of `value` do not
        # silently change the cached attribute.
        self._attrs_cache[key] = copy.deepcopy(value)
    else:
        DbAttribute.set_value_for_node(self.dbnode, key, value)
        self._increment_version_number_db()
def _del_attr(self, key):
    """
    Delete the attribute `key`: from the cache if the node is unstored,
    otherwise from the DB (incrementing the node version).

    :param str key: name of the attribute to delete
    :raises AttributeError: if the attribute does not exist
    """
    from aiida.backends.djsite.db.models import DbAttribute

    if self._to_be_stored:
        try:
            del self._attrs_cache[key]
        except KeyError:
            raise AttributeError(
                "DbAttribute {} does not exist".format(key))
        return

    # Stored node: check existence first, then delete from the DB
    if not DbAttribute.has_key(self.dbnode, key):
        raise AttributeError(
            "DbAttribute {} does not exist".format(key))
    DbAttribute.del_value_for_node(self.dbnode, key)
    self._increment_version_number_db()
def _set_db_attr(self, key, value):
    """
    Write the attribute value straight into the DB, without checking
    whether the node is stored and without touching the cache.

    DO NOT USE DIRECTLY.

    :param str key: key name
    :param value: its value
    """
    from aiida.backends.djsite.db.models import DbAttribute

    DbAttribute.set_value_for_node(self.dbnode, key, value)
    self._increment_version_number_db()
def _db_attrs(self):
    """
    Yield the keys of all attributes stored in the DB for this node.

    NOTE: the listing logic is intentionally duplicated here instead of
    delegating to iterattrs, because iterattrs is slow per entry (it has
    to call .getvalue() on each element). To improve!
    """
    from aiida.backends.djsite.db.models import DbAttribute

    for element in DbAttribute.list_all_node_elements(self.dbnode):
        yield element.key
def iterattrs(self):
    """
    Iterate over (key, value) pairs for all attributes of this node:
    from the in-memory cache while the node is unstored, from the DB
    afterwards.

    TODO: check what happens if someone stores the object while
    the iterator is being used!
    """
    from aiida.backends.djsite.db.models import DbAttribute

    if self._to_be_stored:
        for item in self._attrs_cache.iteritems():
            yield item
    else:
        values = DbAttribute.get_all_values_for_node(self.dbnode)
        for name in values:
            yield (name, values[name])
def attrs(self):
    """
    Yield the names of all attributes of this node (cache when unstored,
    DB otherwise).

    NOTE: the DB branch duplicates the listing code rather than calling
    iterattrs, because iterattrs is slow on each call (it must call
    .getvalue() per entry). To improve!
    """
    from aiida.backends.djsite.db.models import DbAttribute

    if self._to_be_stored:
        for name in self._attrs_cache.iterkeys():
            yield name
    else:
        for element in DbAttribute.list_all_node_elements(self.dbnode):
            yield element.key
def get_attr(self, key, default=_NO_DEFAULT):
    """
    Return the value of the attribute `key`.

    For an unstored node the value comes from the in-memory cache; for a
    stored node it is read from the DB.

    :param str key: attribute name
    :param default: value returned when the attribute is missing; if not
        given, a missing attribute raises instead
    :raises AttributeError: if the attribute does not exist and no
        default was provided
    """
    from aiida.backends.djsite.db.models import DbAttribute

    # FIX: the previous version wrapped the whole body in a single
    # try/except AttributeError, which also swallowed AttributeErrors
    # raised by unrelated attribute access (e.g. a missing field on
    # self), silently returning `default` for genuine programming
    # errors.  Only the two lookup sites are guarded now.
    if self._to_be_stored:
        try:
            return self._attrs_cache[key]
        except KeyError:
            if default is _NO_DEFAULT:
                raise AttributeError(
                    "DbAttribute '{}' does not exist".format(key))
            return default

    try:
        return DbAttribute.get_value_for_node(dbnode=self.dbnode,
                                              key=key)
    except AttributeError:
        if default is _NO_DEFAULT:
            raise
        return default
def _get_aiida_res(self, key, res):
    """
    Convert an instance returned by the ORM (django or SA) into the
    corresponding AiiDA instance (e.g. nodes) where needed.

    :param key: the key this entry would be returned with
    :param res: the result returned by the query
    :returns: an aiida-compatible instance
    """
    if key.startswith('attributes.'):
        # A specific attribute was requested: `res` holds its DbAttribute
        # id, so expand it into a value via getvalue()
        try:
            return DbAttribute.objects.get(id=res).getvalue()
        except ObjectDoesNotExist:
            # Missing object -> None, consistent with SQLAlchemy's
            # behaviour inside the JSON
            return None

    if key.startswith('extras.'):
        # Same as attributes
        try:
            return DbExtra.objects.get(id=res).getvalue()
        except ObjectDoesNotExist:
            return None

    if key == 'attributes':
        # All attributes requested: the QB returned the node ID, so fetch
        # the full dictionary
        return DbAttribute.get_all_values_for_nodepk(res)

    if key == 'extras':
        # Same as attributes
        return DbExtra.get_all_values_for_nodepk(res)

    if key in ('_metadata', 'transport_params'):
        # Metadata and transport_params are stored as json strings in the DB
        return json_loads(res)

    if isinstance(res, (self.Group, self.Node, self.Computer, self.User)):
        return res.get_aiida_class()

    return res
def store(self, with_transaction=True):
    """
    Store a new node in the DB, also saving its repository directory
    and attributes.

    After being called attributes cannot be changed anymore! Instead,
    extras can be changed only AFTER calling this store() function.

    :note: After successful storage, those links that are in the cache,
        and for which also the parent node is already stored, will be
        automatically stored. The others will remain unstored.

    :parameter with_transaction: if False, no transaction is used. This
        is meant to be used ONLY if the outer calling function has
        already a transaction open!
    """
    # TODO: This needs to be generalized, allowing for flexible methods
    # for storing data and its attributes.
    from django.db import transaction
    from aiida.common.utils import EmptyContextManager
    from aiida.common.exceptions import ValidationError
    from aiida.backends.djsite.db.models import DbAttribute
    import aiida.orm.autogroup

    if with_transaction:
        # FIX: transaction.atomic() replaces the deprecated
        # commit_on_success() (removed in Django 1.8); _db_store in this
        # file already uses atomic(), so this keeps both storage paths
        # consistent.
        context_man = transaction.atomic()
    else:
        context_man = EmptyContextManager()

    if self._to_be_stored:
        # As a first thing, I check if the data is valid
        self._validate()

        # Verify that parents are already stored. Raises if this is not
        # the case.
        self._check_are_parents_stored()

        # I save the corresponding django entry
        # I set the folder
        # NOTE: I first store the files, then only if this is successful,
        # I store the DB entry. In this way,
        # I assume that if a node exists in the DB, its folder is in place.
        # On the other hand, periodically the user might need to run some
        # bookkeeping utility to check for lone folders.
        self._repository_folder.replace_with_folder(
            self._get_temp_folder().abspath, move=True, overwrite=True)

        # I do the transaction only during storage on DB to avoid timeout
        # problems, especially with SQLite
        try:
            with context_man:
                # Save the row
                self._dbnode.save()
                # Save its attributes 'manually' without incrementing
                # the version for each add.
                DbAttribute.reset_values_for_node(
                    self.dbnode,
                    attributes=self._attrs_cache,
                    with_transaction=False)
                # This should not be used anymore: I delete it to
                # possibly free memory
                del self._attrs_cache

                self._temp_folder = None
                self._to_be_stored = False

                # Here, I store those links that were in the cache and
                # that are between stored nodes.
                self._store_cached_input_links()

        # This is one of the few cases where it is ok to do a 'global'
        # except, also because I am re-raising the exception
        except:
            # I put back the files in the sandbox folder since the
            # transaction did not succeed
            self._get_temp_folder().replace_with_folder(
                self._repository_folder.abspath, move=True, overwrite=True)
            raise

        # Set up autogrouping used be verdi run
        autogroup = aiida.orm.autogroup.current_autogroup
        grouptype = aiida.orm.autogroup.VERDIAUTOGROUP_TYPE
        if autogroup is not None:
            if not isinstance(autogroup, aiida.orm.autogroup.Autogroup):
                raise ValidationError(
                    "current_autogroup is not an AiiDA Autogroup")
            if autogroup.is_to_be_grouped(self):
                group_name = autogroup.get_group_name()
                if group_name is not None:
                    from aiida.orm import Group
                    g = Group.get_or_create(name=group_name,
                                            type_string=grouptype)[0]
                    g.add_nodes(self)

    # This is useful because in this way I can do
    # n = Node().store()
    return self
def query(cls, name=None, type_string="", pk=None, uuid=None, nodes=None,
          user=None, node_attributes=None, past_days=None,
          name_filters=None, **kwargs):
    """
    Query for groups matching all the given criteria, returning a list
    of Group wrappers sorted by pk.

    :param name: exact group name
    :param type_string: group type; the default "" therefore restricts
        the query to groups with an empty type string
    :param pk: group pk
    :param uuid: group uuid
    :param nodes: a Node/DbNode or an iterable of them that must belong
        to the group
    :param user: a user object or an email string
    :param node_attributes: dict {attr_key: value-or-list-of-values}
        that at least one node of the group must match
    :param past_days: lower bound compared against the group `time` column
    :param name_filters: dict of django `name__<lookup>` filters
    :raises TypeError: if an element of `nodes` is neither Node nor DbNode
    """
    from aiida.backends.djsite.db.models import (DbGroup, DbNode,
                                                 DbAttribute)

    # Combine all simple filters into a single Q object
    group_filter = Q()
    if name is not None:
        group_filter &= Q(name=name)
    if type_string is not None:
        group_filter &= Q(type=type_string)
    if pk is not None:
        group_filter &= Q(pk=pk)
    if uuid is not None:
        group_filter &= Q(uuid=uuid)
    if past_days is not None:
        # NOTE(review): despite the name, the value is compared directly
        # against the `time` column, so callers appear to pass a
        # datetime rather than a number of days — confirm with callers.
        group_filter &= Q(time__gte=past_days)

    if nodes is not None:
        if not isinstance(nodes, collections.Iterable):
            nodes = [nodes]
        node_pks = []
        for node in nodes:
            if not isinstance(node, (Node, DbNode)):
                raise TypeError("At least one of the elements passed as "
                                "nodes for the query on Group is neither "
                                "a Node nor a DbNode")
            node_pks.append(node.pk)
        group_filter &= Q(dbnodes__in=node_pks)

    if user is not None:
        if isinstance(user, basestring):
            group_filter &= Q(user__email=user)
        else:
            group_filter &= Q(user=user)

    if name_filters is not None:
        nonempty_name_filters = {
            "name__" + k: v
            for (k, v) in name_filters.iteritems() if v
        }
        group_filter &= Q(**nonempty_name_filters)

    groups_pk = set(
        DbGroup.objects.filter(group_filter,
                               **kwargs).values_list('pk', flat=True))

    if node_attributes is not None:
        for attr_key, wanted_values in node_attributes.iteritems():
            if isinstance(wanted_values, basestring) or not isinstance(
                    wanted_values, collections.Iterable):
                wanted_values = [wanted_values]
            for wanted in wanted_values:
                # This will be a dictionary of the type
                # {'datatype': 'txt', 'tval': 'xxx') for instance, if
                # the passed data is a string
                base_query_dict = DbAttribute.get_query_dict(wanted)
                # prepend to the key the right django string to SQL-join
                # on the right table
                join_query = {
                    'dbnodes__dbattributes__{}'.format(k2): v2
                    for k2, v2 in base_query_dict.iteritems()
                }
                # Narrow down the list of groups with one extra DB hit
                # per filter: a single combined query would exceed
                # SQLite's maximum of 64 tables in a join. Since
                # typically one requires a small number of filters,
                # this should be ok.
                groups_pk = groups_pk.intersection(
                    DbGroup.objects.filter(
                        pk__in=groups_pk,
                        dbnodes__dbattributes__key=attr_key,
                        **join_query).values_list('pk', flat=True))

    # Return sorted by pk
    return [cls(dbgroup=dbgroup) for dbgroup in sorted(groups_pk)]
def code_update(self, *args): import datetime from aiida.backends.utils import get_automatic_user if len(args) != 1: print >> sys.stderr, ("after 'code update' there should be one " "argument only, being the code id.") sys.exit(1) code = self.get_code(args[0]) if code.has_children: print "***********************************" print "| |" print "| WARNING! |" print "| Consider to create another code |" print "| You risk of losing the history |" print "| |" print "***********************************" # load existing stuff set_params = CodeInputValidationClass() set_params.label = code.label set_params.description = code.description set_params.input_plugin = code.get_input_plugin_name() was_local_before = code.is_local() set_params.is_local = code.is_local() if code.is_local(): set_params.local_rel_path = code.get_local_executable() # I don't have saved the folder with code, so I will just have the list of files # file_list = [ code._get_folder_pathsubfolder.get_abs_path(i) # for i in code.get_folder_list() ] else: set_params.computer = code.get_computer() set_params.remote_abs_path = code.get_remote_exec_path() set_params.prepend_text = code.get_prepend_text() set_params.append_text = code.get_append_text() # ask for the new values set_params.ask() # prepare a comment containing the previous version of the code now = datetime.datetime.now() new_comment = [] new_comment.append("Code modified on {}".format(now)) new_comment.append("Old configuration was:") new_comment.append("label: {}".format(code.label)) new_comment.append("description: {}".format(code.description)) new_comment.append("input_plugin_name: {}".format( code.get_input_plugin_name())) new_comment.append("is_local: {}".format(code.is_local())) if was_local_before: new_comment.append("local_executable: {}".format( code.get_local_executable())) else: new_comment.append("computer: {}".format(code.get_computer())) new_comment.append("remote_exec_path: {}".format( code.get_remote_exec_path())) 
new_comment.append("prepend_text: {}".format(code.get_prepend_text())) new_comment.append("append_text: {}".format(code.get_append_text())) comment = "\n".join(new_comment) if set_params.is_local: print "WARNING: => Folder with the code, and" print " => Relative path of the executable, " print " will be ignored! It is not possible to replace " print " the scripts, you have to create a new code for that." else: if was_local_before: # some old files will be left in the repository, and I cannot delete them print >> sys.stderr, ("It is not possible to change a " "code from local to remote.\n" "Modification cancelled.") sys.exit(1) print "WARNING: => computer" print " will be ignored! It is not possible to replace it" print " you have to create a new code for that." code.label = set_params.label code.description = set_params.description code.set_input_plugin_name(set_params.input_plugin) code.set_prepend_text(set_params.prepend_text) code.set_append_text(set_params.append_text) if not was_local_before: if set_params.remote_abs_path != code.get_remote_exec_path(): print "Are you sure about changing the path of the code?" print "This operation may imply loss of provenance." print "[Enter] to continue, [Ctrl + C] to exit" raw_input() from aiida.backends.djsite.db.models import DbAttribute DbAttribute.set_value_for_node(code.dbnode, 'remote_exec_path', set_params.remote_abs_path) # store comment, to track history code.add_comment(comment, user=get_automatic_user())
def _db_iterattrs(self):
    """
    Iterate over (key, value) pairs of all attributes stored in the DB
    for this node.
    """
    from aiida.backends.djsite.db.models import DbAttribute

    values = DbAttribute.get_all_values_for_node(self.dbnode)
    for name in values:
        yield (name, values[name])
def _get_db_attr(self, key):
    """
    Read the attribute `key` directly from the DB, bypassing any cache.

    :param str key: name of the attribute to read
    :returns: the attribute value
    """
    from aiida.backends.djsite.db.models import DbAttribute

    value = DbAttribute.get_value_for_node(dbnode=self.dbnode, key=key)
    return value
def _db_store(self, with_transaction=True):
    """
    Store a new node in the DB, also saving its repository directory
    and attributes.

    After being called attributes cannot be changed anymore! Instead,
    extras can be changed only AFTER calling this store() function.

    :note: After successful storage, those links that are in the cache,
        and for which also the parent node is already stored, will be
        automatically stored. The others will remain unstored.

    :parameter with_transaction: if False, no transaction is used. This
        is meant to be used ONLY if the outer calling function has
        already a transaction open!
    """
    # FIX: the previous docstring advertised a `use_cache` parameter
    # that this method never accepted (removed); the unused imports of
    # ValidationError and aiida.orm.autogroup were dropped, and the late
    # DbExtra import was merged into the models import below.
    # TODO: This needs to be generalized, allowing for flexible methods
    # for storing data and its attributes.
    from django.db import transaction
    from aiida.common.utils import EmptyContextManager
    from aiida.backends.djsite.db.models import DbAttribute, DbExtra

    if with_transaction:
        context_man = transaction.atomic()
    else:
        context_man = EmptyContextManager()

    # I save the corresponding django entry
    # I set the folder
    # NOTE: I first store the files, then only if this is successful,
    # I store the DB entry. In this way,
    # I assume that if a node exists in the DB, its folder is in place.
    # On the other hand, periodically the user might need to run some
    # bookkeeping utility to check for lone folders.
    self._repository_folder.replace_with_folder(
        self._get_temp_folder().abspath, move=True, overwrite=True)

    # I do the transaction only during storage on DB to avoid timeout
    # problems, especially with SQLite
    try:
        with context_man:
            # Save the row
            self._dbnode.save()
            # Save its attributes 'manually' without incrementing
            # the version for each add.
            DbAttribute.reset_values_for_node(
                self._dbnode,
                attributes=self._attrs_cache,
                with_transaction=False)
            # This should not be used anymore: I delete it to
            # possibly free memory
            del self._attrs_cache

            self._temp_folder = None
            self._to_be_stored = False

            # Here, I store those links that were in the cache and
            # that are between stored nodes.
            self._store_cached_input_links()

    # This is one of the few cases where it is ok to do a 'global'
    # except, also because I am re-raising the exception
    except:
        # I put back the files in the sandbox folder since the
        # transaction did not succeed
        self._get_temp_folder().replace_with_folder(
            self._repository_folder.abspath, move=True, overwrite=True)
        raise

    # I store the hash without cleaning and without incrementing the
    # nodeversion number
    DbExtra.set_value_for_node(self._dbnode, _HASH_EXTRA_KEY,
                               self.get_hash())

    return self