Example #1
    def _load(self):
        """
        Tries to load the result for the given key from the volatile cache; if it is not yet
        available, the query is executed. Afterwards (if a key is given), the result will be
        (re)cached.
        """
        self.data = self._volatile.get(
            self._key) if self._key is not None else None
        if self.data is None:
            # The query should be a dictionary:
            #     {'object': Disk,  # Object on which the query should be executed
            #      'data'  : DataList.select.XYZ,  # The requested result
            #      'query' : <query>}  # The actual query
            # Where <query> is a query(group) dictionary:
            #     {'type' : DataList.where_operator.ABC,  # Whether the items should be AND/OR
            #      'items': <items>}  # The items in the group
            # Where the <items> is any combination of one or more <filter> or <query>
            # A <filter> tuple example:
            #     (<field>, DataList.operator.GHI, <value>)  # For example EQUALS
            # The field is any property you would also find on the given object. In case of
            # relation properties, you can dot as far as you like (e.g. 'vmachine.name'). Since
            # <items> can contain nested <query> groups, AND and OR can be combined in any
            # possible way
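            # A hypothetical filled-in example (assuming a 'Disk' hybrid with a 'name' property
            # and a 'vmachine' relation; field names and values are illustrative only):
            #     {'object': Disk,
            #      'data'  : DataList.select.GUIDS,
            #      'query' : {'type' : DataList.where_operator.AND,
            #                 'items': [('name', DataList.operator.EQUALS, 'disk-01'),
            #                           ('vmachine.name', DataList.operator.EQUALS, 'vm-01')]}}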

            Toolbox.log_cache_hit('datalist', False)
            hybrid_structure = HybridRunner.get_hybrids()

            items = self._query['query']['items']
            query_type = self._query['query']['type']
            query_data = self._query['data']
            query_object = self._query['object']
            query_object_id = Descriptor(query_object).descriptor['identifier']
            if query_object_id in hybrid_structure and query_object_id != hybrid_structure[
                    query_object_id]['identifier']:
                query_object = Descriptor().load(
                    hybrid_structure[query_object_id]).get_object()

            invalidations = {query_object.__name__.lower(): ['__all']}
            DataList._build_invalidations(invalidations, query_object, items)

            for class_name in invalidations:
                key = '{0}_{1}'.format(DataList.cachelink, class_name)
                mutex = VolatileMutex('listcache_{0}'.format(class_name))
                try:
                    mutex.acquire(60)
                    cache_list = Toolbox.try_get(key, {})
                    current_fields = cache_list.get(self._key, [])
                    current_fields = list(
                        set(current_fields + ['__all'] +
                            invalidations[class_name]))
                    cache_list[self._key] = current_fields
                    self._volatile.set(key, cache_list)
                    self._persistent.set(key, cache_list)
                finally:
                    mutex.release()

            self.from_cache = False
            namespace = query_object()._namespace
            name = query_object.__name__.lower()
            guids = DataList.get_pks(namespace, name)

            if query_data == DataList.select.COUNT:
                self.data = 0
            else:
                self.data = []

            for guid in guids:
                try:
                    instance = query_object(guid)
                    if query_type == DataList.where_operator.AND:
                        include = self._exec_and(instance, items)
                    elif query_type == DataList.where_operator.OR:
                        include = self._exec_or(instance, items)
                    else:
                        raise NotImplementedError(
                            'The given operator is not yet implemented.')
                    if include:
                        if query_data == DataList.select.COUNT:
                            self.data += 1
                        elif query_data == DataList.select.GUIDS:
                            self.data.append(guid)
                        else:
                            raise NotImplementedError(
                                'The given selector type is not implemented')
                except ObjectNotFoundException:
                    pass

            if self._post_query_hook is not None:
                self._post_query_hook(self)

            if self._key is not None and len(guids) > 0 and self._can_cache:
                invalidated = False
                for class_name in invalidations:
                    key = '{0}_{1}'.format(DataList.cachelink, class_name)
                    cache_list = Toolbox.try_get(key, {})
                    if self._key not in cache_list:
                        invalidated = True
                # If the key under which the list should be saved was already invalidated since the invalidations
                # were saved, the returned list is most likely outdated. This is OK for this result, but the list
                # won't get cached
                if invalidated is False:
                    self._volatile.set(
                        self._key, self.data, 300 +
                        randint(0, 300))  # Cache between 5 and 10 minutes
        else:
            Toolbox.log_cache_hit('datalist', True)
            self.from_cache = True
        return self
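The loop over invalidations above registers, per class, which fields the to-be-cached list depends on under the DataList.cachelink key; the check just before caching then verifies that this registration still exists, because a concurrent save() or delete() removes it when relevant data changes, in which case the result is returned but not cached. A minimal standalone sketch of that register/invalidate/cache pattern, using plain dicts in place of the volatile store and the Toolbox helpers (all names hypothetical):

volatile = {}   # stand-in for the volatile cache
cachelink = {}  # stand-in for the cache-link registry: {class_name: {list_key: [fields]}}

def register(list_key, class_name, fields):
    # Register which fields of class_name the cached list depends on ('__all' covers create/delete)
    cachelink.setdefault(class_name, {})[list_key] = list(set(fields + ['__all']))

def invalidate(class_name, changed_fields, object_created_or_deleted=False):
    # Mirror of the save()/delete() side: drop every registered list whose dependencies changed
    links = cachelink.get(class_name, {})
    for list_key in list(links):
        fields = links[list_key]
        if ('__all' in fields and object_created_or_deleted) or set(fields) & set(changed_fields):
            volatile.pop(list_key, None)
            del links[list_key]

def cache_result(list_key, class_name, result):
    # Only cache when the registration survived, i.e. nothing relevant changed while querying
    if list_key in cachelink.get(class_name, {}):
        volatile[list_key] = result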
Example #2
    def save(self, recursive=False, skip=None, _hook=None):
        """
        Saves the object to the persistent backend and clears the cache, making use
        of the specified conflict-resolution settings.
        It will also invalidate certain caches if required, for example lists pointing
        towards this object.
        """
        if self.volatile is True:
            raise VolatileObjectException()

        tries = 0
        successful = False
        while successful is False:
            invalid_fields = []
            for prop in self._properties:
                if prop.mandatory is True and self._data[prop.name] is None:
                    invalid_fields.append(prop.name)
            for relation in self._relations:
                if relation.mandatory is True and self._data[relation.name]['guid'] is None:
                    invalid_fields.append(relation.name)
            if len(invalid_fields) > 0:
                raise MissingMandatoryFieldsException('Missing fields on {0}: {1}'.format(self._classname, ', '.join(invalid_fields)))

            if recursive:
                # Save the objects we point at (e.g. disk.vmachine - if this is disk)
                for relation in self._relations:
                    if relation.name != skip:  # disks will be skipped
                        item = getattr(self, relation.name)
                        if item is not None:
                            item.save(recursive=True, skip=relation.foreign_key)

                # Save the objects that point to us (e.g. machine.vdisks - if this is machine)
                relations = RelationMapper.load_foreign_relations(self.__class__)
                if relations is not None:
                    for key, info in relations.iteritems():
                        if key != skip:  # machine will be skipped
                            if info['list'] is True:
                                for item in getattr(self, key).iterloaded():
                                    item.save(recursive=True, skip=info['key'])
                            else:
                                item = getattr(self, key)
                                if item is not None:
                                    item.save(recursive=True, skip=info['key'])

            for relation in self._relations:
                if self._data[relation.name]['guid'] is not None:
                    if relation.foreign_type is None:
                        cls = self.__class__
                    else:
                        cls = relation.foreign_type
                    _ = cls(self._data[relation.name]['guid'])

            try:
                data = self._persistent.get(self._key)
            except KeyNotFoundException:
                if self._new:
                    data = {'_version': 0}
                else:
                    raise ObjectNotFoundException('{0} with guid \'{1}\' was deleted'.format(
                        self.__class__.__name__, self._guid
                    ))
            changed_fields = []
            data_conflicts = []
            for attribute in self._data.keys():
                if attribute == '_version':
                    continue
                if self._data[attribute] != self._original[attribute]:
                    # We changed this value
                    changed_fields.append(attribute)
                    if attribute in data and self._original[attribute] != data[attribute]:
                        # Some other process also wrote to the database
                        if self._datastore_wins is None:
                            # In case we didn't set a policy, we raise the conflicts
                            data_conflicts.append(attribute)
                        elif self._datastore_wins is False:
                            # If the datastore should not win, we just overwrite the data
                            data[attribute] = self._data[attribute]
                        # If the datastore should win, we discard/ignore our change
                    else:
                        # Normal scenario, saving data
                        data[attribute] = self._data[attribute]
                elif attribute not in data:
                    data[attribute] = self._data[attribute]
            for attribute in data.keys():
                if attribute == '_version':
                    continue
                if attribute not in self._data:
                    del data[attribute]
            if data_conflicts:
                raise ConcurrencyException('Got field conflicts while saving {0}. Conflicts: {1}'.format(
                    self._classname, ', '.join(data_conflicts)
                ))

            # Refresh internal data structure
            self._data = copy.deepcopy(data)

            caching_keys = []
            try:
                # First, update reverse index
                try:
                    self._mutex_reverseindex.acquire(60)
                    for relation in self._relations:
                        key = relation.name
                        original_guid = self._original[key]['guid']
                        new_guid = self._data[key]['guid']
                        if original_guid != new_guid:
                            if relation.foreign_type is None:
                                classname = self.__class__.__name__.lower()
                            else:
                                classname = relation.foreign_type.__name__.lower()
                            if original_guid is not None:
                                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                                reverse_index = self._volatile.get(reverse_key)
                                if reverse_index is not None:
                                    if relation.foreign_key in reverse_index:
                                        entries = reverse_index[relation.foreign_key]
                                        if self.guid in entries:
                                            entries.remove(self.guid)
                                            reverse_index[relation.foreign_key] = entries
                                            caching_keys.append(reverse_key)
                                            self._volatile.set(reverse_key, reverse_index)
                            if new_guid is not None:
                                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, new_guid)
                                reverse_index = self._volatile.get(reverse_key)
                                if reverse_index is not None:
                                    if relation.foreign_key in reverse_index:
                                        entries = reverse_index[relation.foreign_key]
                                        if self.guid not in entries:
                                            entries.append(self.guid)
                                            reverse_index[relation.foreign_key] = entries
                                            caching_keys.append(reverse_key)
                                            self._volatile.set(reverse_key, reverse_index)
                                    else:
                                        reverse_index[relation.foreign_key] = [self.guid]
                                        caching_keys.append(reverse_key)
                                        self._volatile.set(reverse_key, reverse_index)
                                else:
                                    reverse_index = {relation.foreign_key: [self.guid]}
                                    caching_keys.append(reverse_key)
                                    self._volatile.set(reverse_key, reverse_index)
                    if self._new is True:
                        reverse_key = 'ovs_reverseindex_{0}_{1}'.format(self._classname, self.guid)
                        reverse_index = self._volatile.get(reverse_key)
                        if reverse_index is None:
                            reverse_index = {}
                            relations = RelationMapper.load_foreign_relations(self.__class__)
                            if relations is not None:
                                for key, _ in relations.iteritems():
                                    reverse_index[key] = []
                            caching_keys.append(reverse_key)
                            self._volatile.set(reverse_key, reverse_index)
                finally:
                    self._mutex_reverseindex.release()

                # Second, invalidate property lists
                try:
                    self._mutex_listcache.acquire(60)
                    cache_key = '{0}_{1}'.format(DataList.cachelink, self._classname)
                    cache_list = Toolbox.try_get(cache_key, {})
                    change = False
                    for list_key in cache_list.keys():
                        fields = cache_list[list_key]
                        if ('__all' in fields and self._new) or list(set(fields) & set(changed_fields)):
                            change = True
                            self._volatile.delete(list_key)
                            del cache_list[list_key]
                    if change is True:
                        self._volatile.set(cache_key, cache_list)
                        self._persistent.set(cache_key, cache_list)
                finally:
                    self._mutex_listcache.release()

                if _hook is not None and hasattr(_hook, '__call__'):
                    _hook()

                # Save the data
                try:
                    self._mutex_version.acquire(30)
                    this_version = self._data['_version']
                    try:
                        store_version = self._persistent.get(self._key)['_version']
                    except KeyNotFoundException:
                        store_version = 0
                    if this_version == store_version:
                        self._data['_version'] = this_version + 1
                        self._persistent.set(self._key, self._data)
                        self._volatile.delete(self._key)
                        successful = True
                    else:
                        tries += 1
                finally:
                    self._mutex_version.release()
                if tries > 5:
                    raise SaveRaceConditionException()
            except:
                for key in caching_keys:
                    self._volatile.delete(key)
                raise

        self.invalidate_dynamics()
        self._original = copy.deepcopy(self._data)

        self.dirty = False
        self._new = False
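The final _mutex_version block is an optimistic-concurrency write: the in-memory _version is compared against the stored one and, only on a match, the incremented version is written; on a mismatch the surrounding while-loop retries, raising SaveRaceConditionException after more than 5 failed attempts. A minimal standalone sketch of that retry pattern, with a plain dict standing in for the persistent store (names hypothetical):

store = {}  # stand-in for the persistent store: {key: {'_version': int, ...}}

class SaveRaceCondition(Exception):
    pass

def optimistic_save(key, data, max_tries=5):
    # Write 'data' only if nobody bumped the stored _version in the meantime; retry otherwise
    tries = 0
    while True:
        this_version = data.get('_version', 0)
        store_version = store.get(key, {'_version': 0})['_version']
        if this_version == store_version:
            data['_version'] = this_version + 1
            store[key] = dict(data)
            return
        # Someone else saved first: adopt the stored version (a real implementation would
        # re-merge its changes, as save() above does) and try again
        data['_version'] = store_version
        tries += 1
        if tries > max_tries:
            raise SaveRaceCondition()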
Example #3
    def delete(self, abandon=None, _hook=None):
        """
        Deletes the given object. It also invalidates certain cached lists.
        """
        if self.volatile is True:
            raise VolatileObjectException()

        # Check foreign relations
        relations = RelationMapper.load_foreign_relations(self.__class__)
        if relations is not None:
            for key, info in relations.iteritems():
                items = getattr(self, key)
                if info['list'] is True:
                    if len(items) > 0:
                        if abandon is not None and (key in abandon or '_all' in abandon):
                            for item in items.itersafe():
                                setattr(item, info['key'], None)
                                try:
                                    item.save()
                                except ObjectNotFoundException:
                                    pass
                        else:
                            multi = 'are {0} items'.format(len(items)) if len(items) > 1 else 'is 1 item'
                            raise LinkedObjectException('There {0} left in self.{1}'.format(multi, key))
                elif items is not None:
                    # No list (so a 1-to-1 relation), so there should be an object, or None
                    item = items  # Clearer naming
                    if abandon is not None and (key in abandon or '_all' in abandon):
                        setattr(item, info['key'], None)
                        try:
                            item.save()
                        except ObjectNotFoundException:
                            pass
                    else:
                        raise LinkedObjectException('There is still an item linked in self.{0}'.format(key))

        # Delete the object out of the persistent store
        try:
            self._persistent.delete(self._key)
        except KeyNotFoundException:
            pass

        # First, update reverse index
        try:
            self._mutex_reverseindex.acquire(60)
            for relation in self._relations:
                key = relation.name
                original_guid = self._original[key]['guid']
                if original_guid is not None:
                    if relation.foreign_type is None:
                        classname = self.__class__.__name__.lower()
                    else:
                        classname = relation.foreign_type.__name__.lower()
                    reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                    reverse_index = self._volatile.get(reverse_key)
                    if reverse_index is not None:
                        if relation.foreign_key in reverse_index:
                            entries = reverse_index[relation.foreign_key]
                            if self.guid in entries:
                                entries.remove(self.guid)
                                reverse_index[relation.foreign_key] = entries
                                self._volatile.set(reverse_key, reverse_index)
            self._volatile.delete('ovs_reverseindex_{0}_{1}'.format(self._classname, self.guid))
        finally:
            self._mutex_reverseindex.release()

        # Second, invalidate property lists
        try:
            self._mutex_listcache.acquire(60)
            cache_key = '{0}_{1}'.format(DataList.cachelink, self._classname)
            cache_list = Toolbox.try_get(cache_key, {})
            change = False
            for list_key in cache_list.keys():
                fields = cache_list[list_key]
                if '__all' in fields:
                    change = True
                    self._volatile.delete(list_key)
                    del cache_list[list_key]
            if change is True:
                self._volatile.set(cache_key, cache_list)
                self._persistent.set(cache_key, cache_list)
        finally:
            self._mutex_listcache.release()

        # Delete the object and its properties out of the volatile store
        self.invalidate_dynamics()
        self._volatile.delete(self._key)
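Whether a linked item blocks the delete or is detached instead is decided by the same small check in both branches above: the relation may be abandoned when its name, or the catch-all '_all', appears in the abandon argument. A standalone illustration of that check (the helper name is hypothetical):

def may_abandon(relation_name, abandon):
    # True when the linked objects may be detached (their back-reference set to None)
    # instead of aborting the delete with a LinkedObjectException
    return abandon is not None and (relation_name in abandon or '_all' in abandon)

assert may_abandon('vdisks', ['vdisks']) is True   # explicitly abandoned relation
assert may_abandon('vdisks', ['_all']) is True     # abandon all relations
assert may_abandon('vdisks', None) is False        # default: the delete raises LinkedObjectException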
Example #4
    def _load(self):
        """
        Tries to load the result for the given key from the volatile cache; if it is not yet
        available, the query is executed. Afterwards (if a key is given), the result will be
        (re)cached.
        """
        self.data = self._volatile.get(self._key) if self._key is not None else None
        if self.data is None:
            # The query should be a dictionary:
            #     {'object': Disk,  # Object on which the query should be executed
            #      'data'  : DataList.select.XYZ,  # The requested result
            #      'query' : <query>}  # The actual query
            # Where <query> is a query(group) dictionary:
            #     {'type' : DataList.where_operator.ABC,  # Whether the items should be AND/OR
            #      'items': <items>}  # The items in the group
            # Where the <items> is any combination of one or more <filter> or <query>
            # A <filter> tuple example:
            #     (<field>, DataList.operator.GHI, <value>)  # For example EQUALS
            # The field is any property you would also find on the given object. In case of
            # relation properties, you can dot as far as you like (e.g. 'vmachine.name'). Since
            # <items> can contain nested <query> groups, AND and OR can be combined in any
            # possible way
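            # <items> may also nest query groups; a hypothetical example combining AND and OR
            # (field names and values are illustrative only):
            #     {'type' : DataList.where_operator.AND,
            #      'items': [('vmachine.name', DataList.operator.EQUALS, 'vm-01'),
            #                {'type' : DataList.where_operator.OR,
            #                 'items': [('name', DataList.operator.EQUALS, 'disk-01'),
            #                           ('name', DataList.operator.EQUALS, 'disk-02')]}]}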

            Toolbox.log_cache_hit('datalist', False)
            hybrid_structure = HybridRunner.get_hybrids()

            items = self._query['query']['items']
            query_type = self._query['query']['type']
            query_data = self._query['data']
            query_object = self._query['object']
            query_object_id = Descriptor(query_object).descriptor['identifier']
            if query_object_id in hybrid_structure and query_object_id != hybrid_structure[query_object_id]['identifier']:
                query_object = Descriptor().load(hybrid_structure[query_object_id]).get_object()

            invalidations = {query_object.__name__.lower(): ['__all']}
            DataList._build_invalidations(invalidations, query_object, items)

            for class_name in invalidations:
                key = '{0}_{1}'.format(DataList.cachelink, class_name)
                mutex = VolatileMutex('listcache_{0}'.format(class_name))
                try:
                    mutex.acquire(60)
                    cache_list = Toolbox.try_get(key, {})
                    current_fields = cache_list.get(self._key, [])
                    current_fields = list(set(current_fields + ['__all'] + invalidations[class_name]))
                    cache_list[self._key] = current_fields
                    self._volatile.set(key, cache_list)
                    self._persistent.set(key, cache_list)
                finally:
                    mutex.release()

            self.from_cache = False
            namespace = query_object()._namespace
            name = query_object.__name__.lower()
            guids = DataList.get_pks(namespace, name)

            if query_data == DataList.select.COUNT:
                self.data = 0
            else:
                self.data = []

            for guid in guids:
                try:
                    instance = query_object(guid)
                    if query_type == DataList.where_operator.AND:
                        include = self._exec_and(instance, items)
                    elif query_type == DataList.where_operator.OR:
                        include = self._exec_or(instance, items)
                    else:
                        raise NotImplementedError('The given operator is not yet implemented.')
                    if include:
                        if query_data == DataList.select.COUNT:
                            self.data += 1
                        elif query_data == DataList.select.GUIDS:
                            self.data.append(guid)
                        else:
                            raise NotImplementedError('The given selector type is not implemented')
                except ObjectNotFoundException:
                    pass

            if 'post_query' in DataList.test_hooks:
                DataList.test_hooks['post_query'](self)

            if self._key is not None and len(guids) > 0 and self._can_cache:
                invalidated = False
                for class_name in invalidations:
                    key = '{0}_{1}'.format(DataList.cachelink, class_name)
                    cache_list = Toolbox.try_get(key, {})
                    if self._key not in cache_list:
                        invalidated = True
                # If the key under which the list should be saved was already invalidated since the invalidations
                # were saved, the returned list is most likely outdated. This is OK for this result, but the list
                # won't get cached
                if invalidated is False:
                    self._volatile.set(self._key, self.data, 300 + randint(0, 300))  # Cache between 5 and 10 minutes
        else:
            Toolbox.log_cache_hit('datalist', True)
            self.from_cache = True
        return self
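This variant differs from Example #1 mainly in its hook mechanism: instead of an instance-level _post_query_hook, a class-level DataList.test_hooks dictionary is consulted, and a 'post_query' entry is called with the DataList right after the query ran and before the result may be cached. A minimal sketch of registering such a hook from a test, assuming test_hooks is a plain dictionary as the membership check above suggests (the hook body is illustrative only):

def _post_query(datalist):
    # Runs after the query was executed, before the result is (possibly) cached
    print('query finished, from_cache={0}, data={1}'.format(datalist.from_cache, datalist.data))

DataList.test_hooks['post_query'] = _post_query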