Example #1
0
class DeployedStore(Persistent):
    """Persistent container holding the history of deployment entries.

    At most ``store_length`` entries are kept; adding beyond that limit
    evicts the entry with the smallest id (the oldest one).
    """

    implements(IDeployedStore)

    # Upper bound on the number of retained entries.
    store_length = 10

    def __init__(self):
        self._entries = OOBTree()
        self.busy = False

    def __iter__(self):
        # Yield entries newest-first by reversing the BTree's key order.
        ordered = [entry for entry in self._entries.itervalues()]
        return reversed(ordered)

    def add(self, date, user, action, clear, full, status, errmsg=None):
        """Record a new deployment entry, evicting the oldest when full."""
        new_entry = DeployedEntry(date, user, action, clear, full, status, errmsg)
        self._entries[new_entry.id] = new_entry
        if len(self._entries) > self.store_length:
            # minKey() is the id of the oldest entry.
            del self._entries[self._entries.minKey()]

    def remove(self, id):
        """Delete the entry stored under *id*."""
        del self._entries[id]

    def get(self, id):
        """Return the entry stored under *id*."""
        return self._entries[id]

    @iteration_with_status(DEPLOYMENT_STATUS.DONE)
    def done(self): pass

    @iteration_with_status(DEPLOYMENT_STATUS.ERROR)
    def error(self): pass
Example #2
0
class fsIndex(object):
    """Map 8-byte keys (oids) to integer file positions.

    Each key is split into a 6-byte prefix and a 2-byte suffix:
    ``_data`` is an OOBTree of prefix -> fsBucket, and each fsBucket maps
    suffix -> position encoded as a string (``num2str``/``str2num``).
    """
    def __init__(self, data=None):
        self._data = OOBTree()
        if data:
            self.update(data)

    def __getstate__(self):
        # Serialise buckets as strings, tagged with a format version so
        # __setstate__ can dispatch on it.
        return dict(state_version=1, _data=[(k, v.toString()) for (k, v) in self._data.iteritems()])

    def __setstate__(self, state):
        # Dispatch to _setstate_<version>; missing tag means legacy format 0.
        version = state.pop("state_version", 0)
        getattr(self, "_setstate_%s" % version)(state)

    def _setstate_0(self, state):
        # Legacy pickles stored the instance __dict__ directly.
        self.__dict__.clear()
        self.__dict__.update(state)

    def _setstate_1(self, state):
        # Rebuild the buckets from their string serialisations.
        self._data = OOBTree([(k, fsBucket().fromString(v)) for (k, v) in state["_data"]])

    def __getitem__(self, key):
        return str2num(self._data[key[:6]][key[6:]])

    def save(self, pos, fname):
        """Write *pos* and all (prefix, bucket-string) pairs to *fname*."""
        with open(fname, "wb") as f:
            pickler = cPickle.Pickler(f, 1)
            pickler.fast = True
            pickler.dump(pos)
            for k, v in self._data.iteritems():
                pickler.dump((k, v.toString()))
            pickler.dump(None)  # sentinel marking the end of the stream

    @classmethod
    def load(class_, fname):
        """Load an index written by save(); returns dict(pos=..., index=...).

        If the first unpickled object is not an integer, the file is in an
        old format and that object is returned unchanged.
        """
        with open(fname, "rb") as f:
            unpickler = cPickle.Unpickler(f)
            pos = unpickler.load()
            if not isinstance(pos, (int, long)):
                return pos  # Old format
            index = class_()
            data = index._data
            while 1:
                v = unpickler.load()
                if not v:
                    break  # hit the None sentinel written by save()
                k, v = v
                data[k] = fsBucket().fromString(v)
            return dict(pos=pos, index=index)

    def get(self, key, default=None):
        """Like __getitem__, but return *default* when *key* is absent."""
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # First entry for this prefix: create its bucket.
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            raise KeyError, key
        del tree[key[6:]]
        if not tree:
            # Drop empty buckets so minKey/maxKey can rely on every bucket
            # in _data being non-empty.
            del self._data[treekey]

    def __len__(self):
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # `self` is used as a sentinel so stored falsy values are handled.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure:  what if `tree` is actually empty?  It can't be:
    # __setitem__ only creates a bucket when it has an entry to put into
    # it, and __delitem__ removes a bucket as soon as its last entry is
    # deleted.  Therefore nothing in _data.values() is ever empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey() methods are
    # very efficient.

    def minKey(self, key=None):
        """Return the smallest key, or the smallest key >= *key* if given."""
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])

        tree = self._data[smallest_prefix]

        assert tree

        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix >= arg)
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()

        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        """Return the largest key, or the largest key <= *key* if given."""
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])

        tree = self._data[biggest_prefix]

        assert tree

        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix <= arg)
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()

        return biggest_prefix + biggest_suffix
Example #3
0
class MultiPointerTrack(Persistent):
    """
    A MultiPointerTrack is a kind of structure that is based on an IOBTree,
    where each entry contains an ordered set (or list, depending on the
    implementation) of elements. Then, several "pointers" can be created,
    which point to different positions of the track (very much like runners
    in a race track).
    This class is abstract, implementations should be derived.

    Timestamps are stored in inverted order (see mostRecentTS/oldestTS:
    minKey() is the most recent entry, maxKey() the oldest).
    """
    def __init__(self, elemContainer):
        self._container = OOBTree()
        self._pointers = PersistentMapping()
        # Factory used by prepareEntry() to build per-timestamp containers
        self._elemContainer = elemContainer

        # initialize first entry
        #self._container[timestamp(0)] = elemContainer()

    def addPointer(self, pid, startPos=None):
        """
        Registers a new pointer
        """
        self._pointers[pid] = None
        # NOTE(review): a falsy startPos (e.g. 0) is ignored — confirm intended
        if startPos:
            self.movePointer(pid, startPos)

    def removePointer(self, pid):
        """
        Removes a pointer from the list
        """
        del self._pointers[pid]

    def prepareEntry(self, ts):
        """
        Creates an empty sub-structure (elemContainer) for a given timestamp
        """
        self._container[timestamp(ts)] = self._elemContainer()

    def getCurrentPosition(self, pid):
        """
        Returns the current entry (set/list) for a given pointer id
        """
        currentPos = self._pointers[pid]
        # TODO: assertion? check?
        return self._container[timestamp(currentPos)]

    def getPointerTimestamp(self, pid):
        """
        Gets the current 'position' of a pointer (id)
        """
        return self._pointers[pid]

    def __getitem__(self, ts):
        """
        Implements __getitem__, so that mpt[timestamp] works
        """
        if isinstance(ts, slice):
            return self._getSlice(ts)
        else:
            return self._container[timestamp(ts)]

    def _getSlice(self, s):
        """
        Calculates a slice of the structure (timestamp-wise)
        """
        if s.step is not None:
            raise TypeError('Extended slices are not accepted here')
        return self._container.values(s.start, s.stop)

    def values(self, *args):
        """
        Return values or ranges (timestamps) of the structure
        """
        # Convert each range bound to a timestamp, leaving None untouched
        fargs = []

        for a in args:
            if a is None:
                fargs.append(None)
            else:
                fargs.append(timestamp(a))

        return self._container.values(*fargs)

    def _append(self, ts, val):
        """
        Should be overloaded.
        """
        raise NotImplementedError("Unimplemented method")

    def add(self, intTS, value):
        """
        Adds a value to the container corresponding to a specific timestamp
        """
        ts = timestamp(intTS)
        if ts not in self._container:
            self.prepareEntry(intTS)

        self._append(ts, value)

    def _pointerIterator(self, pid, func, till=None):
        """
        Iterates over the positions that are left (till the end of the track)
        for a given pointer (id). Takes a function that is applied to yielded
        values
        """

        return self.iterate(self._pointers[pid], till, func)

    def iterate(self, fromPos=None, till=None, func=(lambda x: x)):
        """
        Generator that iterates through the data structure
        """
        if till is not None:
            till = timestamp(till)
            # negative numbers mean "last but one", "last but two", etc...
            if till == timestamp(-1):
                # most common case
                till = self._container.maxKey() - timestamp(1)

        if fromPos is not None:
            fromPos = timestamp(fromPos)

        for ts, entry in self._container.iteritems(till, fromPos):
            if fromPos and ts == fromPos:
                # Stop once the pointer's own position is reached.
                # BUGFIX: use a plain `return` instead of `raise
                # StopIteration` — under PEP 479 (Python 3.7+) raising
                # StopIteration inside a generator becomes a RuntimeError.
                return

            for elem in entry:
                yield func((int(ts), elem))

    def mostRecentTS(self, maximum=None):
        """
        Returns most recent timestamp in track (minimum key)
        If 'maximum' is provided, return it if less recent
        """

        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()

        mr = self._container.minKey()

        if maximum:
            maximum = timestamp(maximum)
            # in timestamp logic, max() returns the oldest
            return max(mr, maximum)
        else:
            return mr

    def oldestTS(self):
        """
        Returns least recent timestamp in track (maximum key)
        """

        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()

        return self._container.maxKey()

    def pointerIterValues(self, pid, till=None):
        """
        Iterates over the positions that are left (till the end of the track)
        for a given pointer (id) - iterates over values
        """
        return self._pointerIterator(pid, lambda x: x[1], till=till)

    def pointerIterItems(self, pid, till=None):
        """
        Iterates over the positions that are left (till the end of the track)
        for a given pointer (id) - iterates over key-value pairs (iteritems)
        """

        return self._pointerIterator(pid, lambda x: x, till=till)

    def movePointer(self, pid, pos):
        """
        Moves a given pointer (id) to a given timestamp
        """
        if pid not in self._pointers:
            raise KeyError("Pointer '%s' doesn't seem to exist!" % pid)

        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()

        self._pointers[pid] = pos

    def __len__(self):
        """
        Returns the number of timestamp entries
        """
        return len(self._container)

    def __delitem__(self, item):
        """
        Deletes a given timestamp entry (or range)
        """
        self._container.__delitem__(item)

    def __iter__(self):
        """
        Iterates over the whole structure, element by elements
        (goes inside containers)
        """
        return self.iterate()

    def __contains__(self, ts):
        return timestamp(ts) in self._container
Example #4
0
class fsIndex(object):
    """Map 8-byte keys (oids) to integer file positions.

    Each key is split into a 6-byte prefix and a 2-byte suffix:
    ``_data`` is an OOBTree of prefix -> fsBucket, and each fsBucket maps
    suffix -> position encoded as a string (``num2str``/``str2num``).
    Keys are normalised to bytes via ``ensure_bytes`` for Python 2/3
    compatibility.
    """

    def __init__(self, data=None):
        self._data = OOBTree()
        if data:
            self.update(data)

    def __getstate__(self):
        # Serialise buckets as strings, tagged with a format version so
        # __setstate__ can dispatch on it.
        return dict(
            state_version = 1,
            _data = [(k, v.toString())
                     for (k, v) in six.iteritems(self._data)
                     ]
            )

    def __setstate__(self, state):
        # Dispatch to _setstate_<version>; missing tag means legacy format 0.
        version = state.pop('state_version', 0)
        getattr(self, '_setstate_%s' % version)(state)

    def _setstate_0(self, state):
        # Legacy pickles stored the instance __dict__ directly; re-key the
        # tree with bytes keys afterwards.
        self.__dict__.clear()
        self.__dict__.update(state)
        self._data = OOBTree([
            (ensure_bytes(k), v)
            for (k, v) in self._data.items()
            ])

    def _setstate_1(self, state):
        # Rebuild the buckets from their string serialisations.
        self._data = OOBTree([
            (ensure_bytes(k), fsBucket().fromString(ensure_bytes(v)))
            for (k, v) in state['_data']
            ])

    def __getitem__(self, key):
        assert isinstance(key, bytes)
        return str2num(self._data[key[:6]][key[6:]])

    def save(self, pos, fname):
        """Write *pos* and all (prefix, bucket-string) pairs to *fname*."""
        with open(fname, 'wb') as f:
            pickler = Pickler(f, _protocol)
            pickler.fast = True
            pickler.dump(pos)
            for k, v in six.iteritems(self._data):
                pickler.dump((k, v.toString()))
            pickler.dump(None)  # sentinel marking the end of the stream

    @classmethod
    def load(class_, fname):
        """Load an index written by save(); returns dict(pos=..., index=...).

        If the first unpickled object is not an integer, the file is in an
        old format and that object is returned unchanged.
        """
        with open(fname, 'rb') as f:
            unpickler = Unpickler(f)
            pos = unpickler.load()
            if not isinstance(pos, INT_TYPES):
                # NB: this might contain OIDs that got unpickled
                # into Unicode strings on Python 3; hope the caller
                # will pipe the result to fsIndex().update() to normalize
                # the keys
                return pos                  # Old format
            index = class_()
            data = index._data
            while 1:
                v = unpickler.load()
                if not v:
                    break  # hit the None sentinel written by save()
                k, v = v
                data[ensure_bytes(k)] = fsBucket().fromString(ensure_bytes(v))
            return dict(pos=pos, index=index)

    def get(self, key, default=None):
        """Like __getitem__, but return *default* when *key* is absent."""
        assert isinstance(key, bytes)
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        assert isinstance(key, bytes)
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # First entry for this prefix: create its bucket.
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        assert isinstance(key, bytes)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            raise KeyError(key)
        del tree[key[6:]]
        if not tree:
            # Drop empty buckets so minKey/maxKey can rely on every bucket
            # in _data being non-empty.
            del self._data[treekey]

    def __len__(self):
        r = 0
        for tree in six.itervalues(self._data):
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[ensure_bytes(k)] = v

    def has_key(self, key):
        # `self` is used as a sentinel so stored falsy values are handled.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        assert isinstance(key, bytes)
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        for prefix, tree in six.iteritems(self._data):
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in six.iteritems(self._data):
            for suffix, value in six.iteritems(tree):
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in six.itervalues(self._data):
            for value in six.itervalues(tree):
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure:  what if `tree` is actually empty?  It can't be:
    # __setitem__ only creates a bucket when it has an entry to put into
    # it, and __delitem__ removes a bucket as soon as its last entry is
    # deleted.  Therefore nothing in _data.values() is ever empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey() methods are
    # very efficient.

    def minKey(self, key=None):
        """Return the smallest key, or the smallest key >= *key* if given."""
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])

        tree = self._data[smallest_prefix]

        assert tree

        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError: # 'empty tree' (no suffix >= arg)
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()

        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        """Return the largest key, or the largest key <= *key* if given."""
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])

        tree = self._data[biggest_prefix]

        assert tree

        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError: # 'empty tree' (no suffix <= arg)
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()

        return biggest_prefix + biggest_suffix
Example #5
0
class MultiPointerTrack(Persistent):
    """
    A MultiPointerTrack is a kind of structure that is based on an IOBTree,
    where each entry contains an ordered set (or list, depending on the
    implementation) of elements. Then, several "pointers" can be created,
    which point to different positions of the track (very much like runners
    in a race track).
    This class is abstract, implementations should be derived.

    Timestamps are stored in inverted order (see mostRecentTS/oldestTS:
    minKey() is the most recent entry, maxKey() the oldest).
    """

    def __init__(self, elemContainer):
        self._container = OOBTree()
        self._pointers = PersistentMapping()
        # Factory used by prepareEntry() to build per-timestamp containers
        self._elemContainer = elemContainer

        # initialize first entry
        #self._container[timestamp(0)] = elemContainer()

    def addPointer(self, pid, startPos=None):
        """
        Registers a new pointer
        """
        self._pointers[pid] = None
        # NOTE(review): a falsy startPos (e.g. 0) is ignored — confirm intended
        if startPos:
            self.movePointer(pid, startPos)

    def removePointer(self, pid):
        """
        Removes a pointer from the list
        """
        del self._pointers[pid]

    def prepareEntry(self, ts):
        """
        Creates an empty sub-structure (elemContainer) for a given timestamp
        """
        self._container[timestamp(ts)] = self._elemContainer()

    def getCurrentPosition(self, pid):
        """
        Returns the current entry (set/list) for a given pointer id
        """
        currentPos = self._pointers[pid]
        # TODO: assertion? check?
        return self._container[timestamp(currentPos)]

    def getPointerTimestamp(self, pid):
        """
        Gets the current 'position' of a pointer (id)
        """
        return self._pointers[pid]

    def __getitem__(self, ts):
        """
        Implements __getitem__, so that mpt[timestamp] works
        """
        if isinstance(ts, slice):
            return self._getSlice(ts)
        else:
            return self._container[timestamp(ts)]

    def _getSlice(self, s):
        """
        Calculates a slice of the structure (timestamp-wise)
        """
        if s.step is not None:
            raise TypeError('Extended slices are not accepted here')
        return self._container.values(s.start, s.stop)

    def values(self, *args):
        """
        Return values or ranges (timestamps) of the structure
        """
        # Convert each range bound to a timestamp, leaving None untouched
        fargs = []

        for a in args:
            if a is None:
                fargs.append(None)
            else:
                fargs.append(timestamp(a))

        return self._container.values(*fargs)

    def _append(self, ts, val):
        """
        Should be overloaded.
        """
        raise NotImplementedError("Unimplemented method")

    def add(self, intTS, value):
        """
        Adds a value to the container corresponding to a specific timestamp
        """
        ts = timestamp(intTS)
        if ts not in self._container:
            self.prepareEntry(intTS)

        self._append(ts, value)

    def _pointerIterator(self, pid, func, till=None):
        """
        Iterates over the positions that are left (till the end of the track)
        for a given pointer (id). Takes a function that is applied to yielded
        values
        """

        return self.iterate(self._pointers[pid], till, func)

    def iterate(self, fromPos=None, till=None, func=(lambda x: x)):
        """
        Generator that iterates through the data structure
        """
        if till is not None:
            till = timestamp(till)
            # negative numbers mean "last but one", "last but two", etc...
            if till == timestamp(-1):
                # most common case
                till = self._container.maxKey() - timestamp(1)

        if fromPos is not None:
            fromPos = timestamp(fromPos)

        for ts, entry in self._container.iteritems(till, fromPos):
            if fromPos and ts == fromPos:
                # Stop once the pointer's own position is reached.
                # BUGFIX: use a plain `return` instead of `raise
                # StopIteration` — under PEP 479 (Python 3.7+) raising
                # StopIteration inside a generator becomes a RuntimeError.
                return

            for elem in entry:
                yield func((int(ts), elem))

    def mostRecentTS(self, maximum=None):
        """
        Returns most recent timestamp in track (minimum key)
        If 'maximum' is provided, return it if less recent
        """

        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()

        mr = self._container.minKey()

        if maximum:
            maximum = timestamp(maximum)
            # in timestamp logic, max() returns the oldest
            return max(mr, maximum)
        else:
            return mr

    def oldestTS(self):
        """
        Returns least recent timestamp in track (maximum key)
        """

        # check that the tree has something
        if len(self._container) == 0:
            raise EmptyTrackException()

        return self._container.maxKey()

    def pointerIterValues(self, pid, till=None):
        """
        Iterates over the positions that are left (till the end of the track)
        for a given pointer (id) - iterates over values
        """
        return self._pointerIterator(pid, lambda x: x[1], till=till)

    def pointerIterItems(self, pid, till=None):
        """
        Iterates over the positions that are left (till the end of the track)
        for a given pointer (id) - iterates over key-value pairs (iteritems)
        """

        return self._pointerIterator(pid, lambda x: x, till=till)

    def is_empty(self):
        # Cheaper than len(self._container): BTree length is O(n), while
        # fetching the first item stops immediately.
        for __, ___ in self._container.iteritems():
            return False
        return True

    def movePointer(self, pid, pos):
        """
        Moves a given pointer (id) to a given timestamp
        """
        if pid not in self._pointers:
            raise KeyError("Pointer '%s' doesn't seem to exist!" % pid)

        # check that the tree has something
        if self.is_empty():
            raise EmptyTrackException()

        self._pointers[pid] = pos

    def __len__(self):
        """
        Returns the number of timestamp entries
        """
        return len(self._container)

    def __delitem__(self, item):
        """
        Deletes a given timestamp entry (or range)
        """
        self._container.__delitem__(item)

    def __iter__(self):
        """
        Iterates over the whole structure, element by elements
        (goes inside containers)
        """
        return self.iterate()

    def __contains__(self, ts):
        return timestamp(ts) in self._container
Example #6
0
class Topics(Persistent):
    """Ordered, persistent collection of topics, indexed newest-first.

    ``topics`` maps a float insertion timestamp -> topic and ``btopics``
    maps the topic's integer id (its ``__name__``) -> that timestamp, so
    a topic can be located and removed by id.  ``__len`` caches the size
    so ``len()`` does not scan the trees.
    """
    interface.implements(ITopics)

    def __init__(self):
        self.topics = OOBTree()
        self.btopics = IOBTree()
        self.__len = Length(0)

    def __len__(self):
        return self.__len()

    def __iter__(self):
        # Iterates over positional indices, not topics; __getitem__ maps
        # each index to a topic.
        return iter(xrange(len(self)))

    def __getitem__(self, key):
        # Non-negative indices count from the newest topic (largest
        # timestamp); negative indices count from the oldest.
        topics = self.topics
        keys = topics.keys()

        if isinstance(key, types.SliceType):
            length = len(self)

            start = key.start or 0
            stop = key.stop or length
            if stop > length:
                stop = length

            getitem = self.__getitem__
            return [getitem(idx) for idx in range(start, stop)]
        else:
            if key < 0:
                # -1 -> keys[0] (oldest), -2 -> keys[1], ...
                key = - key - 1
            else:
                # 0 -> keys[len-1] (newest), 1 -> keys[len-2], ...
                key = len(self) - key - 1

            return topics[keys[key]]

    def append(self, topic):
        """Add *topic*, replacing any existing entry with the same id."""
        self.remove(topic)

        id = int(topic.__name__)

        # Nudge the timestamp in 10-microsecond steps until it is unique.
        idx = time.time()
        while idx in self.topics:
            idx = idx + 0.00001

        self.topics[idx] = topic
        self.btopics[id] = idx
        self.__len.change(1)

    def remove(self, topic):
        """Remove *topic* by id; a no-op if it is not stored."""
        id = int(topic.__name__)

        if id in self.btopics:
            idx = self.btopics[id]
            del self.btopics[id]
            del self.topics[idx]
            self.__len.change(-1)

    def next(self, topic):
        """ return next topic """
        id = int(topic.__name__)

        idx = self.btopics.get(id)
        if idx is not None:
            # 'next' is the closest strictly-smaller timestamp, i.e. the
            # topic appended just before this one; None at the end.
            idx = idx - 0.000001

            try:
                idx = self.topics.maxKey(idx)
            except ValueError:
                return None

            return self.topics.get(idx)

    def previous(self, topic):
        """ return prev topic """
        id = int(topic.__name__)

        idx = self.btopics.get(id)
        if idx is not None:
            # 'previous' is the closest strictly-larger timestamp, i.e. the
            # topic appended just after this one; None at the start.
            idx = idx + 0.000001

            try:
                idx = self.topics.minKey(idx)
            except ValueError:
                return None

            return self.topics.get(idx)
Example #7
0
class _Records(object):
    """The records stored in the registry. This implements dict-like access
    to records, where as the Registry object implements dict-like read-only
    access to values.

    Fields and values are kept in two parallel BTrees keyed by record
    name; records are reconstructed on access in __getitem__.
    """
    __parent__ = None

    # Similar to zope.schema._field._isdotted, but allows up to one '/'
    _validkey = re.compile(r"([a-zA-Z][a-zA-Z0-9_-]*)"
                           r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
                           r"([/][a-zA-Z][a-zA-Z0-9_-]*)?"
                           r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
                           # use the whole line
                           r"$").match

    def __init__(self, parent):
        self.__parent__ = parent

        self._fields = OOBTree()
        self._values = OOBTree()

    def __setitem__(self, name, record):
        """Store *record* under *name* and emit RecordAddedEvent.

        Raises InvalidRegistryKey for a malformed name and ValueError for
        a non-IRecord value.
        """
        if not self._validkey(name):
            # BUGFIX: report the offending key, not the record object —
            # the exception is about the invalid *name*.
            raise InvalidRegistryKey(name)
        if not IRecord.providedBy(record):
            raise ValueError("Value must be a record")

        self._setField(name, record.field)
        self._values[name] = record.value

        record.__name__ = name
        record.__parent__ = self.__parent__

        notify(RecordAddedEvent(record))

    def __delitem__(self, name):
        """Remove the record stored under *name* and emit RecordRemovedEvent."""
        record = self[name]

        # unbind the record so that it won't attempt to look up values from
        # the registry anymore
        record.__parent__ = None

        del self._fields[name]
        del self._values[name]

        notify(RecordRemovedEvent(record))

    def __getitem__(self, name):
        """Reconstruct and return the record stored under *name*."""
        field = self._getField(name)
        value = self._values[name]

        # _validate=False: the stored value was validated when set
        record = Record(field, value, _validate=False)
        record.__name__ = name
        record.__parent__ = self.__parent__

        return record

    def get(self, name, default=None):
        """Return the record for *name*, or *default* if absent."""
        try:
            return self[name]
        except KeyError:
            return default

    def __nonzero__(self):
        return self._values.__nonzero__()

    def __len__(self):
        return self._values.__len__()

    def __iter__(self):
        return self._values.__iter__()

    def has_key(self, name):
        return self._values.__contains__(name)

    def __contains__(self, name):
        return self._values.__contains__(name)

    def keys(self, min=None, max=None):
        return self._values.keys(min, max)

    def maxKey(self, key=None):
        return self._values.maxKey(key)

    def minKey(self, key=None):
        return self._values.minKey(key)

    def values(self, min=None, max=None):
        return [self[name] for name in self.keys(min, max)]

    def items(self, min=None, max=None):
        return [(
            name,
            self[name],
        ) for name in self.keys(min, max)]

    def setdefault(self, key, value):
        """Store *value* under *key* unless present; return the stored record."""
        if key not in self:
            self[key] = value
        return self[key]

    def clear(self):
        self._fields.clear()
        self._values.clear()

    # Helper methods

    def _getField(self, name):
        """Return the field for *name*, resolving reference pointers."""
        field = self._fields[name]

        # Handle field reference pointers: a string entry names another
        # record whose field should be used; follow the chain to its end.
        if isinstance(field, basestring):
            recordName = field
            while isinstance(field, basestring):
                recordName = field
                field = self._fields[recordName]
            field = FieldRef(recordName, field)

        return field

    def _setField(self, name, field):
        """Store *field* under *name*; references are stored as pointers."""
        if not IPersistentField.providedBy(field):
            raise ValueError("The record's field must be an IPersistentField.")
        if IFieldRef.providedBy(field):
            if field.recordName not in self._fields:
                raise ValueError(
                    "Field reference points to non-existent record")
            self._fields[name] = field.recordName  # a pointer, of sorts
        else:
            field.__name__ = 'value'
            self._fields[name] = field
Example #8
0
class DataBucketStream(Document):
    """Represents data stored in many small files inside a "stream".

    Each file (a "bucket") is addressed by its key, similar to a dict.
    A secondary LOBTree maps a monotonically increasing integer index
    (insertion order) to bucket keys, so buckets can also be addressed
    by position.
    """

    meta_type = 'ERP5 Data Bucket Stream'
    portal_type = 'Data Bucket Stream'
    add_permission = Permissions.AddPortalContent

    # Declarative security
    security = ClassSecurityInfo()
    security.declareObjectProtected(Permissions.AccessContentsInformation)

    # Declarative properties
    property_sheets = (PropertySheet.CategoryCore, PropertySheet.SortIndex)

    def __init__(self, id, **kw):
        self.initBucketTree()
        self.initIndexTree()
        Document.__init__(self, id, **kw)

    def __len__(self):
        """Return the number of buckets in the stream."""
        return len(self._tree)

    def initBucketTree(self):
        """Initialize (or reset) the key -> bucket tree."""
        self._tree = OOBTree()

    def initIndexTree(self):
        """Initialize (or reset) the insertion-index -> key tree."""
        self._long_index_tree = LOBTree()

    def getMaxKey(self, key=None):
        """Return the maximum key (<= ``key`` when given), or None when
        there are no buckets.
        """
        try:
            return self._tree.maxKey(key)
        except ValueError:
            return None

    def getMaxIndex(self, index=None):
        """Return the maximum index (<= ``index`` when given), or None when
        there are no index entries.
        """
        try:
            return self._long_index_tree.maxKey(index)
        except ValueError:
            return None

    def getMinKey(self, key=None):
        """Return the minimum key (>= ``key`` when given), or None when
        there are no buckets.
        """
        try:
            return self._tree.minKey(key)
        except ValueError:
            return None

    def getMinIndex(self, index=None):
        """Return the minimum index (>= ``index`` when given), or None when
        there are no index entries.
        """
        try:
            return self._long_index_tree.minKey(index)
        except ValueError:
            return None

    def _getOb(self, id, *args, **kw):
        # Buckets are not Zope sub-objects; never resolve them by traversal.
        return None

    def getBucketByKey(self, key=None):
        """Return the payload of the bucket stored under ``key``."""
        return self._tree[key].value

    def getBucketByIndex(self, index=None):
        """Return the payload of the bucket stored at insertion ``index``."""
        key = self._long_index_tree[index]
        # BUG FIX: getBucketByKey() already unwraps the PersistentString
        # (it returns ``.value``); dereferencing ``.value`` a second time
        # raised AttributeError on the returned payload.
        return self.getBucketByKey(key)

    def getBucket(self, key):
        log('DeprecationWarning: Please use getBucketByKey')
        return self.getBucketByKey(key)

    def hasBucketKey(self, key):
        """Whether a bucket with such key exists."""
        return key in self._tree

    def hasBucketIndex(self, index):
        """Whether a bucket with such index exists."""
        return self._long_index_tree.has_key(index)

    def insertBucket(self, key, value):
        """Insert one bucket under ``key``.

        A new insertion-order index entry is always appended, even when
        ``key`` already exists (re-ingestion overwrites the payload).
        """
        try:
            count = self._long_index_tree.maxKey() + 1
        except ValueError:
            # Empty index tree: start numbering at 0.
            count = 0
        except AttributeError:
            # Old instances may predate the index tree; skip indexing.
            pass
        try:
            self._long_index_tree.insert(count, key)
        except AttributeError:
            pass
        value = PersistentString(value)
        is_new_key = self._tree.insert(key, value)
        if not is_new_key:
            self.log("Reingestion of same key")
            self._tree[key] = value

    def getBucketKeySequenceByKey(self,
                                  start_key=None,
                                  stop_key=None,
                                  count=None,
                                  exclude_start_key=False,
                                  exclude_stop_key=False):
        """Return a lazy sequence of bucket keys, bounded by key."""
        sequence = self._tree.keys(min=start_key,
                                   max=stop_key,
                                   excludemin=exclude_start_key,
                                   excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequenceByIndex(self,
                                    start_index=None,
                                    stop_index=None,
                                    count=None,
                                    exclude_start_index=False,
                                    exclude_stop_index=False):
        """Return a lazy sequence of bucket keys, bounded by index."""
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketKeySequenceByKey')
        return self.getBucketKeySequenceByKey(start_key=start_key, count=count)

    def getBucketIndexKeySequenceByIndex(self,
                                         start_index=None,
                                         stop_index=None,
                                         count=None,
                                         exclude_start_index=False,
                                         exclude_stop_index=False):
        """Return a lazy sequence of (index, key) pairs, bounded by index."""
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeySequence(self, sequence)

    def getBucketIndexSequenceByIndex(self,
                                      start_index=None,
                                      stop_index=None,
                                      count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """Return a lazy sequence of bucket indexes, bounded by index."""
        sequence = self._long_index_tree.keys(min=start_index,
                                              max=stop_index,
                                              excludemin=exclude_start_index,
                                              excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByKey(self,
                                    start_key=None,
                                    stop_key=None,
                                    count=None,
                                    exclude_start_key=False,
                                    exclude_stop_key=False):
        """Return a lazy sequence of bucket values, bounded by key."""
        sequence = self._tree.values(min=start_key,
                                     max=stop_key,
                                     excludemin=exclude_start_key,
                                     excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByIndex(self,
                                      start_index=None,
                                      stop_index=None,
                                      count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """Return a lazy sequence of bucket values, bounded by index."""
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexValueSequence(self, sequence)

    def getBucketValueSequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketValueSequenceByKey')
        return self.getBucketValueSequenceByKey(start_key=start_key,
                                                count=count)

    def getBucketKeyItemSequenceByKey(self,
                                      start_key=None,
                                      stop_key=None,
                                      count=None,
                                      exclude_start_key=False,
                                      exclude_stop_key=False):
        """Return a lazy sequence of (key, bucket) items, bounded by key."""
        sequence = self._tree.items(min=start_key,
                                    max=stop_key,
                                    excludemin=exclude_start_key,
                                    excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketItemSequence(self,
                              start_key=None,
                              count=None,
                              exclude_start_key=False):
        log('DeprecationWarning: Please use getBucketKeyItemSequenceByKey')
        return self.getBucketKeyItemSequenceByKey(
            start_key=start_key,
            count=count,
            exclude_start_key=exclude_start_key)

    def getBucketIndexItemSequenceByIndex(self,
                                          start_index=None,
                                          stop_index=None,
                                          count=None,
                                          exclude_start_index=False,
                                          exclude_stop_index=False):
        """Return a lazy sequence of (index, bucket) items, bounded by index."""
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexItemSequence(self, sequence)

    def getBucketIndexKeyItemSequenceByIndex(self,
                                             start_index=None,
                                             stop_index=None,
                                             count=None,
                                             exclude_start_index=False,
                                             exclude_stop_index=False):
        """Return a lazy sequence of (index, key, bucket) items, bounded by index."""
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeyItemSequence(self, sequence)

    def getItemList(self):
        """Return a list of all (key, bucket) pairs."""
        return list(self._tree.items())

    def getKeyList(self):
        """Return a list of all keys."""
        return list(self._tree.keys())

    def getIndexList(self):
        """Return a list of all indexes."""
        return list(self._long_index_tree.keys())

    def getIndexKeyTupleList(self):
        """Return a list of all (index, key) pairs."""
        return list(self._long_index_tree.items())

    def getMd5sum(self, key):
        """Return the MD5 hexdigest of the bucket stored under ``key``."""
        h = hashlib.md5()
        h.update(self.getBucketByKey(key))
        return h.hexdigest()

    def delBucketByKey(self, key):
        """Remove the bucket stored under ``key`` and all of its index
        entries (re-ingestion can leave several indexes per key).
        """
        del self._tree[key]
        for index, my_key in list(self.getBucketIndexKeySequenceByIndex()):
            if my_key == key:
                del self._long_index_tree[index]

    def delBucketByIndex(self, index):
        """Remove the bucket addressed by insertion ``index``."""
        key = self._long_index_tree[index]
        del self._tree[key]
        del self._long_index_tree[index]

    def rebuildIndexTreeByKeyOrder(self):
        """Clear and rebuild the index tree, renumbering by key order."""
        self.initIndexTree()
        for count, key in enumerate(self.getBucketKeySequenceByKey()):
            self._long_index_tree.insert(count, key)
# Exemple #9
# 0
class SingleValueIndex(Index):
    """An `index <Index>` where each key may only point to a single value."""

    accepts_multiple_values = False

    @overrides(Index.__init__)
    def __init__(self, pairs = None):
        # Two parallel trees: one sorted ascending by key, one keyed by
        # Descending(key) so reverse iteration is also efficient.
        self.__items = OOBTree()
        self.__descending_items = OOBTree()
        Index.__init__(self, pairs)

    @overrides(Index.add)
    def add(self, key, value):
        """Associate ``key`` with ``value``, replacing any previous value."""
        if key is None:
            raise ValueError("Can't use None as a key for a SingleValueIndex")

        self.__items[key] = value
        self.__descending_items[Descending(key)] = value

    @overrides(Index.remove)
    def remove(self, key, value = undefined):
        """Remove ``key`` from the index.

        When ``value`` is given, the entry is only removed if the stored
        value equals it.
        """
        if value is not undefined \
        and self.get(key, undefined) != value:
            return

        # BUG FIX: deletion must happen both when no value filter was given
        # and when the given value matched (the original only deleted in the
        # former case). Additionally, the descending tree is keyed by
        # Descending(key) -- see add() -- so deleting the raw key there
        # always raised KeyError, leaking entries and corrupting max_key().
        try:
            del self.__items[key]
            del self.__descending_items[Descending(key)]
        except KeyError:
            pass

    @overrides(Index.items)
    def items(
        self,
        min = undefined,
        max = undefined,
        exclude_min = False,
        exclude_max = False,
        descending = False
    ):
        """Yield (key, value) pairs within the given bounds, optionally in
        descending key order.
        """
        min = self.__boundary(min, descending)
        max = self.__boundary(max, descending)

        if descending:
            # The descending tree sorts by Descending(key), so the bounds
            # and their exclusion flags swap roles.
            min, max = max, min
            exclude_min, exclude_max = exclude_max, exclude_min

            for desc_key, value in self.__descending_items.iteritems(
                min = min,
                max = max,
                excludemin = exclude_min,
                excludemax = exclude_max
            ):
                yield (desc_key.value, value)
        else:
            for pair in self.__items.iteritems(
                min = min,
                max = max,
                excludemin = exclude_min,
                excludemax = exclude_max
            ):
                yield pair

    def __boundary(self, boundary, descending):
        # Normalize an ``undefined`` bound to None and wrap real bounds in
        # Descending() when iterating the descending tree.
        if boundary is undefined:
            return None
        return Descending(boundary) if descending else boundary

    @overrides(Index.min_key)
    def min_key(self, exclude_none = False):
        """Return the smallest key; with ``exclude_none``, the smallest
        non-None key (or None implicitly when every key is None).
        """
        if exclude_none:
            for key in self.keys():
                if key is not None:
                    return key
        else:
            return self.__items.minKey()

    @overrides(Index.max_key)
    def max_key(self):
        """Return the largest key (the smallest Descending-wrapped one)."""
        return self.__descending_items.minKey().value

    @overrides(Index.__len__)
    def __len__(self):
        return len(self.__items)

    @overrides(Index.__bool__)
    def __bool__(self):
        return bool(self.__items)

    @overrides(Index.__contains__)
    def __contains__(self, key):
        return key in self.__items

    def __getitem__(self, key):
        """Get the value for the specified key.

        :param key: The key to retrieve the value for.

        :return: The value for the specified key.

        :raise KeyError: Raised if the indicated key isn't present in the
            index.
        """
        if isinstance(key, slice):
            raise ValueError(
                "Slicing an index is not supported; use keys()/values() "
                "instead")
        else:
            return self.__items[key]

    def get(self, key, default = None):
        """Get the value for the specified key, returning `default` if the key
        is undefined.

        :param key: The key to retrieve the value for.
        :param default: The value that should be returned if the key is not
            defined by the index.

        :return: The value for the specified key.
        """
        return self.__items.get(key, default)
# Exemple #10
# 0
class _Records(object):
    """The records stored in the registry. This implements dict-like access
    to records, where as the Registry object implements dict-like read-only
    access to values.
    """
    __parent__ = None

    # Similar to zope.schema._field._isdotted, but allows up to one '/'
    _validkey = re.compile(
        r"([a-zA-Z][a-zA-Z0-9_-]*)"
        r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
        r"([/][a-zA-Z][a-zA-Z0-9_-]*)?"
        r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
        # use the whole line
        r"$").match

    def __init__(self, parent):
        self.__parent__ = parent

        # _fields maps record name -> persistent field, or a plain string
        # naming another record when the field is a reference pointer.
        # _values maps record name -> stored value.
        self._fields = OOBTree()
        self._values = OOBTree()

    def __setitem__(self, name, record):
        """Store ``record`` under ``name`` and fire RecordAddedEvent."""
        if not self._validkey(name):
            # NOTE(review): the record (not the offending name) is passed to
            # InvalidRegistryKey -- confirm this is intended.
            raise InvalidRegistryKey(record)
        if not IRecord.providedBy(record):
            raise ValueError("Value must be a record")

        # Store the field first (it validates), then the value.
        self._setField(name, record.field)
        self._values[name] = record.value

        # Bind the record to the registry so it resolves live values.
        record.__name__ = name
        record.__parent__ = self.__parent__

        notify(RecordAddedEvent(record))

    def __delitem__(self, name):
        """Remove the record ``name`` and fire RecordRemovedEvent."""
        record = self[name]

        # unbind the record so that it won't attempt to look up values from
        # the registry anymore
        record.__parent__ = None

        del self._fields[name]
        del self._values[name]

        notify(RecordRemovedEvent(record))

    def __getitem__(self, name):
        """Build and return a bound Record for ``name``.

        Records are reconstructed on every access from the stored field
        and value; validation is skipped since the data was validated on
        the way in.
        """
        field = self._getField(name)
        value = self._values[name]

        record = Record(field, value, _validate=False)
        record.__name__ = name
        record.__parent__ = self.__parent__

        return record

    def get(self, name, default=None):
        """Return the record for ``name``, or ``default`` when absent."""
        try:
            return self[name]
        except KeyError:
            return default

    def __nonzero__(self):
        # Python 2 truth protocol: falsy exactly when no records are stored.
        return self._values.__nonzero__()

    def __len__(self):
        return self._values.__len__()

    def __iter__(self):
        return self._values.__iter__()

    def has_key(self, name):
        # Legacy dict API kept for backward compatibility.
        return self._values.__contains__(name)

    def __contains__(self, name):
        return self._values.__contains__(name)

    def keys(self, min=None, max=None):
        # BTree range API: min/max bound the returned record names.
        return self._values.keys(min, max)

    def maxKey(self, key=None):
        return self._values.maxKey(key)

    def minKey(self, key=None):
        return self._values.minKey(key)

    def values(self, min=None, max=None):
        # Each access rebuilds a bound Record via __getitem__.
        return [self[name] for name in self.keys(min, max)]

    def items(self, min=None, max=None):
        return [(name, self[name],) for name in self.keys(min, max)]

    def setdefault(self, key, value):
        """Store ``value`` under ``key`` when absent; return the record."""
        if key not in self:
            self[key] = value
        return self[key]

    def clear(self):
        """Drop every stored field and value."""
        self._fields.clear()
        self._values.clear()

    # Helper methods

    def _getField(self, name):
        """Resolve ``name`` to its field, following reference pointers."""
        field = self._fields[name]

        # Handle field reference pointers
        if isinstance(field, basestring):
            recordName = field
            while isinstance(field, basestring):
                recordName = field
                field = self._fields[recordName]
            field = FieldRef(recordName, field)

        return field

    def _setField(self, name, field):
        """Validate ``field`` and store it (references become pointers)."""
        if not IPersistentField.providedBy(field):
            raise ValueError("The record's field must be an IPersistentField.")
        if IFieldRef.providedBy(field):
            if field.recordName not in self._fields:
                raise ValueError(
                    "Field reference points to non-existent record"
                )
            self._fields[name] = field.recordName  # a pointer, of sorts
        else:
            field.__name__ = 'value'
            self._fields[name] = field
class EventIndex(SimpleItem):
    """ZCatalog index for (possibly recurring) events.

    Events are indexed by start and end datetimes; recurring events are
    expanded from their recurrence rule at query time.
    """

    implements(IPluggableIndex)

    meta_type = "EventIndex"

    manage_options = (
        {'label': 'Settings',
         'action': 'manage_main'},
    )

    manage = manage_main = DTMLFile('www/manageEventIndex', globals())
    manage_main._setName('manage_main')

    def __init__(self, id, extra=None, caller=None):
        self._id = id
        # Attribute names read from indexed objects, with sensible defaults.
        self.start_attr = extra and extra['start_attr'] or 'start'
        self.end_attr = extra and extra['end_attr'] or 'end'
        self.recurrence_attr = extra and extra['recurrence_attr'] or 'recurrence'
        self.clear()

    def clear(self):
        """Empty the index"""
        self._length = Length()
        self._end2uid = OOBTree()
        self._start2uid = OOBTree()
        self._uid2end = IOBTree()  # Contains the index used in _end2uid
        self._uid2duration = IOBTree()  # Contains the duration
        self._uid2start = IOBTree()
        self._uid2recurrence = IOBTree()

    def getId(self):
        """Return Id of index."""
        return self._id

    def getEntryForObject(self, documentId, default=''):
        """Get all information contained for 'documentId'."""
        uid2start = self._uid2start.get(documentId)
        # Explicit None check: a stored time tuple is always truthy; the
        # question is whether the document is indexed at all.
        if uid2start is not None:
            return {
                'start': uid2start,
                'end': self._uid2end[documentId],
                'recurrence': self._uid2recurrence[documentId],
                'duration': self._uid2duration[documentId]
            }
        else:
            return default

    def getIndexSourceNames(self):
        """Get a sequence of attribute names that are indexed by the index.
        """
        return self.start_attr, self.end_attr, self.recurrence_attr

    def _getattr(self, name, obj):
        """Read attribute ``name`` from ``obj``, calling it if callable and
        normalizing Zope DateTime values to naive UTC datetimes.
        """
        attr = getattr(obj, name, None)
        if callable(attr):
            attr = attr()

        if isinstance(attr, DateTime):
            attr = attr.utcdatetime()
        return attr

    def index_object(self, documentId, obj, threshold=None):
        """Index an object.

        - ``documentId`` is the integer ID of the document.

        - ``obj`` is the object to be indexed.

        - ``threshold`` is the number of words to process between committing
          subtransactions.  If None, subtransactions are disabled.

        For each name in ``getIndexSourceNames``, try to get the named
        attribute from ``obj``.

        - If the object does not have the attribute, do not add it to the
          index for that name.

        - If the attribute is a callable, call it to get the value.  If
          calling it raises an AttributeError, do not add it to the index.
          for that name.
        """
        # Clear the data structures before indexing the object. This will ensure
        # we don't leave any stale data behind when an object gets reindexed.
        self.unindex_object(documentId)

        ### 1. Get the values.
        start = self._getattr(self.start_attr, obj)
        end = self._getattr(self.end_attr, obj)
        if start is None:
            # Ignore calls if the obj does not have the start field.
            return False

        if end is None:
            # Singular event
            end = start

        recurrence = self._getattr(self.recurrence_attr, obj)
        if not recurrence:
            rule = None
        elif isinstance(recurrence, basestring):
            # XXX trap and log errors
            rule = rrule.rrulestr(recurrence, dtstart=start)
        elif isinstance(recurrence, rrule.rrulebase):
            rule = recurrence
        else:
            # XXX Log error
            rule = None

        # Strip out times from the recurrence:
        if rule is not None:
            sync_timezone(rule, start.tzinfo)

        ### 2. Make them into what should be indexed.
        # XXX Naive events are not comparable to timezoned events, so we convert
        # everything to utctimetuple(). This means naive events are assumed to
        # be GMT, but we can live with that at the moment.
        start_value = start.utctimetuple()
        end_value = end.utctimetuple()

        # The end value should be the end of the recurrence, if any:
        if rule is not None:
            if is_open_ended(rule):
                # This recurrence is open ended
                end_value = None
            else:
                duration = end - start
                allrecs = [x for x in rule._iter()]
                if allrecs:
                    last = allrecs[-1] + duration
                else:
                    # Real data may have invalid recurrence rules,
                    # which end before the start for example.
                    # Then we end up here.
                    last = end
                end_value = last.utctimetuple()

        ### 3. Store everything in the indexes:
        row = self._start2uid.get(start_value, None)
        if row is None:
            row = IITreeSet((documentId,))
            self._start2uid[start_value] = row
        else:
            row.insert(documentId)

        row = self._end2uid.get(end_value, None)
        if row is None:
            row = IITreeSet((documentId,))
            self._end2uid[end_value] = row
        else:
            row.insert(documentId)

        self._uid2start[documentId] = start_value
        self._uid2recurrence[documentId] = rule
        self._uid2end[documentId] = end_value
        self._uid2duration[documentId] = end - start

        return True

    def _remove_id(self, documentId, from_uid, to_uid):
        """Remove documentId based on point.
        Helper method for unindex_object method.

        :param documentId: Integer
        :type documentId: int

        :param from_uid: uid -> point mapping to pop the document from
        :type from_uid: IOBTree

        :param to_uid: point -> uid-set mapping to prune
        :type to_uid: OOBTree
        """
        fuid = from_uid.pop(documentId, 'No ID found')
        row = to_uid.get(fuid)
        if row is not None:
            if documentId in row:
                row.remove(documentId)
            # Drop empty sets so the point-indexed trees stay compact.
            if len(row) == 0:
                to_uid.pop(fuid, 'Not Found')

    def unindex_object(self, documentId):
        """Remove the documentId from the index."""
        self._remove_id(documentId, self._uid2start, self._start2uid)
        self._remove_id(documentId, self._uid2end, self._end2uid)
        self._uid2duration.pop(documentId, 'No ID found')
        self._uid2recurrence.pop(documentId, 'No ID found')

    def _get_position(self, request, position):
        """Get position from certain ID.

        :param request: Request
        :type request: object

        :param position: start or end
        :type position: str
        """
        pos = request[self._id].get(position)
        if isinstance(pos, DateTime):
            pos = pos.utcdatetime()
        return pos

    def _finalize_index(self, result, start, end, used_fields):
        """Expand recurrence rules to filter ``result`` down to documents
        with at least one occurrence inside the queried period.

        Returns the filtered set and the (possibly extended) used_fields.
        """
        filtered_result = IITreeSet()
        used_recurrence = False

        for documentId in result:
            recurrence = self._uid2recurrence.get(documentId)
            if recurrence is None:
                # This event isn't recurring, so it's a match:
                filtered_result.add(documentId)
                continue

            used_recurrence = True
            match = False
            # This is a possible place where optimizations can be done if
            # necessary. For example, for periods where the start and end
            # date is the same, we can first check if the start time and
            # end time of the date falls inbetween the start and end times
            # of the period, so as to avoid expansion. But most likely this
            # will have a very small impact on speed, so I skip this until
            # it actually becomes a problem.

            if start is not None:
                event_start = datetime(*self._uid2start[documentId][:6])
            else:
                event_start = None
            if end is not None:
                # NOTE(review): when ``start`` is None but ``end`` is given,
                # event_start is None and this addition raises TypeError --
                # confirm callers never hit that combination.
                event_duration = self._uid2duration[documentId]
                event_end = event_start + event_duration
            else:
                event_end = None

            for occurrence in recurrence._iter():
                utc_occurrence = datetime(*occurrence.utctimetuple()[:6])
                if event_start is not None and utc_occurrence < event_start:
                    # XXX we should add a counter and break after 10000 occurrences.
                    continue
                if event_end is not None and utc_occurrence > event_end:
                    break

                # The start of this occurrence starts between the start and end date of
                # the query:
                match = True
                break

            if match:
                filtered_result.add(documentId)

        # BUG FIX: this used to run inside the per-document loop, appending
        # recurrence_attr to used_fields once per recurring document; the
        # commented-out ``used_recurrence`` remnants showed the intent was a
        # single append after the loop.
        if used_recurrence:
            used_fields += (self.recurrence_attr,)
        return filtered_result, used_fields

    def _apply_index(self, request, resultset=None):
        """Apply the index to query parameters given in 'request'.

        The argument should be a mapping object.

        If the request does not contain the needed parameters, then
        None is returned.

        If the request contains a parameter with the name of the
        column and this parameter is either a Record or a class
        instance then it is assumed that the parameters of this index
        are passed as attribute (Note: this is the recommended way to
        pass parameters since Zope 2.4)

        Otherwise two objects are returned.  The first object is a
        ResultSet containing the record numbers of the matching
        records.  The second object is a tuple containing the names of
        all data fields used.

        The resultset argument contains the resultset, as already calculated by
        ZCatalog's search method.
        """
        if not request.has_key(self._id):  # 'in' doesn't work with this object
            return IITreeSet(self._uid2end.keys()), ()

        start = self._get_position(request, 'start')
        end = self._get_position(request, 'end')

        used_fields = ()

        # We don't want the events who end before the start. In other
        # words we want to find those events whose end >= the start query,
        # or None as None means they have infinite recurrence.
        try:
            maxkey = self._end2uid.maxKey()
        except ValueError:  # No events at all
            return IITreeSet(), used_fields

        if start is None or maxkey is None:
            # start is None, so we need to search right from the start; or
            # (amazingly) all events have infinite recurrence.
            # This means we must return *all* uids.
            start_uids = IITreeSet(self._uid2end.keys())
        else:
            # NOTE(review): the start query filters on event *end* dates but
            # reports start_attr as the used field -- confirm intended.
            used_fields += (self.start_attr,)
            start = start.utctimetuple()
            try:
                minkey = self._end2uid.minKey(start)
                # Events that end on exactly the same same time as the
                # search period start should not be included:
                if minkey == start:
                    excludemin = True
                else:
                    excludemin = False

                start_uids = multiunion(self._end2uid.values(minkey, maxkey, excludemin=excludemin))

            except ValueError:
                # No ending events
                start_uids = IITreeSet()

            # Include open ended events, if any
            if self._end2uid.has_key(None):
                start_uids = union(start_uids, self._end2uid[None])

        # XXX At this point an intersection with the resultset might be
        # beneficial. It would stop us from calculating the recurrence
        # of ids that won't be returned. It could be done after the
        # intersection with end_uids below as well, performance tests will tell.

        # We also do not want the events whose start come after the end query.
        # In other words, we find all events where start <= end.
        if end is not None:
            end = end.utctimetuple()
            try:
                minkey = self._start2uid.minKey()
                end_uids = multiunion(self._start2uid.values(minkey, end))
                used_fields += (self.end_attr,)

            except ValueError:
                # No events
                return IITreeSet(), used_fields

            result = intersection(start_uids, end_uids)
        else:
            # No end specified, take all:
            result = start_uids

        return self._finalize_index(result, start, end, used_fields)

    def numObjects(self):
        """Return the number of indexed objects."""
        return len(self._uid2start)
# Exemple #12
# 0
class fsIndex(object):
    """Space-efficient mapping from byte-string keys to integer values.

    Each key is split into a 6-byte prefix and the remaining suffix.
    Prefixes key an OOBTree whose values are fsBucket objects mapping
    suffix -> packed value string; num2str()/str2num() convert between
    the packed string representation and Python integers.
    """

    def __init__(self):
        self._data = OOBTree()

    def __getitem__(self, key):
        # Raises KeyError (from either lookup) if key is absent.
        return str2num(self._data[key[:6]][key[6:]])

    def get(self, key, default=None):
        """Return the value for *key*, or *default* if absent."""
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # First entry for this prefix: create its bucket lazily.
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # raise KeyError(key) works under both Python 2 and 3;
            # the original `raise KeyError, key` is Python-2-only syntax.
            raise KeyError(key)
        del tree[key[6:]]
        if not tree:
            # Prune empty buckets so minKey()/maxKey() below can rely on
            # every bucket in _data being non-empty.
            del self._data[treekey]

    def __len__(self):
        # Sum of all per-prefix bucket sizes.
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        """Insert every (key, value) pair from *mapping*."""
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # `self` is used as a can't-collide sentinel so that any stored
        # value (including None/0) counts as present.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        """Remove all entries."""
        self._data.clear()

    def __iter__(self):
        # Yield full keys (prefix + suffix) in sorted order.
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure:  what if `tree` is actually empty?  It can't be:
    # __delitem__ above removes a prefix's bucket as soon as its last
    # entry is deleted, so nothing in _data.values() is ever empty.
    # (The original comment claimed the invariant held because the class
    # lacked __delitem__; that was stale — this class defines one.)
    #
    # Note that because `tree` supports minKey()/maxKey() with an
    # argument, these lookups are efficient.

    def minKey(self, key=None):
        """Return the smallest key, or the smallest key >= *key*.

        Raises ValueError (propagated from the BTree) when no such key
        exists.
        """
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])

        tree = self._data[smallest_prefix]

        assert tree

        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError: # 'empty tree' (no suffix >= arg)
                # No qualifying suffix under this prefix; fall over to
                # the next prefix and take its smallest suffix.
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()

        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        """Return the largest key, or the largest key <= *key*.

        Raises ValueError (propagated from the BTree) when no such key
        exists.
        """
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])

        tree = self._data[biggest_prefix]

        assert tree

        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError: # 'empty tree' (no suffix <= arg)
                # No qualifying suffix under this prefix; fall back to
                # the previous prefix and take its largest suffix.
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()

        return biggest_prefix + biggest_suffix
Exemple #13
0
class fsIndex(object):
    """Space-efficient mapping from byte-string keys to integer values.

    Each key is split into a 6-byte prefix and the remaining suffix.
    Prefixes key an OOBTree whose values are fsBucket objects mapping
    suffix -> packed value string; num2str()/str2num() convert between
    the packed string representation and Python integers.

    NOTE: this variant deliberately has no __delitem__; minKey()/maxKey()
    below rely on that (see the comment before them).
    """

    def __init__(self):
        self._data = OOBTree()

    def __getitem__(self, key):
        # Raises KeyError (from either lookup) if key is absent.
        return str2num(self._data[key[:6]][key[6:]])

    def get(self, key, default=None):
        """Return the value for *key*, or *default* if absent."""
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # First entry for this prefix: create its bucket lazily.
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __len__(self):
        # Sum of all per-prefix bucket sizes.
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        """Insert every (key, value) pair from *mapping*."""
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # `self` is used as a can't-collide sentinel so that any stored
        # value (including None/0) counts as present.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        """Remove all entries."""
        self._data.clear()

    def __iter__(self):
        # Yield full keys (prefix + suffix) in sorted order.
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure:  what if `tree` is actually empty?  We're relying here on
    # that this class doesn't implement __delitem__:  once a key gets
    # into an fsIndex, the only way it can go away is by invoking
    # clear().  Therefore nothing in _data.values() is ever empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey() methods are
    # very efficient.

    def minKey(self, key=None):
        """Return the smallest key, or the smallest key >= *key*.

        Raises ValueError (propagated from the BTree) when no such key
        exists.
        """
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])

        tree = self._data[smallest_prefix]

        assert tree

        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError: # 'empty tree' (no suffix >= arg)
                # No qualifying suffix under this prefix; fall over to
                # the next prefix and take its smallest suffix.
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()

        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        """Return the largest key, or the largest key <= *key*.

        Raises ValueError (propagated from the BTree) when no such key
        exists.
        """
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])

        tree = self._data[biggest_prefix]

        assert tree

        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError: # 'empty tree' (no suffix <= arg)
                # No qualifying suffix under this prefix; fall back to
                # the previous prefix and take its largest suffix.
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()

        return biggest_prefix + biggest_suffix