Example #1
 def __init__(self, hostname, description='', connection=None):
     self.hostname = hostname
     self.description = description
     self.connection = connection
     self.jobs = LOBTree()
     self.archives = OOBTree()
     self.job_configs = PersistentList()
     data_root().clients[hostname] = self
Example #2
    def clear(self):
        self.tagsmap = LOBTree()

        # index
        self.tag_oids = LOBTree()
        self.oid_tags = LOBTree()

        # tag weight
        self.weights = OOSet()
        self.tag_weight = LFBTree()
Example #3
    def __init__(self, id="++conversation++default"):
        self.id = id

        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()

        # id -> comment - find comment by id
        self._comments = LOBTree()

        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()
Example #4
    def __init__(self, id="++conversation++default"):
        self.id = id

        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()

        # id -> comment - find comment by id
        self._comments = LOBTree()

        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()
Example #5
class CompassTool(SimpleItem):

    # XXX: all time values here should be stored as UTC,
    # and converted back into the proper "local" timezone
    # (which might differ per request) upon extraction from the tool.
    # But Zope and Plone support for all this must be investigated.

    def __init__(self):
        self.data = LOBTree()

    def __getitem__(self, tstamp):
        tstamp = long(tstamp)
        return self.data[tstamp]

    def __contains__(self, tstamp):
        return long(tstamp) in self.data

    def __len__(self):
        return len(self.data.keys())

    def keys(self, start, step, descending=True):
        # WARNING: I'm totally relying on the output of keys() to be sorted,
        # which is the case, but I couldn't find any formal guarantee
        raw_keys = self.data.keys()
        slice_ = []
        if descending:
            if start == 0:
                slice_ = raw_keys[-(start + step):]
            else:
                slice_ = raw_keys[-(start + step):-(start)]
            slice_ = [i for i in slice_]
            slice_.reverse()
        else:
            slice_ = raw_keys[start:start + step]
        return [k for k in slice_]

    def max_key(self):
        try:
            return self.data.maxKey()
        except ValueError:
            return None

    def add(self, data):
        now = long(time.time())
        self.data[now] = data
        return now

    def remove(self, tstamp):
        tstamp = long(tstamp)
        if tstamp in self.data:
            del self.data[tstamp]
Example #6
class CompassTool(SimpleItem):

    # XXX: all time values here should be stored as UTC,
    # and converted back into the proper "local" timezone
    # (which might differ per request) upon extraction from the tool.
    # But Zope and Plone support for all this must be investigated.

    def __init__(self):
        self.data = LOBTree()

    def __getitem__(self, tstamp):
        tstamp = long(tstamp)
        return self.data[tstamp]

    def __contains__(self, tstamp):
        return long(tstamp) in self.data

    def __len__(self):
        return len(self.data.keys())

    def keys(self, start, step, descending=True):
        # WARNING: I'm totally relying on the output of keys() to be sorted,
        # which is the case, but I couldn't find any formal guarantee
        raw_keys = self.data.keys()
        slice_ = []
        if descending:
            if start == 0:
                slice_ = raw_keys[-(start + step) :]
            else:
                slice_ = raw_keys[-(start + step) : -(start)]
            slice_ = [i for i in slice_]
            slice_.reverse()
        else:
            slice_ = raw_keys[start : start + step]
        return [k for k in slice_]

    def max_key(self):
        try:
            return self.data.maxKey()
        except ValueError:
            return None

    def add(self, data):
        now = long(time.time())
        self.data[now] = data
        return now

    def remove(self, tstamp):
        tstamp = long(tstamp)
        if tstamp in self.data:
            del self.data[tstamp]
Example #7
 def __init__(self,
              name,
              url,
              description='',
              repository_id='',
              remote_borg='borg'):
     self.name = name
     self.url = url
     self.description = description
     self.repository_id = repository_id
     self.remote_borg = remote_borg
     self.jobs = LOBTree()
     self.archives = OOBTree()
     self.job_configs = PersistentList()
Example #8
class Client(Evolvable):
    version = 2

    @evolve(1, 2)
    def add_job_configs(self):
        self.job_configs = PersistentList()

    def __init__(self, hostname, description='', connection=None):
        self.hostname = hostname
        self.description = description
        self.connection = connection
        self.jobs = LOBTree()
        self.archives = OOBTree()
        self.job_configs = PersistentList()
        data_root().clients[hostname] = self

    def latest_job(self):
        try:
            return self.jobs[self.jobs.maxKey()]
        except ValueError:
            return

    class Form(forms.Form):
        hostname = forms.CharField(validators=[slug_validator])
        description = forms.CharField(widget=forms.Textarea,
                                      required=False,
                                      initial='')
Example #9
    def __init__(self, database):
        self.db = database

        if "users" not in self.db.db_root:
            self.db.db_root["users"] = LOBTree()

        self.users = self.db.db_root["users"]
Example #10
class Tasks(BaseFolder):
    """ Container for tasks. """
    implements(ITasks)
    allowed_contexts = () #Not manually addable
    content_type = 'Tasks'
    display_name = _(u"Tasks")
    custom_accessors = {'title':'get_title'}
    

    def __init__(self, data=None, **kwargs):
        super(Tasks, self).__init__(data=data, **kwargs)
        self.__task_ids__ = LOBTree()

    def get_title(self, default='', key=None):
        return self.display_name
    
    def mark_task_id(self):
        if len(self.__task_ids__) == 0:
            id = 1 #Start at 1
        else:
            id = self.__task_ids__.maxKey()+1

        suggest_name = unicode(id)
        self.add_task_id(id, suggest_name)
        return suggest_name
    
    def add_task_id(self, id, value):
        if id in self.__task_ids__:
            raise ValueError("id %s already exist in %s" % (id, self))
        self.__task_ids__[id] = value
Example #11
 def log_storage(self):
     try:
         return self.context.__log_storage__
     except AttributeError:
         #For speed
         self.context.__log_storage__ = LOBTree()
         return self.context.__log_storage__
Example #12
    def __init__(self, database):
        self.db = database

        if "games" not in self.db.db_root:
            self.db.db_root["games"] = LOBTree()

        self.games = self.db.db_root["games"]
        self.user_manager = UserManager(database)
Example #13
    def _initStorage(self, clear=False):
        inited = base_hasattr(self, '_inputStorage') and \
                 base_hasattr(self, '_inputItems') and \
                 base_hasattr(self, '_length')

        if not inited or clear:
            self._inputStorage = LOBTree()
            self._inputItems = 0
            self._length = Length()
Example #14
 def __init__(self, chunk_size=None, max_chunk_size=None):
     """
     chunk_size (int, None)
         If non-None, aggregate consecutive writes up to this size.
         Overlapping or larger writes may exceed this size, though.
     max_chunk_size (int, None)
         If non-None, prevent chunks from exceeding this size.
     """
     self._tree = LOBTree()
     self.chunk_size = chunk_size
     self.max_chunk_size = max_chunk_size
Example #15
class UserJoinAdapter(FormActionAdapter):
    """A form action adapter that will save form input data and
       keep them until Manager use this data to create new site's members"""

    meta_type      = 'UserJoinAdapter'
    portal_type    = 'UserJoinAdapter'
    security       = ClassSecurityInfo()

    schema = FormAdapterSchema.copy() + UserJoinAdapterSchema.copy()

    security.declarePrivate('pfgFieldVocabulary')
    def pfgFieldVocabulary(self):
        return atapi.DisplayList(
            [['', '']] +  [[field.getName(), field.widget.label] for field in self.fgFields()])

    security.declarePrivate('userFieldVocabulary')
    def userFieldVocabulary(self):
        portal_memberdata = getToolByName(self, 'portal_memberdata')
        return atapi.DisplayList(
            [['', '']] + [[x,x] for x in portal_memberdata.propertyIds() if x not in config.RESERVED_PROPS])

    def _initStorage(self, clear=False):
        inited = base_hasattr(self, '_inputStorage') and \
                 base_hasattr(self, '_inputItems') and \
                 base_hasattr(self, '_length')

        if not inited or clear:
            self._inputStorage = LOBTree()
            self._inputItems = 0
            self._length = Length()

    def _addDataRow(self, value):
        # Stolen from SaveDataAdapter
        self._initStorage()
        id = int(time.time() * 1000)
        self._inputStorage[id] = value
        self._length.change(1)
        return id

    def onSuccess(self, fields, REQUEST=None):
        """Store data in the inner registry"""
        data = {}
        userid_provided = REQUEST.form.get(self.getUseridField())
        if userid_provided:
            rtool = getToolByName(self, 'portal_registration')
            if api.user.get(username=userid_provided) is not None:
                return {self.getUseridField(): _('username_already_taken_error',
                                                 default=u'The username is already in use')}
            if userid_provided=='Anonymous User' or \
                    not rtool._ALLOWED_MEMBER_ID_PATTERN.match(userid_provided):
                return {self.getUseridField(): _('username_invalid_error',
                                                 default=u'The username is invalid')}            
            # check whether the userid is already stored in the registry
            for v in self._inputStorage.values():
                if userid_provided==v.get(self.getUseridField()):
                    return {self.getUseridField(): _('username_already_taken_error',
                                                     default=u'The username is already in use')}                    
        for field in fields:
            # we do not handle files for now
            if field.isFileField() or field.isLabel():
                continue
            val = REQUEST.form.get(field.fgField.getName(), '')
            if not type(val) in StringTypes:
                # Zope has marshalled the field into
                # something other than a string
                val = str(val)
            data[field.fgField.getName()] = val
            data['__timestamp__'] = DateTime()
        id = self._addDataRow(data)
        REQUEST['pfguserjoin_obj'] = self
        REQUEST['pfguserjoin_newid'] = id
Example #16
 def _rolesdata(self):
     return LOBTree()
Example #17
 def __init__(self, username, created):
     self.data = LOBTree()
     self.username = username
     self.created = created
Example #18
    def _storage(self, create_if_missing=False):
        ann = IAnnotations(self.context)
        if self.ANNOTATION_KEY not in ann.keys() and create_if_missing:
            ann[self.ANNOTATION_KEY] = LOBTree()

        return ann.get(self.ANNOTATION_KEY, None)
Example #19
 def __init__(self, username, created=None):
     self.data = LOBTree()
     self.username = username  # not inbox owner but other user
     if created is None:
         created = datetime.now(pytz.utc)
     self.created = created
Example #20
class Conversation(Traversable, Persistent, Explicit):
    """A conversation is a container for all comments on a content object.

    It manages internal data structures for comment threading and efficient
    comment lookup.
    """

    implements(IConversation)

    __allow_access_to_unprotected_subobjects__ = True

    def __init__(self, id="++conversation++default"):
        self.id = id

        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()

        # id -> comment - find comment by id
        self._comments = LOBTree()

        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()

    def getId(self):
        """Get the id of the conversation. This is used to construct a
        URL.
        """
        return self.id

    def enabled(self):
        parent = aq_inner(self.__parent__)
        return parent.restrictedTraverse('@@conversation_view').enabled()

    @property
    def total_comments(self):
        return len(self._comments)

    @property
    def last_comment_date(self):
        try:
            return self._comments[self._comments.maxKey()].creation_date
        except (ValueError, KeyError, AttributeError,):
            return None

    @property
    def commentators(self):
        return self._commentators

    def objectIds(self):
        return self._comments.keys()

    def getComments(self, start=0, size=None):
        """Get unthreaded comments
        """
        count = 0l
        for comment in self._comments.values(min=start):
            # Yield the acquisition wrapped comment
            yield self[comment.id]

            count += 1
            if size and count > size:
                return

    def getThreads(self, start=0, size=None, root=0, depth=None):
        """Get threaded comments
        """

        def recurse(comment_id, d=0):
            # Yield the current comment before we look for its children
            yield {'id': comment_id, 'comment': self[comment_id], 'depth': d}

            # Recurse if there are children and we are not out of our depth
            if depth is None or d + 1 < depth:
                children = self._children.get(comment_id, None)
                if children is not None:
                    for child_id in children:
                        for value in recurse(child_id, d+1):
                            yield value

        # Find top level threads
        comments = self._children.get(root, None)
        if comments is not None:
            count = 0l
            for comment_id in comments.keys(min=start):

                # Abort if we have found all the threads we want
                count += 1
                if size and count > size:
                    return

                # Let the closure recurse
                for value in recurse(comment_id):
                    yield value

    def addComment(self, comment):
        """Add a new comment. The parent id should have been set already. The
        comment id may be modified to find a free key. The id used will be
        returned.
        """

        # Make sure we don't have a wrapped object

        comment = aq_base(comment)

        id = long(time.time() * 1e6)
        while id in self._comments:
            id += 1

        comment.comment_id = id
        notify(ObjectWillBeAddedEvent(comment, self, id))
        self._comments[id] = comment

        comment.__parent__ = aq_base(self)

        # Record unique users who've commented (for logged in users only)
        commentator = comment.author_username
        if commentator:
            if not commentator in self._commentators:
                self._commentators[commentator] = 0
            self._commentators[commentator] += 1

        reply_to = comment.in_reply_to
        if not reply_to:
            # top level comments are in reply to the faux id 0
            comment.in_reply_to = reply_to = 0

        if not reply_to in self._children:
            self._children[reply_to] = LLSet()
        self._children[reply_to].insert(id)

        # Add the annotation if not already done
        annotions = IAnnotations(self.__parent__)
        if not ANNOTATION_KEY in annotions:
            annotions[ANNOTATION_KEY] = aq_base(self)

        # Notify that the object is added. The object must here be
        # acquisition wrapped or the indexing will fail.
        notify(ObjectCreatedEvent(comment))
        notify(ObjectAddedEvent(comment.__of__(self), self, id))
        notify(ContainerModifiedEvent(self))

        return id

    # Dict API

    def __len__(self):
        return len(self._comments)

    def __contains__(self, key):
        return long(key) in self._comments

    def __getitem__(self, key):
        """Get an item by its long key
        """
        return self._comments[long(key)].__of__(self)

    def __delitem__(self, key, suppress_container_modified=False):
        """Delete an item by its long key
        """

        key = long(key)

        comment = self[key].__of__(self)
        commentator = comment.author_username

        notify(ObjectWillBeRemovedEvent(comment, self, key))

        # Remove all children
        for child_id in self._children.get(key, []):
            # avoid sending ContainerModifiedEvent multiple times
            self.__delitem__(child_id, suppress_container_modified=True)

        # Remove the comment from _comments
        self._comments.pop(key)

        # Remove this comment as a child of its parent
        if not suppress_container_modified:
            parent = comment.in_reply_to
            if parent is not None:
                parent_children = self._children.get(parent, None)
                if parent_children is not None and key in parent_children:
                    parent_children.remove(key)

        # Remove commentators
        if commentator and commentator in self._commentators:
            if self._commentators[commentator] <= 1:
                del self._commentators[commentator]
            else:
                self._commentators[commentator] -= 1

        notify(ObjectRemovedEvent(comment, self, key))

        if not suppress_container_modified:
            notify(ContainerModifiedEvent(self))

    def __iter__(self):
        return iter(self._comments)

    def get(self, key, default=None):
        comment = self._comments.get(long(key), default)
        if comment is default:
            return default
        return comment.__of__(self)

    def keys(self):
        return self._comments.keys()

    def items(self):
        return [(i[0], i[1].__of__(self),) for i in self._comments.items()]

    def values(self):
        return [v.__of__(self) for v in self._comments.values()]

    def iterkeys(self):
        return self._comments.iterkeys()

    def itervalues(self):
        for v in self._comments.itervalues():
            yield v.__of__(self)

    def iteritems(self):
        for k, v in self._comments.iteritems():
            yield (k, v.__of__(self),)
Example #21
 def __init__(self):
     self.data = LOBTree()
Example #22
 def initIndexTree(self):
     """
   Initialize the Index Tree
 """
     self._long_index_tree = LOBTree()
Example #23
class DataBucketStream(Document):
    """
  Represents data stored in many small files inside a "stream".
  Each file is "addressed" by its key similar to dict.
  """

    meta_type = 'ERP5 Data Bucket Stream'
    portal_type = 'Data Bucket Stream'
    add_permission = Permissions.AddPortalContent

    # Declarative security
    security = ClassSecurityInfo()
    security.declareObjectProtected(Permissions.AccessContentsInformation)

    # Declarative properties
    property_sheets = (PropertySheet.CategoryCore, PropertySheet.SortIndex)

    def __init__(self, id, **kw):
        self.initBucketTree()
        self.initIndexTree()
        Document.__init__(self, id, **kw)

    def __len__(self):
        return len(self._tree)

    def initBucketTree(self):
        """
      Initialize the Bucket Tree
    """
        self._tree = OOBTree()

    def initIndexTree(self):
        """
      Initialize the Index Tree
    """
        self._long_index_tree = LOBTree()

    def getMaxKey(self, key=None):
        """
    Return the maximum key
    """
        try:
            return self._tree.maxKey(key)
        except ValueError:
            return None

    def getMaxIndex(self, index=None):
        """
    Return the maximum index
    """
        try:
            return self._long_index_tree.maxKey(index)
        except ValueError:
            return None

    def getMinKey(self, key=None):
        """
    Return the minimum key
    """
        try:
            return self._tree.minKey(key)
        except ValueError:
            return None

    def getMinIndex(self, index=None):
        """
    Return the minimum index
    """
        try:
            return self._long_index_tree.minKey(index)
        except ValueError:
            return None

    def _getOb(self, id, *args, **kw):
        return None

    def getBucketByKey(self, key=None):
        """
      Get one bucket
    """
        return self._tree[key].value

    def getBucketByIndex(self, index=None):
        """
      Get one bucket
    """
        key = self._long_index_tree[index]
        return self.getBucketByKey(key)

    def getBucket(self, key):
        log('DeprecationWarning: Please use getBucketByKey')
        return self.getBucketByKey(key)

    def hasBucketKey(self, key):
        """
      Whether a bucket with the given key exists
    """
        return key in self._tree

    def hasBucketIndex(self, index):
        """
      Whether a bucket with the given index exists
    """
        return self._long_index_tree.has_key(index)

    def insertBucket(self, key, value):
        """
      Insert one bucket
    """
        try:
            count = self._long_index_tree.maxKey() + 1
        except ValueError:
            count = 0
        except AttributeError:
            pass
        try:
            self._long_index_tree.insert(count, key)
        except AttributeError:
            pass
        value = PersistentString(value)
        is_new_key = self._tree.insert(key, value)
        if not is_new_key:
            self.log("Reingestion of same key")
            self._tree[key] = value

    def getBucketKeySequenceByKey(self,
                                  start_key=None,
                                  stop_key=None,
                                  count=None,
                                  exclude_start_key=False,
                                  exclude_stop_key=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._tree.keys(min=start_key,
                                   max=stop_key,
                                   excludemin=exclude_start_key,
                                   excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequenceByIndex(self,
                                    start_index=None,
                                    stop_index=None,
                                    count=None,
                                    exclude_start_index=False,
                                    exclude_stop_index=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketKeySequenceByKey')
        return self.getBucketKeySequenceByKey(start_key=start_key, count=count)

    def getBucketIndexKeySequenceByIndex(self,
                                         start_index=None,
                                         stop_index=None,
                                         count=None,
                                         exclude_start_index=False,
                                         exclude_stop_index=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeySequence(self, sequence)

    def getBucketIndexSequenceByIndex(self,
                                      start_index=None,
                                      stop_index=None,
                                      count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._long_index_tree.keys(min=start_index,
                                              max=stop_index,
                                              excludemin=exclude_start_index,
                                              excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByKey(self,
                                    start_key=None,
                                    stop_key=None,
                                    count=None,
                                    exclude_start_key=False,
                                    exclude_stop_key=False):
        """
      Get a lazy sequence of bucket values
    """
        sequence = self._tree.values(min=start_key,
                                     max=stop_key,
                                     excludemin=exclude_start_key,
                                     excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByIndex(self,
                                      start_index=None,
                                      stop_index=None,
                                      count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """
      Get a lazy sequence of bucket values
    """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexValueSequence(self, sequence)

    def getBucketValueSequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketValueSequenceByKey')
        return self.getBucketValueSequenceByKey(start_key=start_key,
                                                count=count)

    def getBucketKeyItemSequenceByKey(self,
                                      start_key=None,
                                      stop_key=None,
                                      count=None,
                                      exclude_start_key=False,
                                      exclude_stop_key=False):
        """
      Get a lazy sequence of bucket items
    """
        sequence = self._tree.items(min=start_key,
                                    max=stop_key,
                                    excludemin=exclude_start_key,
                                    excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketItemSequence(self,
                              start_key=None,
                              count=None,
                              exclude_start_key=False):
        log('DeprecationWarning: Please use getBucketKeyItemSequenceByKey')
        return self.getBucketKeyItemSequenceByKey(
            start_key=start_key,
            count=count,
            exclude_start_key=exclude_start_key)

    def getBucketIndexItemSequenceByIndex(self,
                                          start_index=None,
                                          stop_index=None,
                                          count=None,
                                          exclude_start_index=False,
                                          exclude_stop_index=False):
        """
      Get a lazy sequence of bucket items
    """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexItemSequence(self, sequence)

    def getBucketIndexKeyItemSequenceByIndex(self,
                                             start_index=None,
                                             stop_index=None,
                                             count=None,
                                             exclude_start_index=False,
                                             exclude_stop_index=False):
        """
      Get a lazy sequence of bucket items
    """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeyItemSequence(self, sequence)

    def getItemList(self):
        """
      Return a list of all key, value pairs
    """
        return [item for item in self._tree.items()]

    def getKeyList(self):
        """
      Return a list of all keys
    """
        return [key for key in self._tree.keys()]

    def getIndexList(self):
        """
      Return a list of all indexes
    """
        return [key for key in self._long_index_tree.keys()]

    def getIndexKeyTupleList(self):
        """
      Return a list of all (index, key) tuples
    """
        return [key for key in self._long_index_tree.items()]

    def getMd5sum(self, key):
        """
      Get hexdigest of bucket.
    """
        h = hashlib.md5()
        h.update(self.getBucketByKey(key))
        return h.hexdigest()

    def delBucketByKey(self, key):
        """
      Remove the bucket.
    """
        del self._tree[key]
        for index, my_key in list(self.getBucketIndexKeySequenceByIndex()):
            if my_key == key:
                del self._long_index_tree[index]

    def delBucketByIndex(self, index):
        """
      Remove the bucket.
    """
        key = self._long_index_tree[index]
        del self._tree[key]
        del self._long_index_tree[index]

    def rebuildIndexTreeByKeyOrder(self):
        """
        Clear and rebuild the index tree by order of keys
    """
        self.initIndexTree()
        for count, key in enumerate(self.getBucketKeySequenceByKey()):
            self._long_index_tree.insert(count, key)
Example #24
class Conversation(BTreeDictBase):

    username = None
    new_messages_count = 0
    created = None

    def __init__(self, username, created):
        self.data = LOBTree()
        self.username = username
        self.created = created

    def to_long(self, dt):
        """Turns a `datetime` object into a long."""
        return long(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)

    def generate_key(self, message):
        """Generate a long int key for a message."""
        key = self.to_long(message.created)
        while key in self.data:
            key = key + 1
        return key

    def add_message(self, message):
        key = self.generate_key(message)
        message.uid = key
        self[key] = message
        return key

    def __setitem__(self, key, message):
        if key != message.uid:
            msg = 'key and message.uid differ ({0}/{1})'
            raise KeyError(msg.format(key, message.uid))

        # delete old message if there is one to make sure the
        # new_messages_count is correct and update the new_messages_count
        # with the new message
        if key in self:
            del self[key]
        if message.new is True:
            self.update_new_messages_count(+1)

        super(Conversation, self).__setitem__(key, message)

    def __delitem__(self, uid):
        message = self[uid]
        if message.new is True:
            self.update_new_messages_count(-1)
        super(Conversation, self).__delitem__(uid)

    def get_messages(self):
        return self.data.values()

    def mark_read(self):
        # use update function to update inbox too
        self.update_new_messages_count(self.new_messages_count * -1)

        # update messages
        for message in self.data.values():
            message.new = False

    def update_new_messages_count(self, difference):
        count = self.new_messages_count
        count = count + difference
        if count < 0:
            # FIXME: Error. Log?
            count = 0
        self.new_messages_count = count

        # update the inbox accordingly
        self.__parent__.update_new_messages_count(difference)

    def to_dict(self):
        member = api.user.get(self.username)
        return {
            'username': self.username,
            'fullname': member.getProperty('fullname'),
            'new_messages_count': self.new_messages_count
        }
Example #25
 def numbered_jobs(self):
     self.jobs = NumberTree(self.jobs)
     for state in self.jobs_by_state:
         self.jobs_by_state[state] = LOBTree()
         for id, job in self.jobs.items():
             self.jobs_by_state[state][id] = job
Example #26
 def __init__(self, username, created):
     self.data = LOBTree()
     self.username = username
     self.created = created
Example #27
 def __init__(self):
     self.data = LOBTree()
Example #28
 def __init__(self, data=None, **kwargs):
     super(Tasks, self).__init__(data=data, **kwargs)
     self.__task_ids__ = LOBTree()
Example #29
class Conversation(BTreeDictBase):

    username = None
    new_messages_count = 0
    created = None

    def __init__(self, username, created):
        self.data = LOBTree()
        self.username = username
        self.created = created

    def to_long(self, dt):
        """Turns a `datetime` object into a long."""
        return long(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)

    def generate_key(self, message):
        """Generate a long int key for a message."""
        key = self.to_long(message.created)
        while key in self.data:
            key = key + 1
        return key

    def add_message(self, message):
        key = self.generate_key(message)
        message.uid = key
        self[key] = message
        return key

    def __setitem__(self, key, message):
        if key != message.uid:
            msg = 'key and message.uid differ ({0}/{1})'
            raise KeyError(msg.format(key, message.uid))

        # delete old message if there is one to make sure the
        # new_messages_count is correct and update the new_messages_count
        # with the new message
        if key in self:
            del self[key]
        if message.new is True:
            self.update_new_messages_count(+1)

        super(Conversation, self).__setitem__(key, message)

    def __delitem__(self, uid):
        message = self[uid]
        if message.new is True:
            self.update_new_messages_count(-1)
        super(Conversation, self).__delitem__(uid)

    def get_messages(self):
        return self.data.values()

    def mark_read(self):
        # use update function to update inbox too
        self.update_new_messages_count(self.new_messages_count * -1)

        # update messages
        for message in self.data.values():
            message.new = False

    def update_new_messages_count(self, difference):
        count = self.new_messages_count
        count = count + difference
        if count < 0:
            # FIXME: Error. Log?
            count = 0
        self.new_messages_count = count

        # update the inbox accordingly
        self.__parent__.update_new_messages_count(difference)

    def to_dict(self):
        member = api.user.get(self.username)
        return {'username': self.username,
                'fullname': member.getProperty('fullname'),
                'new_messages_count': self.new_messages_count}
Example #30
class Conversation(BTreeDictBase):

    username = None  # other user
    new_messages_count = 0
    created = None
    last = None  # last msg from other to inbox owner

    def __init__(self, username, created=None):
        self.data = LOBTree()
        self.username = username  # not inbox owner but other user
        if created is None:
            created = datetime.now(pytz.utc)
        self.created = created

    def to_long(self, dt):
        """Turns a `datetime` object into a long.

        Since this is used as BTree key it must be sequential,
        hence we force UTC.
        """
        if dt.tzinfo != pytz.utc:
            raise ValueError("datetime storage values MUST be UTC")
        return long(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)

    def generate_key(self, message):
        """Generate a long int key for a message."""
        key = self.to_long(message.created)
        while key in self.data:
            key = key + 1
        return key

    def add_message(self, message):
        key = self.generate_key(message)
        message.uid = key
        self[key] = message
        self.last = message
        return key

    def __setitem__(self, key, message):
        if key != message.uid:
            msg = 'key and message.uid differ ({0}/{1})'
            raise KeyError(msg.format(key, message.uid))

        # delete old message if there is one to make sure the
        # new_messages_count is correct and update the new_messages_count
        # with the new message
        if key in self:
            del self[key]
        if message.new is True:
            self.update_new_messages_count(+1)

        super(Conversation, self).__setitem__(key, message)

    def __delitem__(self, uid):
        message = self[uid]
        if message.new is True:
            self.update_new_messages_count(-1)
        super(Conversation, self).__delitem__(uid)

    def get_messages(self):
        return self.data.values()

    def mark_read(self, message=None):
        if message:
            message.new = False
            self.update_new_messages_count(-1)
        else:
            # use update function to update inbox too
            self.update_new_messages_count(self.new_messages_count * -1)
            # mark all messages as read
            for message in self.data.values():
                message.new = False

    def update_new_messages_count(self, difference):
        count = self.new_messages_count
        count = count + difference
        if count < 0:
            # FIXME: Error. Log?
            count = 0
        self.new_messages_count = count

        # update the inbox accordingly
        self.__parent__.update_new_messages_count(difference)

    def to_dict(self):
        member = api.user.get(self.username)
        return {'username': self.username,
                'fullname': member.getProperty('fullname'),
                'new_messages_count': self.new_messages_count}
Example #31
class Repository(Evolvable):
    version = 2

    @evolve(1, 2)
    def add_job_configs(self):
        self.job_configs = PersistentList()

    def __init__(self,
                 name,
                 url,
                 description='',
                 repository_id='',
                 remote_borg='borg'):
        self.name = name
        self.url = url
        self.description = description
        self.repository_id = repository_id
        self.remote_borg = remote_borg
        self.jobs = LOBTree()
        self.archives = OOBTree()
        self.job_configs = PersistentList()

    @property
    def location(self):
        return Location(self.url)

    def latest_job(self):
        try:
            return self.jobs[self.jobs.maxKey()]
        except ValueError:
            return

    def __str__(self):
        return self.name

    @staticmethod
    def oid_get(oid):
        for repository in data_root().repositories:
            if repository.oid == oid:
                return repository
        else:
            raise KeyError

    class Form(forms.Form):
        name = forms.CharField()
        description = forms.CharField(widget=forms.Textarea, required=False)
        url = forms.CharField(help_text=_(
            'For example /data0/repository or user@storage:/path.'),
                              label=_('URL'))
        repository_id = forms.CharField(min_length=64,
                                        max_length=64,
                                        label=_('Repository ID'))
        remote_borg = forms.CharField(
            help_text=_(
                'Remote borg binary name (only applies to remote repositories).'
            ),
            initial='borg',
        )

    class ChoiceField(forms.ChoiceField):
        @staticmethod
        def get_choices():
            for repository in data_root().repositories:
                yield repository.oid, str(repository)

        def __init__(self, **kwargs):
            super().__init__(choices=self.get_choices, **kwargs)

        def clean(self, value):
            value = super().clean(value)
            for repository in data_root().repositories:
                if repository.oid == value:
                    return repository
            else:
                raise ValidationError(self.error_messages['invalid_choice'],
                                      code='invalid_choice')

        def prepare_value(self, value):
            if not value:
                return
            return value.oid
Example #32
class Conversation(Traversable, Persistent, Explicit):
    """A conversation is a container for all comments on a content object.

    It manages internal data structures for comment threading and efficient
    comment lookup.
    """

    implements(IConversation, IHideFromBreadcrumbs)

    __allow_access_to_unprotected_subobjects__ = True

    def __init__(self, id="++conversation++default"):
        self.id = id

        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()

        # id -> comment - find comment by id
        self._comments = LOBTree()

        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()

    def getId(self):
        """Get the id of the conversation. This is used to construct a
        URL.
        """
        return self.id

    def enabled(self):
        parent = aq_inner(self.__parent__)
        return parent.restrictedTraverse('@@conversation_view').enabled()

    @property
    def total_comments(self):
        public_comments = [
            x for x in self._comments.values()
            if user_nobody.has_permission('View', x)
        ]
        return len(public_comments)

    @property
    def last_comment_date(self):
        # self._comments is an instance of a BTree; its keys
        # are always ordered
        comment_keys = self._comments.keys()
        for comment_key in reversed(comment_keys):
            comment = self._comments[comment_key]
            if user_nobody.has_permission('View', comment):
                return comment.creation_date
        return None

    @property
    def commentators(self):
        return self._commentators

    @property
    def public_commentators(self):
        retval = set()
        for comment in self._comments.values():
            if not user_nobody.has_permission('View', comment):
                continue
            retval.add(comment.author_username)
        return tuple(retval)

    def objectIds(self):
        return self._comments.keys()

    def getComments(self, start=0, size=None):
        """Get unthreaded comments
        """
        count = 0l
        for comment in self._comments.values(min=start):
            # Yield the acquisition wrapped comment
            yield self[comment.id]

            count += 1
            if size and count > size:
                return

    def getThreads(self, start=0, size=None, root=0, depth=None):
        """Get threaded comments
        """
        def recurse(comment_id, d=0):
            # Yield the current comment before we look for its children
            yield {'id': comment_id, 'comment': self[comment_id], 'depth': d}

            # Recurse if there are children and we are not out of our depth
            if depth is None or d + 1 < depth:
                children = self._children.get(comment_id, None)
                if children is not None:
                    for child_id in children:
                        for value in recurse(child_id, d + 1):
                            yield value

        # Find top level threads
        comments = self._children.get(root, None)
        if comments is not None:
            count = 0l
            for comment_id in comments.keys(min=start):

                # Abort if we have found all the threads we want
                count += 1
                if size and count > size:
                    return

                # Let the closure recurse
                for value in recurse(comment_id):
                    yield value

    def addComment(self, comment):
        """Add a new comment. The parent id should have been set already. The
        comment id may be modified to find a free key. The id used will be
        returned.
        """

        # Make sure we don't have a wrapped object

        comment = aq_base(comment)

        id = long(time.time() * 1e6)
        while id in self._comments:
            id += 1

        comment.comment_id = id
        notify(ObjectWillBeAddedEvent(comment, self, id))
        self._comments[id] = comment

        comment.__parent__ = aq_base(self)

        # Record unique users who've commented (for logged in users only)
        commentator = comment.author_username
        if commentator:
            if not commentator in self._commentators:
                self._commentators[commentator] = 0
            self._commentators[commentator] += 1

        reply_to = comment.in_reply_to
        if not reply_to:
            # top level comments are in reply to the faux id 0
            comment.in_reply_to = reply_to = 0

        if not reply_to in self._children:
            self._children[reply_to] = LLSet()
        self._children[reply_to].insert(id)

        # Add the annotation if not already done
        annotions = IAnnotations(self.__parent__)
        if not ANNOTATION_KEY in annotions:
            annotions[ANNOTATION_KEY] = aq_base(self)

        # Notify that the object is added. The object must here be
        # acquisition wrapped or the indexing will fail.
        notify(ObjectCreatedEvent(comment))
        notify(ObjectAddedEvent(comment.__of__(self), self, id))
        notify(ContainerModifiedEvent(self))

        return id

    # Dict API

    def __len__(self):
        return len(self._comments)

    def __contains__(self, key):
        return long(key) in self._comments

    def __getitem__(self, key):
        """Get an item by its long key
        """
        try:
            comment_id = long(key)
        except ValueError:
            return
        return self._comments[comment_id].__of__(self)

    def __delitem__(self, key, suppress_container_modified=False):
        """Delete an item by its long key
        """

        key = long(key)

        comment = self[key].__of__(self)
        commentator = comment.author_username

        notify(ObjectWillBeRemovedEvent(comment, self, key))

        # Remove all children
        for child_id in self._children.get(key, []):
            # avoid sending ContainerModifiedEvent multiple times
            self.__delitem__(child_id, suppress_container_modified=True)

        # Remove the comment from _comments
        self._comments.pop(key)

        # Remove this comment as a child of its parent
        if not suppress_container_modified:
            parent = comment.in_reply_to
            if parent is not None:
                parent_children = self._children.get(parent, None)
                if parent_children is not None and key in parent_children:
                    parent_children.remove(key)

        # Remove commentators
        if commentator and commentator in self._commentators:
            if self._commentators[commentator] <= 1:
                del self._commentators[commentator]
            else:
                self._commentators[commentator] -= 1

        notify(ObjectRemovedEvent(comment, self, key))

        if not suppress_container_modified:
            notify(ContainerModifiedEvent(self))

    def __iter__(self):
        return iter(self._comments)

    def get(self, key, default=None):
        comment = self._comments.get(long(key), default)
        if comment is default:
            return default
        return comment.__of__(self)

    def keys(self):
        return self._comments.keys()

    def items(self):
        return [(
            i[0],
            i[1].__of__(self),
        ) for i in self._comments.items()]

    def values(self):
        return [v.__of__(self) for v in self._comments.values()]

    def iterkeys(self):
        return self._comments.iterkeys()

    def itervalues(self):
        for v in self._comments.itervalues():
            yield v.__of__(self)

    def iteritems(self):
        for k, v in self._comments.iteritems():
            yield (
                k,
                v.__of__(self),
            )
Example #33
class TaggingEngine(Persistent):
    interface.implements(ITaggingEngine)

    def __init__(self):
        self.clear()

    @property
    def tagsCount(self):
        return len(self.tag_oids)

    @property
    def itemsCount(self):
        return len(self.oid_tags)

    def clear(self):
        self.tagsmap = LOBTree()

        # index
        self.tag_oids = LOBTree()
        self.oid_tags = LOBTree()

        # tag weight
        self.weights = OOSet()
        self.tag_weight = LFBTree()

    def getHash(self, str):
        if not str:
            return 0 # empty

        res = hash(str)

        # NOTE: workaround for 64bit
        if struct.calcsize("P") * 8 == 64:
            res = ord(str[0]) << 7
            for char in str:
                res = c_mul(1000003, res) ^ ord(char)
            res = res ^ len(str)
            if res == -1:
                res = -2
            if res >= 2**31:
                res -= 2**32

        return res

    def update(self, oid, tags):
        self.remove(oid)

        for tag in tags:
            htag = self.getHash(tag)
            self.tagsmap[htag] = tag

            # add oid -> tag
            oids = self.tag_oids.get(htag)
            if oids is None:
                oids = LFSet()
                self.tag_oids[htag] = oids

            if oid not in oids:
                oids.insert(oid)

            # add tag -> oid
            oid_tags = self.oid_tags.get(oid)
            if oid_tags is None:
                oid_tags = LLSet()
                self.oid_tags[oid] = oid_tags

            if htag not in oid_tags:
                oid_tags.insert(htag)

            # calculate weight
            weight = self.tag_weight.get(htag)
            if weight is not None:
                key = (weight, htag)
                if key in self.weights:
                    self.weights.remove(key)

            weight = float(len(oids))
            self.tag_weight[htag] = weight
            self.weights.insert((weight, htag))

    def remove(self, oid):
        for tag in self.oid_tags.get(oid, ()):
            # remove oid from tag -> oids reference
            oids = self.tag_oids.get(tag)

            if oid in oids:
                oids.remove(oid)

            oids_len = float(len(oids))

            # recalculate weight
            weight = self.tag_weight.get(tag)
            if weight is not None:
                key = (weight, tag)
                if key in self.weights:
                    self.weights.remove(key)

            if oids_len:
                self.tag_weight[tag] = oids_len
                self.weights.insert((oids_len, tag))

            # remove tag
            if not oids_len:
                del self.tag_oids[tag]
                del self.tagsmap[tag]

        if oid in self.oid_tags:
            del self.oid_tags[oid]

    def getItems(self, tags):
        oids = self.tag_oids
        weights = self.tag_weight

        weight, result = 0, LFBTree()
        for tag in tags:
            htag = self.getHash(tag)
            if htag in oids:
                weight, result = weightedUnion(
                    oids.get(htag), result, weights.get(htag), weight)

        return IFBucket(result)

    def getUniqueItems(self, tags):
        oids = self.tag_oids
        weights = self.tag_weight

        weight, result = 1.0, None
        for tag in tags:
            htag = self.getHash(tag)
            if htag in oids:
                if result is None:
                    weight, result = weightedUnion(
                        oids.get(htag), LFBTree(), weights.get(htag), weight)
                else:
                    weight, result = weightedIntersection(
                        result, oids.get(htag), weight, weights.get(htag))

        return IFBucket(result)

    def getTagCloud(self, reverse=False):
        total = len(self.oid_tags)
        if not total:
            return

        tags = self.tagsmap
        data = self.weights.keys()

        percent = total / 100.0

        if reverse:
            first = len(data)-1

            for idx in xrange(first, -1, -1):
                weight, htag = data[idx]
                yield weight / percent, tags.get(htag)
        else:
            for weight, htag in data:
                yield weight / percent, tags.get(htag)

    def getItemsTagCloud(self, items):
        oid_tags = self.oid_tags

        tags = [oid_tags[oid] for oid in items if oid in oid_tags]
        if tags:
            tags = multiunion(tags)
        else:
            return

        total = len(oid_tags)
        data = self.weights.keys()
        weights = self.tag_weight

        percent = total / 100.0

        for tag in tags:
            yield weights[tag] / percent, self.tagsmap.get(tag)

    def getFrequency(self, tags):
        tagsmap = self.tagsmap
        tag_weight = self.tag_weight

        for tag in tags:
            yield (tag, tag_weight.get(self.getHash(tag), 0))

    def __nonzero__(self):
        return bool(self.tag_oids)

    def __contains__(self, tag):
        return self.getHash(tag) in self.tagsmap
Example #34
 def _makeOne(self):
     from BTrees.LOBTree import LOBTree
     return LOBTree()
Example #35
 def __init__(self):
     self._tree = LOBTree()
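All of these snippets lean on the same small BTrees API: an LOBTree maps 64-bit integer keys to arbitrary objects and keeps its keys sorted, which is what makes the maxKey()/minKey() calls and the ranged keys(min=..., max=...) iterations used above work. A minimal standalone sketch of that pattern (illustrative names only, no ZODB/Zope context assumed):

from BTrees.LOBTree import LOBTree

# Keys must be 64-bit integers; values can be any object
# (persistent ones when used inside ZODB).
timeline = LOBTree()
timeline[1000] = 'first'
timeline[2000] = 'second'
timeline[3000] = 'third'

# Keys are kept sorted, so min/max lookups and range queries are cheap.
assert timeline.minKey() == 1000
assert timeline.maxKey() == 3000
assert list(timeline.keys(min=1500, max=3000)) == [2000, 3000]

# maxKey()/minKey() raise ValueError on an empty tree, which is why many
# of the examples above wrap them in try/except ValueError.
empty = LOBTree()
try:
    empty.maxKey()
except ValueError:
    pass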