class Conversation(BTreeDictBase):
    """A conversation between the inbox owner and one other user.

    Messages are stored in an LOBTree keyed by a long derived from the
    message creation time (microsecond resolution, forced to UTC), so
    iterating ``self.data`` yields messages in chronological order.
    """

    username = None          # other user (not the inbox owner)
    new_messages_count = 0   # unread messages from the other user
    created = None           # when the conversation started
    last = None              # last msg from other to inbox owner

    def __init__(self, username, created=None):
        self.data = LOBTree()
        self.username = username  # not inbox owner but other user
        if created is None:
            created = datetime.now(pytz.utc)
        self.created = created

    def to_long(self, dt):
        """Turns a `datetime` object into a long.

        Since this is used as BTree key it must be sequential, hence we
        force UTC.
        """
        if dt.tzinfo != pytz.utc:
            raise ValueError("datetime storage values MUST be UTC")
        return long(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)

    def generate_key(self, message):
        """Generate a long int key for a message.

        On a (microsecond) collision, probe upwards until a free key is
        found so no existing message is overwritten.
        """
        key = self.to_long(message.created)
        while key in self.data:
            key = key + 1
        return key

    def add_message(self, message):
        """Store `message`, assign its uid and remember it as the latest."""
        key = self.generate_key(message)
        message.uid = key
        self[key] = message
        self.last = message
        return key

    def __setitem__(self, key, message):
        if key != message.uid:
            msg = 'key and message.uid differ ({0}/{1})'
            raise KeyError(msg.format(key, message.uid))
        # delete old message if there is one to make sure the
        # new_messages_count is correct and update the new_messages_count
        # with the new message
        if key in self:
            del self[key]
        if message.new is True:
            self.update_new_messages_count(+1)
        super(Conversation, self).__setitem__(key, message)

    def __delitem__(self, uid):
        message = self[uid]
        if message.new is True:
            self.update_new_messages_count(-1)
        super(Conversation, self).__delitem__(uid)

    def get_messages(self):
        """Return all messages in chronological (key) order."""
        return self.data.values()

    def mark_read(self, message=None):
        """Mark one message -- or, without argument, all messages -- read."""
        if message:
            # BUGFIX: only adjust the counter when the message is actually
            # unread; previously an already-read message decremented the
            # count anyway, desynchronizing conversation and inbox totals.
            if message.new is True:
                self.update_new_messages_count(-1)
            message.new = False
        else:
            # use update function to update inbox too
            self.update_new_messages_count(self.new_messages_count * -1)
            # mark all messages as read
            for message in self.data.values():
                message.new = False

    def update_new_messages_count(self, difference):
        """Apply `difference` to the unread counter (clamped at zero) and
        propagate the same delta to the containing inbox.
        """
        count = self.new_messages_count
        count = count + difference
        if count < 0:
            # FIXME: Error. Log?
            count = 0
        self.new_messages_count = count
        # update the inbox accordingly
        self.__parent__.update_new_messages_count(difference)

    def to_dict(self):
        """Return a summary of the conversation for serialization."""
        member = api.user.get(self.username)
        return {'username': self.username,
                'fullname': member.getProperty('fullname'),
                'new_messages_count': self.new_messages_count}
class Conversation(Traversable, Persistent, Explicit):
    """A conversation is a container for all comments on a content object.

    It manages internal data structures for comment threading and efficient
    comment lookup.
    """

    implements(IConversation, IHideFromBreadcrumbs)

    __allow_access_to_unprotected_subobjects__ = True

    def __init__(self, id="++conversation++default"):
        self.id = id

        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()

        # id -> comment - find comment by id
        self._comments = LOBTree()

        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()

    def getId(self):
        """Get the id of the conversation. This is used to construct a URL.
        """
        return self.id

    def enabled(self):
        # Delegate to the commented object's @@conversation_view, which
        # knows whether discussion is switched on for it.
        parent = aq_inner(self.__parent__)
        return parent.restrictedTraverse('@@conversation_view').enabled()

    @property
    def total_comments(self):
        # Count only comments visible to the anonymous ("nobody") user.
        public_comments = [
            x for x in self._comments.values()
            if user_nobody.has_permission('View', x)
        ]
        return len(public_comments)

    @property
    def last_comment_date(self):
        # self._comments is an Instance of a btree. The keys
        # are always ordered, so walking them backwards visits the
        # newest comments first; return the first publicly visible one.
        comment_keys = self._comments.keys()
        for comment_key in reversed(comment_keys):
            comment = self._comments[comment_key]
            if user_nobody.has_permission('View', comment):
                return comment.creation_date
        return None

    @property
    def commentators(self):
        # All commentators, including authors of non-public comments.
        return self._commentators

    @property
    def public_commentators(self):
        # Usernames of authors with at least one publicly visible comment.
        retval = set()
        for comment in self._comments.values():
            if not user_nobody.has_permission('View', comment):
                continue
            retval.add(comment.author_username)
        return tuple(retval)

    def objectIds(self):
        return self._comments.keys()

    def getComments(self, start=0, size=None):
        """Get unthreaded comments

        NOTE(review): the size check runs after the yield, so up to
        size + 1 comments are produced -- confirm this is intended.
        """
        count = 0l
        for comment in self._comments.values(min=start):
            # Yield the acquisition wrapped comment
            yield self[comment.id]
            count += 1
            if size and count > size:
                return

    def getThreads(self, start=0, size=None, root=0, depth=None):
        """Get threaded comments
        """

        def recurse(comment_id, d=0):
            # Yield the current comment before we look for its children
            yield {'id': comment_id, 'comment': self[comment_id], 'depth': d}

            # Recurse if there are children and we are not out of our depth
            if depth is None or d + 1 < depth:
                children = self._children.get(comment_id, None)
                if children is not None:
                    for child_id in children:
                        for value in recurse(child_id, d + 1):
                            yield value

        # Find top level threads
        comments = self._children.get(root, None)
        if comments is not None:
            count = 0l
            for comment_id in comments.keys(min=start):

                # Abort if we have found all the threads we want
                count += 1
                if size and count > size:
                    return

                # Let the closure recurse
                for value in recurse(comment_id):
                    yield value

    def addComment(self, comment):
        """Add a new comment. The parent id should have been set already. The
        comment id may be modified to find a free key. The id used will be
        returned.
        """

        # Make sure we don't have a wrapped object
        comment = aq_base(comment)

        # Microsecond-resolution id; probe upwards on collision.
        id = long(time.time() * 1e6)
        while id in self._comments:
            id += 1
        comment.comment_id = id

        notify(ObjectWillBeAddedEvent(comment, self, id))
        self._comments[id] = comment
        comment.__parent__ = aq_base(self)

        # Record unique users who've commented (for logged in users only)
        commentator = comment.author_username
        if commentator:
            if not commentator in self._commentators:
                self._commentators[commentator] = 0
            self._commentators[commentator] += 1

        reply_to = comment.in_reply_to
        if not reply_to:
            # top level comments are in reply to the faux id 0
            comment.in_reply_to = reply_to = 0

        if not reply_to in self._children:
            self._children[reply_to] = LLSet()
        self._children[reply_to].insert(id)

        # Add the annotation if not already done
        annotions = IAnnotations(self.__parent__)
        if not ANNOTATION_KEY in annotions:
            annotions[ANNOTATION_KEY] = aq_base(self)

        # Notify that the object is added. The object must here be
        # acquisition wrapped or the indexing will fail.
        notify(ObjectCreatedEvent(comment))
        notify(ObjectAddedEvent(comment.__of__(self), self, id))
        notify(ContainerModifiedEvent(self))

        return id

    # Dict API

    def __len__(self):
        return len(self._comments)

    def __contains__(self, key):
        return long(key) in self._comments

    def __getitem__(self, key):
        """Get an item by its long key

        Silently returns None when `key` cannot be coerced to a long.
        """
        try:
            comment_id = long(key)
        except ValueError:
            return
        return self._comments[comment_id].__of__(self)

    def __delitem__(self, key, suppress_container_modified=False):
        """Delete an item by its long key
        """
        key = long(key)

        comment = self[key].__of__(self)
        commentator = comment.author_username

        notify(ObjectWillBeRemovedEvent(comment, self, key))

        # Remove all children
        for child_id in self._children.get(key, []):
            # avoid sending ContainerModifiedEvent multiple times
            self.__delitem__(child_id, suppress_container_modified=True)

        # Remove the comment from _comments
        self._comments.pop(key)

        # Remove this comment as a child of its parent
        # NOTE(review): skipped for recursive child deletions
        # (suppress_container_modified=True) since the parent's own
        # _children entry is being removed wholesale -- confirm intended.
        if not suppress_container_modified:
            parent = comment.in_reply_to
            if parent is not None:
                parent_children = self._children.get(parent, None)
                if parent_children is not None and key in parent_children:
                    parent_children.remove(key)

        # Remove commentators
        if commentator and commentator in self._commentators:
            if self._commentators[commentator] <= 1:
                del self._commentators[commentator]
            else:
                self._commentators[commentator] -= 1

        notify(ObjectRemovedEvent(comment, self, key))
        if not suppress_container_modified:
            notify(ContainerModifiedEvent(self))

    def __iter__(self):
        return iter(self._comments)

    def get(self, key, default=None):
        comment = self._comments.get(long(key), default)
        if comment is default:
            return default
        return comment.__of__(self)

    def keys(self):
        return self._comments.keys()

    def items(self):
        return [(
            i[0],
            i[1].__of__(self),
        ) for i in self._comments.items()]

    def values(self):
        return [v.__of__(self) for v in self._comments.values()]

    def iterkeys(self):
        return self._comments.iterkeys()

    def itervalues(self):
        for v in self._comments.itervalues():
            yield v.__of__(self)

    def iteritems(self):
        for k, v in self._comments.iteritems():
            yield (
                k,
                v.__of__(self),
            )
class Conversation(Traversable, Persistent, Explicit):
    """A conversation is a container for all comments on a content object.

    It manages internal data structures for comment threading and efficient
    comment lookup.
    """

    implements(IConversation)

    __allow_access_to_unprotected_subobjects__ = True

    def __init__(self, id="++conversation++default"):
        self.id = id

        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()

        # id -> comment - find comment by id
        self._comments = LOBTree()

        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()

    def getId(self):
        """Get the id of the conversation. This is used to construct a URL.
        """
        return self.id

    def enabled(self):
        # Ask the commented object's @@conversation_view whether
        # discussion is enabled for it.
        parent = aq_inner(self.__parent__)
        return parent.restrictedTraverse('@@conversation_view').enabled()

    @property
    def total_comments(self):
        return len(self._comments)

    @property
    def last_comment_date(self):
        # Comment ids are time-derived, so maxKey() is the newest comment.
        try:
            return self._comments[self._comments.maxKey()].creation_date
        except (ValueError, KeyError, AttributeError,):
            # ValueError: empty tree; KeyError/AttributeError: stale data.
            return None

    @property
    def commentators(self):
        return self._commentators

    def objectIds(self):
        return self._comments.keys()

    def getComments(self, start=0, size=None):
        """Get unthreaded comments

        NOTE(review): the size check runs after the yield, so up to
        size + 1 comments are produced -- confirm this is intended.
        """
        count = 0l
        for comment in self._comments.values(min=start):
            # Yield the acquisition wrapped comment
            yield self[comment.id]
            count += 1
            if size and count > size:
                return

    def getThreads(self, start=0, size=None, root=0, depth=None):
        """Get threaded comments
        """

        def recurse(comment_id, d=0):
            # Yield the current comment before we look for its children
            yield {'id': comment_id, 'comment': self[comment_id], 'depth': d}

            # Recurse if there are children and we are not out of our depth
            if depth is None or d + 1 < depth:
                children = self._children.get(comment_id, None)
                if children is not None:
                    for child_id in children:
                        for value in recurse(child_id, d+1):
                            yield value

        # Find top level threads
        comments = self._children.get(root, None)
        if comments is not None:
            count = 0l
            for comment_id in comments.keys(min=start):

                # Abort if we have found all the threads we want
                count += 1
                if size and count > size:
                    return

                # Let the closure recurse
                for value in recurse(comment_id):
                    yield value

    def addComment(self, comment):
        """Add a new comment. The parent id should have been set already. The
        comment id may be modified to find a free key. The id used will be
        returned.
        """

        # Make sure we don't have a wrapped object
        comment = aq_base(comment)

        # Microsecond-resolution id; probe upwards on collision.
        id = long(time.time() * 1e6)
        while id in self._comments:
            id += 1
        comment.comment_id = id

        notify(ObjectWillBeAddedEvent(comment, self, id))
        self._comments[id] = comment
        comment.__parent__ = aq_base(self)

        # Record unique users who've commented (for logged in users only)
        commentator = comment.author_username
        if commentator:
            if not commentator in self._commentators:
                self._commentators[commentator] = 0
            self._commentators[commentator] += 1

        reply_to = comment.in_reply_to
        if not reply_to:
            # top level comments are in reply to the faux id 0
            comment.in_reply_to = reply_to = 0

        if not reply_to in self._children:
            self._children[reply_to] = LLSet()
        self._children[reply_to].insert(id)

        # Add the annotation if not already done
        annotions = IAnnotations(self.__parent__)
        if not ANNOTATION_KEY in annotions:
            annotions[ANNOTATION_KEY] = aq_base(self)

        # Notify that the object is added. The object must here be
        # acquisition wrapped or the indexing will fail.
        notify(ObjectCreatedEvent(comment))
        notify(ObjectAddedEvent(comment.__of__(self), self, id))
        notify(ContainerModifiedEvent(self))

        return id

    # Dict API

    def __len__(self):
        return len(self._comments)

    def __contains__(self, key):
        return long(key) in self._comments

    def __getitem__(self, key):
        """Get an item by its long key
        """
        return self._comments[long(key)].__of__(self)

    def __delitem__(self, key, suppress_container_modified=False):
        """Delete an item by its long key
        """
        key = long(key)

        comment = self[key].__of__(self)
        commentator = comment.author_username

        notify(ObjectWillBeRemovedEvent(comment, self, key))

        # Remove all children
        for child_id in self._children.get(key, []):
            # avoid sending ContainerModifiedEvent multiple times
            self.__delitem__(child_id, suppress_container_modified=True)

        # Remove the comment from _comments
        self._comments.pop(key)

        # Remove this comment as a child of its parent
        # NOTE(review): skipped for recursive child deletions, whose
        # parent's _children entry is removed wholesale -- confirm intended.
        if not suppress_container_modified:
            parent = comment.in_reply_to
            if parent is not None:
                parent_children = self._children.get(parent, None)
                if parent_children is not None and key in parent_children:
                    parent_children.remove(key)

        # Remove commentators
        if commentator and commentator in self._commentators:
            if self._commentators[commentator] <= 1:
                del self._commentators[commentator]
            else:
                self._commentators[commentator] -= 1

        notify(ObjectRemovedEvent(comment, self, key))
        if not suppress_container_modified:
            notify(ContainerModifiedEvent(self))

    def __iter__(self):
        return iter(self._comments)

    def get(self, key, default=None):
        comment = self._comments.get(long(key), default)
        if comment is default:
            return default
        return comment.__of__(self)

    def keys(self):
        return self._comments.keys()

    def items(self):
        return [(i[0], i[1].__of__(self),) for i in self._comments.items()]

    def values(self):
        return [v.__of__(self) for v in self._comments.values()]

    def iterkeys(self):
        return self._comments.iterkeys()

    def itervalues(self):
        for v in self._comments.itervalues():
            yield v.__of__(self)

    def iteritems(self):
        for k, v in self._comments.iteritems():
            yield (k, v.__of__(self),)
class Conversation(BTreeDictBase):
    """Messages exchanged with one other user, keyed by a long derived
    from each message's creation time.
    """

    username = None
    new_messages_count = 0
    created = None

    def __init__(self, username, created):
        self.data = LOBTree()
        self.username = username
        self.created = created

    def to_long(self, dt):
        """Turns a `datetime` object into a long."""
        seconds = time.mktime(dt.timetuple())
        return long(seconds * 1000000 + dt.microsecond)

    def generate_key(self, message):
        """Generate a long int key for a message."""
        candidate = self.to_long(message.created)
        while candidate in self.data:
            candidate += 1
        return candidate

    def add_message(self, message):
        """Store a message under a fresh key and return that key."""
        uid = self.generate_key(message)
        message.uid = uid
        self[uid] = message
        return uid

    def __setitem__(self, key, message):
        if key != message.uid:
            raise KeyError(
                'key and message.uid differ ({0}/{1})'.format(
                    key, message.uid))
        # Drop any message already stored under this key first, so the
        # unread counter stays correct, then account for the new one.
        if key in self:
            del self[key]
        if message.new is True:
            self.update_new_messages_count(+1)
        super(Conversation, self).__setitem__(key, message)

    def __delitem__(self, uid):
        removed = self[uid]
        if removed.new is True:
            self.update_new_messages_count(-1)
        super(Conversation, self).__delitem__(uid)

    def get_messages(self):
        """All messages, ordered by key (i.e. creation time)."""
        return self.data.values()

    def mark_read(self):
        """Flag every message as read, keeping the inbox counter in sync."""
        # Route the reset through the update helper so the parent inbox
        # counter is adjusted as well.
        self.update_new_messages_count(self.new_messages_count * -1)
        for message in self.data.values():
            message.new = False

    def update_new_messages_count(self, difference):
        """Apply `difference` to the unread counter, clamped at zero."""
        total = self.new_messages_count + difference
        if total < 0:
            # FIXME: Error. Log?
            total = 0
        self.new_messages_count = total
        # Propagate the delta to the containing inbox.
        self.__parent__.update_new_messages_count(difference)

    def to_dict(self):
        """Summary dict used for serialization."""
        member = api.user.get(self.username)
        return {
            'username': self.username,
            'fullname': member.getProperty('fullname'),
            'new_messages_count': self.new_messages_count
        }
class UserJoinAdapter(FormActionAdapter):
    """A form action adapter that will save form input data and keep
    them until Manager use this data to create new site's members"""

    meta_type = 'UserJoinAdapter'
    portal_type = 'UserJoinAdapter'

    security = ClassSecurityInfo()

    schema = FormAdapterSchema.copy() + UserJoinAdapterSchema.copy()

    security.declarePrivate('pfgFieldVocabulary')
    def pfgFieldVocabulary(self):
        # Vocabulary of the parent form's fields (plus an empty choice).
        return atapi.DisplayList(
            [['', '']] +
            [[field.getName(), field.widget.label]
             for field in self.fgFields()])

    security.declarePrivate('userFieldVocabulary')
    def userFieldVocabulary(self):
        # Vocabulary of member-data properties (plus an empty choice),
        # excluding reserved properties that must not be set from a form.
        portal_memberdata = getToolByName(self, 'portal_memberdata')
        return atapi.DisplayList(
            [['', '']] +
            [[x, x] for x in portal_memberdata.propertyIds()
             if x not in config.RESERVED_PROPS])

    def _initStorage(self, clear=False):
        # Lazily create (or, with clear=True, reset) the structures that
        # hold submitted rows.
        inited = base_hasattr(self, '_inputStorage') and \
            base_hasattr(self, '_inputItems') and \
            base_hasattr(self, '_length')
        if not inited or clear:
            self._inputStorage = LOBTree()
            self._inputItems = 0
            self._length = Length()

    def _addDataRow(self, value):
        # Stolen from saveDataDapter
        self._initStorage()
        # Millisecond timestamp used as the row id.
        # NOTE(review): no collision probing -- two submissions within the
        # same millisecond would share an id; confirm this is acceptable.
        id = int(time.time() * 1000)
        self._inputStorage[id] = value
        self._length.change(1)
        return id

    def onSuccess(self, fields, REQUEST=None):
        """Store data in the inner registry"""
        data = {}
        userid_provided = REQUEST.form.get(self.getUseridField())
        if userid_provided:
            rtool = getToolByName(self, 'portal_registration')
            # refuse usernames that already belong to a site member
            if api.user.get(username=userid_provided) is not None:
                return {self.getUseridField():
                        _('username_already_taken_error',
                          default=u'The username is already in use')}
            # refuse reserved or malformed usernames
            if userid_provided == 'Anonymous User' or \
                    not rtool._ALLOWED_MEMBER_ID_PATTERN.match(
                        userid_provided):
                return {self.getUseridField():
                        _('username_invalid_error',
                          default=u'The username is invalid')}
            # userid already stored in the registry
            for v in self._inputStorage.values():
                if userid_provided == v.get(self.getUseridField()):
                    return {self.getUseridField():
                            _('username_already_taken_error',
                              default=u'The username is already in use')}
        for field in fields:
            # we do not handle files for now
            if field.isFileField() or field.isLabel():
                continue
            val = REQUEST.form.get(field.fgField.getName(), '')
            if not type(val) in StringTypes:
                # Zope has marshalled the field into
                # something other than a string
                val = str(val)
            data[field.fgField.getName()] = val
        data['__timestamp__'] = DateTime()
        id = self._addDataRow(data)
        # expose the adapter and the new row id for downstream handlers
        REQUEST['pfguserjoin_obj'] = self
        REQUEST['pfguserjoin_newid'] = id
class Conversation(BTreeDictBase):
    # NOTE(review): near-duplicate of the other messaging Conversation
    # class in this file -- the two should be kept in sync.

    # other user in the conversation (not the inbox owner)
    username = None
    # count of unread messages from the other user
    new_messages_count = 0
    # creation time of the conversation
    created = None

    def __init__(self, username, created):
        # long timestamp key -> message
        self.data = LOBTree()
        self.username = username
        self.created = created

    def to_long(self, dt):
        """Turns a `datetime` object into a long."""
        # Microsecond-resolution value, suitable as a sequential BTree key.
        return long(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)

    def generate_key(self, message):
        """Generate a long int key for a message."""
        key = self.to_long(message.created)
        # Probe upwards on (microsecond) collision so nothing is overwritten.
        while key in self.data:
            key = key + 1
        return key

    def add_message(self, message):
        # Store `message`, assign its uid and return the chosen key.
        key = self.generate_key(message)
        message.uid = key
        self[key] = message
        return key

    def __setitem__(self, key, message):
        if key != message.uid:
            msg = 'key and message.uid differ ({0}/{1})'
            raise KeyError(msg.format(key, message.uid))
        # delete old message if there is one to make sure the
        # new_messages_count is correct and update the new_messages_count
        # with the new message
        if key in self:
            del self[key]
        if message.new is True:
            self.update_new_messages_count(+1)
        super(Conversation, self).__setitem__(key, message)

    def __delitem__(self, uid):
        message = self[uid]
        # Keep the unread counter consistent when removing unread messages.
        if message.new is True:
            self.update_new_messages_count(-1)
        super(Conversation, self).__delitem__(uid)

    def get_messages(self):
        # All messages in chronological (key) order.
        return self.data.values()

    def mark_read(self):
        # use update function to update inbox too
        self.update_new_messages_count(self.new_messages_count * -1)
        # update messages
        for message in self.data.values():
            message.new = False

    def update_new_messages_count(self, difference):
        # Apply `difference` to the local unread counter, clamped at zero.
        count = self.new_messages_count
        count = count + difference
        if count < 0:
            # FIXME: Error. Log?
            count = 0
        self.new_messages_count = count
        # update the inbox accordingly
        self.__parent__.update_new_messages_count(difference)

    def to_dict(self):
        # Summary of the conversation for serialization.
        member = api.user.get(self.username)
        return {'username': self.username,
                'fullname': member.getProperty('fullname'),
                'new_messages_count': self.new_messages_count}
class DataBucketStream(Document):
    """
    Represents data stored in many small files inside a "stream".
    Each file is "addressed" by its key similar to dict.

    Two trees are maintained: `_tree` maps keys to bucket payloads
    (wrapped in PersistentString), and `_long_index_tree` maps a
    monotonically increasing long index to each key, giving a stable
    insertion order.
    """

    meta_type = 'ERP5 Data Bucket Stream'
    portal_type = 'Data Bucket Stream'
    add_permission = Permissions.AddPortalContent

    # Declarative security
    security = ClassSecurityInfo()
    security.declareObjectProtected(Permissions.AccessContentsInformation)

    # Declarative properties
    property_sheets = (PropertySheet.CategoryCore,
                       PropertySheet.SortIndex)

    def __init__(self, id, **kw):
        self.initBucketTree()
        self.initIndexTree()
        Document.__init__(self, id, **kw)

    def __len__(self):
        return len(self._tree)

    def initBucketTree(self):
        """
        Initialize the Bucket Tree
        """
        self._tree = OOBTree()

    def initIndexTree(self):
        """
        Initialize the Index Tree
        """
        self._long_index_tree = LOBTree()

    def getMaxKey(self, key=None):
        """
        Return the maximum key, or None if the tree is empty.
        """
        try:
            return self._tree.maxKey(key)
        except ValueError:
            return None

    def getMaxIndex(self, index=None):
        """
        Return the maximum index, or None if the index tree is empty.
        """
        try:
            return self._long_index_tree.maxKey(index)
        except ValueError:
            return None

    def getMinKey(self, key=None):
        """
        Return the minimum key, or None if the tree is empty.
        """
        try:
            return self._tree.minKey(key)
        except ValueError:
            return None

    def getMinIndex(self, index=None):
        """
        Return the minimum index, or None if the index tree is empty.
        """
        try:
            return self._long_index_tree.minKey(index)
        except ValueError:
            return None

    def _getOb(self, id, *args, **kw):
        # Buckets are not Zope sub-objects; traversal must never find one.
        return None

    def getBucketByKey(self, key=None):
        """
        Get one bucket's payload by its key.
        """
        # Unwrap the PersistentString and return the raw payload.
        return self._tree[key].value

    def getBucketByIndex(self, index=None):
        """
        Get one bucket's payload by its position in the index tree.
        """
        key = self._long_index_tree[index]
        # BUGFIX: getBucketByKey() already unwraps the PersistentString,
        # so dereferencing `.value` a second time raised AttributeError.
        return self.getBucketByKey(key)

    def getBucket(self, key):
        log('DeprecationWarning: Please use getBucketByKey')
        return self.getBucketByKey(key)

    def hasBucketKey(self, key):
        """
        Whether a bucket with such key exists
        """
        return key in self._tree

    def hasBucketIndex(self, index):
        """
        Whether a bucket with such index exists
        """
        return self._long_index_tree.has_key(index)

    def insertBucket(self, key, value):
        """
        Insert one bucket
        """
        try:
            count = self._long_index_tree.maxKey() + 1
        except ValueError:
            # empty index tree: start counting at zero
            count = 0
        except AttributeError:
            # backward compatibility: streams created before the index
            # tree existed have no _long_index_tree attribute
            pass
        try:
            # The attribute lookup fails first on legacy streams, so the
            # possibly-undefined `count` is never reached in that case.
            self._long_index_tree.insert(count, key)
        except AttributeError:
            pass
        value = PersistentString(value)
        is_new_key = self._tree.insert(key, value)
        if not is_new_key:
            self.log("Reingestion of same key")
            self._tree[key] = value

    def getBucketKeySequenceByKey(self, start_key=None, stop_key=None,
                                  count=None, exclude_start_key=False,
                                  exclude_stop_key=False):
        """
        Get a lazy sequence of bucket keys
        """
        sequence = self._tree.keys(min=start_key, max=stop_key,
                                   excludemin=exclude_start_key,
                                   excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequenceByIndex(self, start_index=None, stop_index=None,
                                    count=None, exclude_start_index=False,
                                    exclude_stop_index=False):
        """
        Get a lazy sequence of bucket keys
        """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketKeySequenceByKey')
        return self.getBucketKeySequenceByKey(start_key=start_key,
                                              count=count)

    def getBucketIndexKeySequenceByIndex(self, start_index=None,
                                         stop_index=None, count=None,
                                         exclude_start_index=False,
                                         exclude_stop_index=False):
        """
        Get a lazy sequence of (index, key) pairs
        """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeySequence(self, sequence)

    def getBucketIndexSequenceByIndex(self, start_index=None,
                                      stop_index=None, count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """
        Get a lazy sequence of bucket indexes
        """
        sequence = self._long_index_tree.keys(min=start_index,
                                              max=stop_index,
                                              excludemin=exclude_start_index,
                                              excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByKey(self, start_key=None, stop_key=None,
                                    count=None, exclude_start_key=False,
                                    exclude_stop_key=False):
        """
        Get a lazy sequence of bucket values
        """
        sequence = self._tree.values(min=start_key, max=stop_key,
                                     excludemin=exclude_start_key,
                                     excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByIndex(self, start_index=None,
                                      stop_index=None, count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """
        Get a lazy sequence of bucket values
        """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexValueSequence(self, sequence)

    def getBucketValueSequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketValueSequenceByKey')
        return self.getBucketValueSequenceByKey(start_key=start_key,
                                                count=count)

    def getBucketKeyItemSequenceByKey(self, start_key=None, stop_key=None,
                                      count=None, exclude_start_key=False,
                                      exclude_stop_key=False):
        """
        Get a lazy sequence of bucket items
        """
        sequence = self._tree.items(min=start_key, max=stop_key,
                                    excludemin=exclude_start_key,
                                    excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketItemSequence(self, start_key=None, count=None,
                              exclude_start_key=False):
        log('DeprecationWarning: Please use getBucketKeyItemSequenceByKey')
        return self.getBucketKeyItemSequenceByKey(
            start_key=start_key, count=count,
            exclude_start_key=exclude_start_key)

    def getBucketIndexItemSequenceByIndex(self, start_index=None,
                                          stop_index=None, count=None,
                                          exclude_start_index=False,
                                          exclude_stop_index=False):
        """
        Get a lazy sequence of bucket items
        """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexItemSequence(self, sequence)

    def getBucketIndexKeyItemSequenceByIndex(self, start_index=None,
                                             stop_index=None, count=None,
                                             exclude_start_index=False,
                                             exclude_stop_index=False):
        """
        Get a lazy sequence of bucket items
        """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeyItemSequence(self, sequence)

    def getItemList(self):
        """
        Return a list of all key, value pairs
        """
        return list(self._tree.items())

    def getKeyList(self):
        """
        Return a list of all keys
        """
        return list(self._tree.keys())

    def getIndexList(self):
        """
        Return a list of all indexes
        """
        return list(self._long_index_tree.keys())

    def getIndexKeyTupleList(self):
        """
        Return a list of all (index, key) tuples
        """
        return list(self._long_index_tree.items())

    def getMd5sum(self, key):
        """
        Get hexdigest of bucket.
        """
        h = hashlib.md5()
        h.update(self.getBucketByKey(key))
        return h.hexdigest()

    def delBucketByKey(self, key):
        """
        Remove the bucket.
        """
        del self._tree[key]
        # Drop every index entry pointing at this key; iterate over a
        # materialized list because the tree is mutated during the loop.
        for index, my_key in list(self.getBucketIndexKeySequenceByIndex()):
            if my_key == key:
                del self._long_index_tree[index]

    def delBucketByIndex(self, index):
        """
        Remove the bucket.
        """
        key = self._long_index_tree[index]
        del self._tree[key]
        del self._long_index_tree[index]

    def rebuildIndexTreeByKeyOrder(self):
        """
        Clear and rebuild the index tree by order of keys
        """
        self.initIndexTree()
        for count, key in enumerate(self.getBucketKeySequenceByKey()):
            self._long_index_tree.insert(count, key)