class Client(Evolvable):
    """A backup client, keyed by hostname and registered in the data root."""

    version = 2

    @evolve(1, 2)
    def add_job_configs(self):
        # Migration 1 -> 2: clients persisted before version 2 lack the
        # job_configs list; create it on evolve.
        self.job_configs = PersistentList()

    def __init__(self, hostname, description='', connection=None):
        self.hostname = hostname
        self.description = description
        self.connection = connection
        # Persistent containers for this client's jobs and archives.
        self.jobs = LOBTree()
        self.archives = OOBTree()
        self.job_configs = PersistentList()
        # Register the freshly created client in the global data root.
        data_root().clients[hostname] = self

    def latest_job(self):
        """Return the most recent job, or None when no jobs exist."""
        try:
            newest_key = self.jobs.maxKey()
        except ValueError:
            # maxKey() raises ValueError on an empty tree.
            return None
        return self.jobs[newest_key]

    class Form(forms.Form):
        hostname = forms.CharField(validators=[slug_validator])
        description = forms.CharField(widget=forms.Textarea, required=False, initial='')
class Tasks(BaseFolder):
    """ Container for tasks. """
    implements(ITasks)
    allowed_contexts = ()  # Not manually addable
    content_type = 'Tasks'
    display_name = _(u"Tasks")
    custom_accessors = {'title': 'get_title'}

    def __init__(self, data=None, **kwargs):
        super(Tasks, self).__init__(data=data, **kwargs)
        # task id (int) -> suggested name (unicode). Records every id ever
        # handed out so ids are never reused, even after task deletion.
        self.__task_ids__ = LOBTree()

    def get_title(self, default='', key=None):
        # Accessor wired in via custom_accessors; the container's title is
        # always its display name. `default` and `key` are part of the
        # accessor signature and deliberately ignored.
        return self.display_name

    def mark_task_id(self):
        """Reserve the next free numeric task id and return it as a
        unicode string, suitable as the name for a new task.

        Raises ValueError (via add_task_id) if the id is somehow taken.
        """
        # Renamed local from `id` to avoid shadowing the builtin.
        if len(self.__task_ids__) == 0:
            task_id = 1  # Start at 1
        else:
            task_id = self.__task_ids__.maxKey() + 1
        suggest_name = unicode(task_id)
        self.add_task_id(task_id, suggest_name)
        return suggest_name

    def add_task_id(self, id, value):
        """Record `id` as taken, mapping it to `value` (the suggested name).

        Raises ValueError when the id already exists. The parameter is
        still called `id` to keep the public signature unchanged.
        """
        if id in self.__task_ids__:
            raise ValueError("id %s already exist in %s" % (id, self))
        self.__task_ids__[id] = value
class CompassTool(SimpleItem):
    """Store payloads keyed by long UNIX timestamps, with paged access."""
    # XXX: all time values here should be stored as UTC,
    # and converted back into the proper "local" timezone
    # (which might differ per request) upon extraction from the tool.
    # But Zope and Plone support for all this must be investigated.

    def __init__(self):
        # timestamp (long, seconds since the epoch) -> payload
        self.data = LOBTree()

    def __getitem__(self, tstamp):
        tstamp = long(tstamp)
        return self.data[tstamp]

    def __contains__(self, tstamp):
        return long(tstamp) in self.data

    def __len__(self):
        # BTrees track their own length; the previous
        # len(self.data.keys()) materialized every key just to count them.
        return len(self.data)

    def keys(self, start, step, descending=True):
        """Return a page of at most `step` keys, skipping the first `start`
        entries counted from the newest (descending) or oldest (ascending)
        end. Descending pages are returned newest-first."""
        # WARNING: I'm totally relying on the output of keys() to be sorted,
        # which is the case, but I couldn't find any formal guarantee
        raw_keys = self.data.keys()
        if not descending:
            return list(raw_keys[start:start + step])
        if start == 0:
            # -0 is just 0, so an upper bound of -start would produce an
            # empty slice; take the unbounded tail instead.
            slice_ = raw_keys[-(start + step):]
        else:
            slice_ = raw_keys[-(start + step):-start]
        slice_ = list(slice_)
        slice_.reverse()
        return slice_

    def max_key(self):
        """Return the newest timestamp, or None when the tool is empty."""
        try:
            return self.data.maxKey()
        except ValueError:
            return None

    def add(self, data):
        """Store `data` under the current time and return the key used.

        NOTE(review): two adds within the same second share a key, so the
        second overwrites the first — confirm whether that is acceptable.
        """
        now = long(time.time())
        self.data[now] = data
        return now

    def remove(self, tstamp):
        """Delete the entry at `tstamp`; silently ignore unknown keys."""
        tstamp = long(tstamp)
        if tstamp in self.data:
            del self.data[tstamp]
class CompassTool(SimpleItem):
    """Container of payloads addressed by long UNIX timestamps."""
    # XXX: all time values here should be stored as UTC,
    # and converted back into the proper "local" timezone
    # (which might differ per request) upon extraction from the tool.
    # But Zope and Plone support for all this must be investigated.

    def __init__(self):
        self.data = LOBTree()

    def __getitem__(self, tstamp):
        return self.data[long(tstamp)]

    def __contains__(self, tstamp):
        return long(tstamp) in self.data

    def __len__(self):
        return len(self.data.keys())

    def keys(self, start, step, descending=True):
        # WARNING: relies on BTree keys() coming back sorted; true in
        # practice, though not formally guaranteed anywhere.
        ordered = self.data.keys()
        if not descending:
            return [key for key in ordered[start:start + step]]
        if start == 0:
            window = ordered[-(start + step):]
        else:
            window = ordered[-(start + step):-(start)]
        page = [key for key in window]
        page.reverse()
        return page

    def max_key(self):
        """Newest key, or None when the tree is empty."""
        try:
            return self.data.maxKey()
        except ValueError:
            return None

    def add(self, data):
        """File `data` under the current second and return that key."""
        now = long(time.time())
        self.data[now] = data
        return now

    def remove(self, tstamp):
        """Drop the entry at `tstamp` when present; no-op otherwise."""
        tstamp = long(tstamp)
        if tstamp in self.data:
            del self.data[tstamp]
class Conversation(Traversable, Persistent, Explicit):
    """A conversation is a container for all comments on a content object.

    It manages internal data structures for comment threading and efficient
    comment lookup.
    """

    implements(IConversation)

    # Allow unrestricted (trusted-code-style) access from templates/scripts.
    __allow_access_to_unprotected_subobjects__ = True

    def __init__(self, id="++conversation++default"):
        self.id = id
        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()
        # id -> comment - find comment by id
        self._comments = LOBTree()
        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()

    def getId(self):
        """Get the id of the conversation. This is used to construct a URL.
        """
        return self.id

    def enabled(self):
        # Delegate the "is commenting enabled" decision to the parent's
        # @@conversation_view.
        parent = aq_inner(self.__parent__)
        return parent.restrictedTraverse('@@conversation_view').enabled()

    @property
    def total_comments(self):
        return len(self._comments)

    @property
    def last_comment_date(self):
        # maxKey() raises ValueError on an empty tree; KeyError/AttributeError
        # guard against a missing comment or one without creation_date.
        try:
            return self._comments[self._comments.maxKey()].creation_date
        except (ValueError, KeyError, AttributeError,):
            return None

    @property
    def commentators(self):
        return self._commentators

    def objectIds(self):
        return self._comments.keys()

    def getComments(self, start=0, size=None):
        """Get unthreaded comments.

        `start` is a minimum comment id (ids are long microsecond
        timestamps), not a positional offset; `size` bounds how many
        comments are yielded.

        NOTE(review): the `count > size` check lets size + 1 comments
        through before stopping -- off-by-one relative to getThreads(),
        which yields exactly `size` threads. Confirm intent.
        """
        count = 0l
        for comment in self._comments.values(min=start):
            # Yield the acquisition wrapped comment
            yield self[comment.id]
            count += 1
            if size and count > size:
                return

    def getThreads(self, start=0, size=None, root=0, depth=None):
        """Get threaded comments
        """

        def recurse(comment_id, d=0):
            # Yield the current comment before we look for its children
            yield {'id': comment_id, 'comment': self[comment_id], 'depth': d}

            # Recurse if there are children and we are not out of our depth
            if depth is None or d + 1 < depth:
                children = self._children.get(comment_id, None)
                if children is not None:
                    for child_id in children:
                        for value in recurse(child_id, d+1):
                            yield value

        # Find top level threads
        comments = self._children.get(root, None)
        if comments is not None:
            count = 0l
            for comment_id in comments.keys(min=start):
                # Abort if we have found all the threads we want
                count += 1
                if size and count > size:
                    return
                # Let the closure recurse
                for value in recurse(comment_id):
                    yield value

    def addComment(self, comment):
        """Add a new comment. The parent id should have been set already. The
        comment id may be modified to find a free key. The id used will be
        returned.
        """
        # Make sure we don't have a wrapped object
        comment = aq_base(comment)

        # Key comments by microsecond timestamp; probe forward on the
        # (unlikely) collision.
        id = long(time.time() * 1e6)
        while id in self._comments:
            id += 1
        comment.comment_id = id
        notify(ObjectWillBeAddedEvent(comment, self, id))
        self._comments[id] = comment
        comment.__parent__ = aq_base(self)

        # Record unique users who've commented (for logged in users only)
        commentator = comment.author_username
        if commentator:
            if not commentator in self._commentators:
                self._commentators[commentator] = 0
            self._commentators[commentator] += 1

        reply_to = comment.in_reply_to
        if not reply_to:
            # top level comments are in reply to the faux id 0
            comment.in_reply_to = reply_to = 0

        # Register this comment as a child of its parent.
        if not reply_to in self._children:
            self._children[reply_to] = LLSet()
        self._children[reply_to].insert(id)

        # Add the annotation if not already done
        annotions = IAnnotations(self.__parent__)
        if not ANNOTATION_KEY in annotions:
            annotions[ANNOTATION_KEY] = aq_base(self)

        # Notify that the object is added. The object must here be
        # acquisition wrapped or the indexing will fail.
        notify(ObjectCreatedEvent(comment))
        notify(ObjectAddedEvent(comment.__of__(self), self, id))
        notify(ContainerModifiedEvent(self))

        return id

    # Dict API

    def __len__(self):
        return len(self._comments)

    def __contains__(self, key):
        return long(key) in self._comments

    def __getitem__(self, key):
        """Get an item by its long key
        """
        return self._comments[long(key)].__of__(self)

    def __delitem__(self, key, suppress_container_modified=False):
        """Delete an item by its long key.

        Children are removed recursively; `suppress_container_modified`
        is set for those recursive calls so only one ContainerModifiedEvent
        fires for the whole subtree.
        """
        key = long(key)

        comment = self[key].__of__(self)
        commentator = comment.author_username

        notify(ObjectWillBeRemovedEvent(comment, self, key))

        # Remove all children
        for child_id in self._children.get(key, []):
            # avoid sending ContainerModifiedEvent multiple times
            self.__delitem__(child_id, suppress_container_modified=True)

        # Remove the comment from _comments
        self._comments.pop(key)

        # NOTE(review): self._children[key] itself is never deleted here,
        # and for recursive (suppressed) calls the child is not removed
        # from its parent's LLSet either -- possible stale entries; confirm.

        # Remove this comment as a child of its parent
        if not suppress_container_modified:
            parent = comment.in_reply_to
            if parent is not None:
                parent_children = self._children.get(parent, None)
                if parent_children is not None and key in parent_children:
                    parent_children.remove(key)

        # Remove commentators
        if commentator and commentator in self._commentators:
            if self._commentators[commentator] <= 1:
                del self._commentators[commentator]
            else:
                self._commentators[commentator] -= 1

        notify(ObjectRemovedEvent(comment, self, key))

        if not suppress_container_modified:
            notify(ContainerModifiedEvent(self))

    def __iter__(self):
        return iter(self._comments)

    def get(self, key, default=None):
        # Return the acquisition-wrapped comment, or `default` untouched.
        comment = self._comments.get(long(key), default)
        if comment is default:
            return default
        return comment.__of__(self)

    def keys(self):
        return self._comments.keys()

    def items(self):
        return [(i[0], i[1].__of__(self),) for i in self._comments.items()]

    def values(self):
        return [v.__of__(self) for v in self._comments.values()]

    def iterkeys(self):
        return self._comments.iterkeys()

    def itervalues(self):
        for v in self._comments.itervalues():
            yield v.__of__(self)

    def iteritems(self):
        for k, v in self._comments.iteritems():
            yield (k, v.__of__(self),)
class DataBucketStream(Document):
    """
    Represents data stored in many small files inside a "stream".
    Each file is "addressed" by its key similar to dict.

    Two BTrees are maintained: `_tree` maps key -> PersistentString
    payload, and `_long_index_tree` maps a monotonically increasing
    insertion index -> key.
    """

    meta_type = 'ERP5 Data Bucket Stream'
    portal_type = 'Data Bucket Stream'
    add_permission = Permissions.AddPortalContent

    # Declarative security
    security = ClassSecurityInfo()
    security.declareObjectProtected(Permissions.AccessContentsInformation)

    # Declarative properties
    property_sheets = (PropertySheet.CategoryCore,
                       PropertySheet.SortIndex)

    def __init__(self, id, **kw):
        self.initBucketTree()
        self.initIndexTree()
        Document.__init__(self, id, **kw)

    def __len__(self):
        return len(self._tree)

    def initBucketTree(self):
        """ Initialize the Bucket Tree (key -> payload) """
        self._tree = OOBTree()

    def initIndexTree(self):
        """ Initialize the Index Tree (insertion index -> key) """
        self._long_index_tree = LOBTree()

    def getMaxKey(self, key=None):
        """ Return the maximum key, or None when the tree is empty """
        try:
            return self._tree.maxKey(key)
        except ValueError:
            return None

    def getMaxIndex(self, index=None):
        """ Return the maximum index, or None when the tree is empty """
        try:
            return self._long_index_tree.maxKey(index)
        except ValueError:
            return None

    def getMinKey(self, key=None):
        """ Return the minimum key, or None when the tree is empty """
        try:
            return self._tree.minKey(key)
        except ValueError:
            return None

    def getMinIndex(self, index=None):
        """ Return the minimum index, or None when the tree is empty """
        try:
            return self._long_index_tree.minKey(index)
        except ValueError:
            return None

    def _getOb(self, id, *args, **kw):
        # Buckets are not Zope sub-objects; hide them from OFS traversal.
        return None

    def getBucketByKey(self, key=None):
        """ Get one bucket's payload by key """
        return self._tree[key].value

    def getBucketByIndex(self, index=None):
        """ Get one bucket's payload by insertion index """
        key = self._long_index_tree[index]
        # BUGFIX: getBucketByKey() already unwraps the PersistentString
        # (it returns ._tree[key].value); the former trailing ".value"
        # raised AttributeError on the returned string.
        return self.getBucketByKey(key)

    def getBucket(self, key):
        log('DeprecationWarning: Please use getBucketByKey')
        return self.getBucketByKey(key)

    def hasBucketKey(self, key):
        """ Whether a bucket with such key exists """
        return key in self._tree

    def hasBucketIndex(self, index):
        """ Whether a bucket with such index exists """
        return self._long_index_tree.has_key(index)

    def insertBucket(self, key, value):
        """ Insert one bucket, indexing it under the next free index.

        Reingesting an existing key overwrites the payload and logs a
        warning; the key still receives a fresh index entry.
        """
        try:
            count = self._long_index_tree.maxKey() + 1
        except ValueError:
            # Empty index tree: start numbering at 0.
            count = 0
        except AttributeError:
            # Legacy instances created before the index tree existed.
            pass
        try:
            self._long_index_tree.insert(count, key)
        except AttributeError:
            # Legacy instances: no index tree to maintain.
            pass
        value = PersistentString(value)
        is_new_key = self._tree.insert(key, value)
        if not is_new_key:
            self.log("Reingestion of same key")
            self._tree[key] = value

    def getBucketKeySequenceByKey(self, start_key=None, stop_key=None,
                                  count=None, exclude_start_key=False,
                                  exclude_stop_key=False):
        """ Get a lazy sequence of bucket keys """
        sequence = self._tree.keys(min=start_key, max=stop_key,
                                   excludemin=exclude_start_key,
                                   excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequenceByIndex(self, start_index=None, stop_index=None,
                                    count=None, exclude_start_index=False,
                                    exclude_stop_index=False):
        """ Get a lazy sequence of bucket keys """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketKeySequenceByKey')
        return self.getBucketKeySequenceByKey(start_key=start_key, count=count)

    def getBucketIndexKeySequenceByIndex(self, start_index=None,
                                         stop_index=None, count=None,
                                         exclude_start_index=False,
                                         exclude_stop_index=False):
        """ Get a lazy sequence of (index, key) tuples """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeySequence(self, sequence)

    def getBucketIndexSequenceByIndex(self, start_index=None,
                                      stop_index=None, count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """ Get a lazy sequence of bucket indexes """
        sequence = self._long_index_tree.keys(min=start_index,
                                              max=stop_index,
                                              excludemin=exclude_start_index,
                                              excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByKey(self, start_key=None, stop_key=None,
                                    count=None, exclude_start_key=False,
                                    exclude_stop_key=False):
        """ Get a lazy sequence of bucket values """
        sequence = self._tree.values(min=start_key, max=stop_key,
                                     excludemin=exclude_start_key,
                                     excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByIndex(self, start_index=None,
                                      stop_index=None, count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """ Get a lazy sequence of bucket values """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexValueSequence(self, sequence)

    def getBucketValueSequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketValueSequenceByKey')
        return self.getBucketValueSequenceByKey(start_key=start_key,
                                                count=count)

    def getBucketKeyItemSequenceByKey(self, start_key=None, stop_key=None,
                                      count=None, exclude_start_key=False,
                                      exclude_stop_key=False):
        """ Get a lazy sequence of (key, value) items """
        sequence = self._tree.items(min=start_key, max=stop_key,
                                    excludemin=exclude_start_key,
                                    excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketItemSequence(self, start_key=None, count=None,
                              exclude_start_key=False):
        log('DeprecationWarning: Please use getBucketKeyItemSequenceByKey')
        return self.getBucketKeyItemSequenceByKey(
            start_key=start_key, count=count,
            exclude_start_key=exclude_start_key)

    def getBucketIndexItemSequenceByIndex(self, start_index=None,
                                          stop_index=None, count=None,
                                          exclude_start_index=False,
                                          exclude_stop_index=False):
        """ Get a lazy sequence of (index, value) items """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexItemSequence(self, sequence)

    def getBucketIndexKeyItemSequenceByIndex(self, start_index=None,
                                             stop_index=None, count=None,
                                             exclude_start_index=False,
                                             exclude_stop_index=False):
        """ Get a lazy sequence of (index, key, value) items """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeyItemSequence(self, sequence)

    def getItemList(self):
        """ Return a list of all key, value pairs """
        return list(self._tree.items())

    def getKeyList(self):
        """ Return a list of all keys """
        return list(self._tree.keys())

    def getIndexList(self):
        """ Return a list of all indexes """
        return list(self._long_index_tree.keys())

    def getIndexKeyTupleList(self):
        """ Return a list of all (index, key) tuples """
        return list(self._long_index_tree.items())

    def getMd5sum(self, key):
        """ Get hexdigest of bucket. """
        h = hashlib.md5()
        h.update(self.getBucketByKey(key))
        return h.hexdigest()

    def delBucketByKey(self, key):
        """ Remove the bucket and every index entry pointing at it. """
        del self._tree[key]
        # Reingestion can leave several indexes referencing the same key,
        # so the whole index tree must be scanned.
        for index, my_key in list(self.getBucketIndexKeySequenceByIndex()):
            if my_key == key:
                del self._long_index_tree[index]

    def delBucketByIndex(self, index):
        """ Remove the bucket addressed by the given index. """
        key = self._long_index_tree[index]
        del self._tree[key]
        del self._long_index_tree[index]

    def rebuildIndexTreeByKeyOrder(self):
        """ Clear and rebuild the index tree by order of keys """
        self.initIndexTree()
        for count, key in enumerate(self.getBucketKeySequenceByKey()):
            self._long_index_tree.insert(count, key)
class Repository(Evolvable):
    """A borg repository tracked in the data root."""

    version = 2

    @evolve(1, 2)
    def add_job_configs(self):
        # Migration 1 -> 2: repositories persisted before version 2 lack
        # the job_configs list; create it on evolve.
        self.job_configs = PersistentList()

    def __init__(self, name, url, description='', repository_id='', remote_borg='borg'):
        self.name = name
        self.url = url
        self.description = description
        self.repository_id = repository_id
        self.remote_borg = remote_borg
        # Persistent containers for jobs, archives and job configurations.
        self.jobs = LOBTree()
        self.archives = OOBTree()
        self.job_configs = PersistentList()

    @property
    def location(self):
        return Location(self.url)

    def latest_job(self):
        """Return the most recent job, or None when there are none."""
        try:
            newest_key = self.jobs.maxKey()
        except ValueError:
            # maxKey() raises ValueError on an empty tree.
            return None
        return self.jobs[newest_key]

    def __str__(self):
        return self.name

    @staticmethod
    def oid_get(oid):
        """Return the repository with the given OID; raise KeyError if absent."""
        for candidate in data_root().repositories:
            if candidate.oid == oid:
                return candidate
        raise KeyError

    class Form(forms.Form):
        name = forms.CharField()
        description = forms.CharField(widget=forms.Textarea, required=False)
        url = forms.CharField(
            help_text=_('For example /data0/repository or user@storage:/path.'),
            label=_('URL'))
        repository_id = forms.CharField(min_length=64, max_length=64,
                                        label=_('Repository ID'))
        remote_borg = forms.CharField(
            help_text=_(
                'Remote borg binary name (only applies to remote repositories).'
            ),
            initial='borg',
        )

    class ChoiceField(forms.ChoiceField):
        """Form field whose value is a Repository instance, keyed by OID."""

        @staticmethod
        def get_choices():
            for repository in data_root().repositories:
                yield repository.oid, str(repository)

        def __init__(self, **kwargs):
            super().__init__(choices=self.get_choices, **kwargs)

        def clean(self, value):
            # The cleaned OID string is resolved back to its repository.
            value = super().clean(value)
            for candidate in data_root().repositories:
                if candidate.oid == value:
                    return candidate
            raise ValidationError(self.error_messages['invalid_choice'],
                                  code='invalid_choice')

        def prepare_value(self, value):
            # Render the repository as its OID; empty values stay empty.
            return value.oid if value else None
class Conversation(Traversable, Persistent, Explicit):
    """A conversation is a container for all comments on a content object.

    It manages internal data structures for comment threading and efficient
    comment lookup.
    """

    implements(IConversation, IHideFromBreadcrumbs)

    # Allow unrestricted (trusted-code-style) access from templates/scripts.
    __allow_access_to_unprotected_subobjects__ = True

    def __init__(self, id="++conversation++default"):
        self.id = id
        # username -> count of comments; key is removed when count reaches 0
        self._commentators = OIBTree()
        # id -> comment - find comment by id
        self._comments = LOBTree()
        # id -> LLSet (children) - find all children for a given comment.
        # 0 signifies root.
        self._children = LOBTree()

    def getId(self):
        """Get the id of the conversation. This is used to construct a URL.
        """
        return self.id

    def enabled(self):
        # Delegate the "is commenting enabled" decision to the parent's
        # @@conversation_view.
        parent = aq_inner(self.__parent__)
        return parent.restrictedTraverse('@@conversation_view').enabled()

    @property
    def total_comments(self):
        # Count only comments the anonymous user may View, so moderated or
        # private comments are excluded from the public total.
        public_comments = [x for x in self._comments.values() if
                           user_nobody.has_permission('View', x)]
        return len(public_comments)

    @property
    def last_comment_date(self):
        # maxKey() raises ValueError on an empty tree; KeyError/AttributeError
        # guard against a missing comment or one without creation_date.
        try:
            return self._comments[self._comments.maxKey()].creation_date
        except (
                ValueError,
                KeyError,
                AttributeError,
        ):
            return None

    @property
    def commentators(self):
        return self._commentators

    def objectIds(self):
        return self._comments.keys()

    def getComments(self, start=0, size=None):
        """Get unthreaded comments.

        `start` is a minimum comment id (ids are long microsecond
        timestamps), not a positional offset; `size` bounds how many
        comments are yielded.

        NOTE(review): the `count > size` check lets size + 1 comments
        through before stopping -- off-by-one relative to getThreads(),
        which yields exactly `size` threads. Confirm intent.
        """
        count = 0l
        for comment in self._comments.values(min=start):
            # Yield the acquisition wrapped comment
            yield self[comment.id]
            count += 1
            if size and count > size:
                return

    def getThreads(self, start=0, size=None, root=0, depth=None):
        """Get threaded comments
        """

        def recurse(comment_id, d=0):
            # Yield the current comment before we look for its children
            yield {'id': comment_id, 'comment': self[comment_id], 'depth': d}

            # Recurse if there are children and we are not out of our depth
            if depth is None or d + 1 < depth:
                children = self._children.get(comment_id, None)
                if children is not None:
                    for child_id in children:
                        for value in recurse(child_id, d + 1):
                            yield value

        # Find top level threads
        comments = self._children.get(root, None)
        if comments is not None:
            count = 0l
            for comment_id in comments.keys(min=start):
                # Abort if we have found all the threads we want
                count += 1
                if size and count > size:
                    return
                # Let the closure recurse
                for value in recurse(comment_id):
                    yield value

    def addComment(self, comment):
        """Add a new comment. The parent id should have been set already. The
        comment id may be modified to find a free key. The id used will be
        returned.
        """
        # Make sure we don't have a wrapped object
        comment = aq_base(comment)

        # Key comments by microsecond timestamp; probe forward on the
        # (unlikely) collision.
        id = long(time.time() * 1e6)
        while id in self._comments:
            id += 1
        comment.comment_id = id
        notify(ObjectWillBeAddedEvent(comment, self, id))
        self._comments[id] = comment
        comment.__parent__ = aq_base(self)

        # Record unique users who've commented (for logged in users only)
        commentator = comment.author_username
        if commentator:
            if not commentator in self._commentators:
                self._commentators[commentator] = 0
            self._commentators[commentator] += 1

        reply_to = comment.in_reply_to
        if not reply_to:
            # top level comments are in reply to the faux id 0
            comment.in_reply_to = reply_to = 0

        # Register this comment as a child of its parent.
        if not reply_to in self._children:
            self._children[reply_to] = LLSet()
        self._children[reply_to].insert(id)

        # Add the annotation if not already done
        annotions = IAnnotations(self.__parent__)
        if not ANNOTATION_KEY in annotions:
            annotions[ANNOTATION_KEY] = aq_base(self)

        # Notify that the object is added. The object must here be
        # acquisition wrapped or the indexing will fail.
        notify(ObjectCreatedEvent(comment))
        notify(ObjectAddedEvent(comment.__of__(self), self, id))
        notify(ContainerModifiedEvent(self))

        return id

    # Dict API

    def __len__(self):
        return len(self._comments)

    def __contains__(self, key):
        return long(key) in self._comments

    def __getitem__(self, key):
        """Get an item by its long key.

        Unlike the plain LOBTree, a key that cannot be converted to a
        long yields None instead of raising.
        """
        try:
            comment_id = long(key)
        except ValueError:
            return
        return self._comments[comment_id].__of__(self)

    def __delitem__(self, key, suppress_container_modified=False):
        """Delete an item by its long key.

        Children are removed recursively; `suppress_container_modified`
        is set for those recursive calls so only one ContainerModifiedEvent
        fires for the whole subtree.
        """
        key = long(key)

        comment = self[key].__of__(self)
        commentator = comment.author_username

        notify(ObjectWillBeRemovedEvent(comment, self, key))

        # Remove all children
        for child_id in self._children.get(key, []):
            # avoid sending ContainerModifiedEvent multiple times
            self.__delitem__(child_id, suppress_container_modified=True)

        # Remove the comment from _comments
        self._comments.pop(key)

        # NOTE(review): self._children[key] itself is never deleted here,
        # and for recursive (suppressed) calls the child is not removed
        # from its parent's LLSet either -- possible stale entries; confirm.

        # Remove this comment as a child of its parent
        if not suppress_container_modified:
            parent = comment.in_reply_to
            if parent is not None:
                parent_children = self._children.get(parent, None)
                if parent_children is not None and key in parent_children:
                    parent_children.remove(key)

        # Remove commentators
        if commentator and commentator in self._commentators:
            if self._commentators[commentator] <= 1:
                del self._commentators[commentator]
            else:
                self._commentators[commentator] -= 1

        notify(ObjectRemovedEvent(comment, self, key))

        if not suppress_container_modified:
            notify(ContainerModifiedEvent(self))

    def __iter__(self):
        return iter(self._comments)

    def get(self, key, default=None):
        # Return the acquisition-wrapped comment, or `default` untouched.
        comment = self._comments.get(long(key), default)
        if comment is default:
            return default
        return comment.__of__(self)

    def keys(self):
        return self._comments.keys()

    def items(self):
        return [(
            i[0],
            i[1].__of__(self),
        ) for i in self._comments.items()]

    def values(self):
        return [v.__of__(self) for v in self._comments.values()]

    def iterkeys(self):
        return self._comments.iterkeys()

    def itervalues(self):
        for v in self._comments.itervalues():
            yield v.__of__(self)

    def iteritems(self):
        for k, v in self._comments.iteritems():
            yield (
                k,
                v.__of__(self),
            )