class Poll(QonPersistent, Watchable):
    """A single Poll."""

    persistenceVersion = 1

    def __init__(self, creator, title, description, end_date, choices):
        """Create a poll. Choices is a list of strings."""
        Watchable.__init__(self)

        # PollContainer (set by container when added)
        self.container = None

        # User who created poll
        self.creator = creator

        # Date created/modified
        self.date = datetime.utcnow()

        # End date
        self.end_date = end_date

        # Title/Description
        self.title = title
        self.__description = CompressedText(description)
        self.__cached_html = PersistentCache(self._update_html_cache)

        # Other settings and vote data
        self.__data = PollData(choices)

        # cache of PollData items
        self.__num_votes_cast = None

        # poll index within container
        self.__item_index = None

    def upgradeToVersion1(self):
        self.title = iso_8859_to_utf_8(self.title)

    def get_data(self):
        return self.__data

    def get_description(self):
        return self.__description.get_raw()

    def set_description(self, raw):
        self.__description.set_raw(raw)
        self.invalidate_html_cache()

    def has_voted(self, user):
        """True if user has already voted."""
        return self.__data.has_voted(user)

    def user_vote(self, user):
        try:
            return self.__data.user_vote(user)
        except KeyError:
            return []

    def user_vote_choices(self, user):
        """Return list of strings representing user's choices."""
        choices = self.user_vote(user)
        pd = self.get_data()
        votes = []
        for v in choices:
            votes.append(pd.choices[v])
        return votes

    def user_votes_choices(self):
        """Return dict of {user_id: [string1, string2, ...]} of user votes."""
        vote_choice_data = self.__data.user_votes()
        vote_data = {}
        for user_id, choices in vote_choice_data.iteritems():
            vote_data[user_id] = [self.__data.choices[i] for i in choices]
        return vote_data

    def is_active(self, as_of=None):
        """Return True if Poll is currently active."""
        if not as_of:
            as_of = datetime.utcnow()

        is_active = as_of < self.end_date

        # check if we need to update our watchable_changed status
        if not is_active:
            if self.watchable_last_change() < self.end_date:
                self.watchable_changed()

        return is_active

    def cancel_poll(self, note=None):
        """Immediately end the poll, inserting note, if any, at the beginning
        of the description."""
        self.end_date = datetime.utcnow()
        if note:
            self.set_description(note + '\n\n\n' + self.get_description())

        # set changed when poll is canceled
        self.watchable_changed(self.end_date)

    def num_votes_cast(self):
        if not self.__num_votes_cast:
            self.__num_votes_cast = self.__data.num_votes_cast()
        return self.__num_votes_cast

    def valid_vote(self, user, choices):
        """Check various permissions to vote. If choices is not None, also
        checks validity of choices. Use can_vote to pre-check user's access
        before checking validity of choices.

        Returns:
             1: user is allowed to vote and choices includes required votes
             0: choices are not valid
            -1: user does not have access to vote
            -2: user has already voted
            -3: user has insufficient feedback score to vote
            -4: user has insufficient bank balance to vote
            -5: user joined ned.com after poll started
        """
        # general vote access
        if self.__data.vote_access == 'owner':
            if not self.container.ihb.is_owner(user):
                return -1
        elif self.__data.vote_access == 'member':
            if not self.container.ihb.is_member(user) and not self.container.ihb.is_owner(user):
                return -1

        # min karma
        if not self.enough_karma_to_vote(user):
            return -3

        # karma bank cost
        if user.get_karma_bank_balance() < self.__data.karma_cost:
            return -4

        # valid choices
        if choices and not self.__data.choices_valid(choices):
            return 0

        # check if trying to vote for no choices
        if choices is not None and not choices:
            return 0

        if not self.__data.voter_can_revise:
            if self.__data.has_voted(user):
                return -2

        if not self.old_enough_to_vote(user):
            return -5

        return 1

    def can_pay_for_vote(self, user):
        if not user:
            return False
        return user.can_give_karma(self.get_data().karma_cost)

    def enough_karma_to_vote(self, user):
        return user.get_karma_score() >= self.get_data().min_karma

    def old_enough_to_vote(self, user):
        return user.get_user_data().member_since() <= self.date

    def can_vote(self, user):
        return self.valid_vote(user, choices=None) == 1

    def can_see_intermediate_results(self, user):
        pd = self.get_data()
        vote_required = pd.vote_required_to_view
        has_voted = self.has_voted(user)

        if vote_required and not has_voted:
            return False
        if pd.intermediate_access == 'none':
            return False
        if pd.intermediate_access == 'all':
            return True
        if pd.intermediate_access == 'member':
            return self.container.ihb.is_member(user) or self.container.ihb.is_owner(user)
        if pd.intermediate_access == 'owner':
            return self.container.ihb.is_owner(user)
        return False

    def can_see_results(self, user):
        pd = self.get_data()
        if pd.results_access == 'all':
            return True
        if pd.results_access == 'member':
            return self.container.ihb.is_member(user) or self.container.ihb.is_owner(user)
        if pd.results_access == 'owner':
            return self.container.ihb.is_owner(user)
        return False

    def record_vote(self, user, choices):
        """Record a vote. Raises ValueError if the valid_vote() check fails.

        Fails silently if the poll is no longer open or if the user can't pay.
        """
        if not self.is_active():
            return

        if self.valid_vote(user, choices) != 1:
            raise ValueError

        # see if new vote is identical to previous vote
        if self.has_voted(user):
            if choices == self.user_vote(user):
                return

        if not _ROLLOUT_TEST:
            # try to pay for item
            try:
                user.pay_karma(self.get_data().karma_cost)
            except NoKarmaToGive:
                return

        self.__data.record_vote(user, choices)
        self.__num_votes_cast = None

        # karma credit
        user.karma_activity_credit()

    def choice_list_to_choices(self, choice_list):
        """Convert a list of strings to the corresponding list of indices.

        Raises ValueError if there is an invalid item in choice_list.
        """
        pd = self.get_data()
        choices = []
        for item in choice_list:
            choices.append(pd.choices.index(item))
        return choices

    def _set_item_index(self, index):
        """Used by PollContainer to set this item's index when added to the container."""
        self.__item_index = index

    def get_item_index(self):
        return self.__item_index

    def add_html_dependency(self, target):
        """Adds target as something self depends on for its HTML cache."""
        self.__cached_html.add_dependency(target)

    def invalidate_html_cache(self):
        self.__cached_html.flush()

    def get_cached_html(self):
        return self.__cached_html.get().get_raw()

    def _update_html_cache(self):
        from qon.ui.blocks.wiki import rst_to_html
        return CompressedText(str(rst_to_html(self.get_description(),
            wiki=self.container.ihb.get_wiki(),
            container=self)))

    def disable_cache(self):
        self.__cached_html.disable_cache()

    def cache_disabled(self):
        return self.__cached_html.cache_disabled()

    def watchable_name(self):
        return self.title

    def watchable_changed(self, now=None):
        # tells container it has changed, too
        Watchable.watchable_changed(self, now)
        self.container.watchable_changed(now)

    def can_read(self, user):
        return self.container.ihb.can_read(user)


class Wiki(QonPersistent, Watchable):

    persistenceVersion = 1

    _inactive_period = timedelta(days=7)
    _inactive_karma_discount = 1

    def __init__(self, group):
        Watchable.__init__(self)
        self.group = group
        self.pages = OOBTree.OOBTree()
        self.__cached_recent_changes = PersistentCache(self._update_recent_changes_cache)

        self.index_page = WikiPage(self, name='index')
        self.pages['index'] = self.index_page
        self.index_page.versions[-1].set_raw(_default_index_page % \
            dict(top_header='='*len(self.group.name),
                bottom_header='='*len(self.group.name),
                title=self.group.name,
                group_name=self.group.get_user_id()
                )
            )
        self.index_page.versions[-1].set_date(datetime.utcnow())
        self.index_page.versions[-1].set_author(self.group.owners[0])
        self.index_page.versions[-1].set_title(self.group.name)

        self.__uniques = OOBTree.OOBTree()
        self._create_default_pages()

    def upgradeToVersion1(self):
        self.__cached_recent_changes = PersistentCache(self._update_recent_changes_cache)
        self.version_upgrade_done()

    def new_page(self, name):
        page = WikiPage(self, name)
        name = clean_page_name(name)
        self.pages[name] = page
        self.watchable_changed(page.versions[-1].date)
        self._p_changed = 1
        return page

    def remove_page(self, page):
        del self.pages[page.name]
        if self.__uniques.get(page.name):
            del self.__uniques[page.name]

    def get_page(self, page_name):
        """Return page or None."""
        # jimc: not cleaning the page name led to case differences
        # wiping out an old workspace page.
        name = clean_page_name(page_name)
        if self.pages.has_key(name):
            return self.pages[name]
        return None

    def decay_inactive_items(self):
        """Call this daily to decay karma of inactive items.

        Returns a list of items that were decayed.
        """
        # XXX This could be refactored into an interface shared by
        # XXX Wiki.decay_inactive_items and Blog.decay_inactive_items
        decayed_items = []
        decay_time = datetime.utcnow() - self._inactive_period

        for page_name, item in self.pages.iteritems():
            if item.get_karma_score() > 0:
                if item.watchable_last_change() < decay_time:
                    item.add_anon_karma(-self._inactive_karma_discount)
                    decayed_items.append(item)

        return decayed_items

    def recent_changes(self):
        """Return list of pages sorted newest first."""
        return [page for date, page in self.recent_changes_with_date()]

    def _update_recent_changes_cache(self):
        bydate = []
        for name, page in self.pages.items():
            bydate.append((page.watchable_last_change(), page))

        bydate.sort()
        bydate.reverse()
        return bydate

    def recent_changes_with_date(self):
        return self.__cached_recent_changes.get()

    def recent_edits_by_author(self, author, count=10):
        """Return count pages most recently edited by author."""
        edits = []
        for name, page in self.pages.items():
            version = page.latest_edit_by(author)
            if version:
                edits.append(page)

        edits = sort_list(edits, lambda x: x.watchable_last_change(), count=count)
        return edits

    def recent_comments_by_author(self, author, count=10):
        """Return count most recent comments by author in this wiki.

        Returns list of (page, comment) tuples.
        """
        bydate = []
        for page in self.pages.values():
            bydate.extend([(c.date, page, c) for c in page.get_comments() if c.author is author])

        bydate.sort()
        comments = [(page, comment) for date, page, comment in bydate[-count:]]
        comments.reverse()
        return comments

    def num_pages(self):
        return len(self.pages)

    def num_active_pages(self, days=3):
        """Return the number of pages modified in the last `days` days.

        Also returns the date of the latest change, in case it's useful.
        """
        cutoff_date = datetime.utcnow() - timedelta(days=days)

        # active_pages = [p for p in self.pages if p.watchable_last_change() > cutoff_date]
        # return len(active_pages)    # slower because it creates a new list

        num = 0
        latest_date = never
        for n, p in self.pages.items():
            mod = p.watchable_last_change()
            if mod > cutoff_date:
                num += 1
            if mod > latest_date:
                latest_date = mod

        return (num, latest_date)

    def search_pages(self, text_to_find):
        """Search pages for text. Slow and lame. Fix me, please."""
        if text_to_find is None:
            return []

        matching = []
        text_to_find = text_to_find.strip().lower()
        for page_name in self.pages.keys():
            page = self.pages[page_name]
            if page.name.find(text_to_find) != -1:
                matching.append(page)
            elif page.versions[-1].title.find(text_to_find) != -1:
                matching.append(page)
            elif page.versions[-1].get_raw().lower().find(text_to_find) != -1:
                matching.append(page)

        return matching

    def references_to(self, page, all=1, all_groups=1):
        """Return pages which refer to page. If all is false, return the first match."""

        # if page has up-to-date inbound_references
        if hasattr(page, 'inbound_references') and page.inbound_references is not None:
            matching = []
            sorted_refs = _sort_refs(page.inbound_references, self.group)
            for ref in sorted_refs:
                if all:
                    matching.append(_ref_to_page(ref, self.group))
                else:
                    return _ref_to_page(ref, self.group)
            return matching

        # otherwise recompute inbound_references
        matching = []
        for n, p in self.pages.iteritems():
            if not p.outbound_references:
                continue
            for group, name in p.outbound_references:
                # For same-group references, group is None
                if group is None and (page.name == name):
                    if all:
                        matching.append(p)
                    else:
                        return p

        # now look through all other groups
        if all_groups:
            for group_id, group in get_group_database().root.iteritems():
                wiki = group.get_wiki()
                for n, p in wiki.pages.iteritems():
                    if not p.outbound_references:
                        continue
                    for refgroup, refname in p.outbound_references:
                        if refgroup is self.group and (page.name == refname):
                            if all:
                                matching.append(p)
                            else:
                                return p

        # convert to references format
        in_refs = [p.get_ref() for p in matching]

        # sort references for storage
        in_refs = _sort_refs(in_refs, self.group)

        # record inbound_references in page, since we just recomputed it all
        page.inbound_references = PersistentList(in_refs)

        # return sorted references
        matching = [_ref_to_page(ref, self.group) for ref in in_refs]
        return matching

    backlinks = references_to

    def is_orphan(self, page):
        """Return true if page has no references to it, unless it's the index page."""
        if page is self.index_page:
            return False
        if self.references_to(page, all=0):
            return False
        return True

    def highest_score_items(self, count=10):
        """Return list of pages with highest karma, highest first.

        Zeros are filtered out.
        """
        bykarma = []
        for n, p in self.pages.items():
            bykarma.append((p.get_karma_score(), p.watchable_last_change(), p))

        bykarma.sort()
        items = [p for karma, date, p in bykarma[-count:] if karma > 0]
        items.reverse()
        return items

    def orphans(self):
        """Return pages which are not referred to."""
        orphans = []
        for name, page in self.pages.items():
            if not self.references_to(page, all=0):
                orphans.append(page)

        bydate = [(p.watchable_modified_date(), p) for p in orphans]
        bydate.sort()
        bydate.reverse()

        return [p for d, p in bydate]

    def get_unique_name(self, page):
        """Return a unique name using page.name as prefix."""
        cur_index = self.__uniques.get(page.name, 0)
        cur_index += 1
        while self.pages.has_key(page.name + str(cur_index)):
            cur_index += 1
        self.__uniques[page.name] = cur_index
        return page.name + str(cur_index)

    def watchable_name(self):
        return self.group.name + ' Workspace'

    def watchable_changed(self, now=None):
        Watchable.watchable_changed(self, now)

        # flush recent changes cache
        self.__cached_recent_changes.flush()

        # group changed, too
        self.group.watchable_changed(now)

    def watchable_modified_date(self):
        return self.watchable_last_change()

    def can_read(self, user):
        return self.group.can_read(user)

    def _create_default_pages(self):
        pass
        if 0:
            # There should only be a single Punctuation Help page: /help/Punctuation Help
            page = self.new_page('Punctuation Help')
            page.versions[-1].set_raw(_punc_help)
            page.versions[-1].set_date(datetime.utcnow())
            page.versions[-1].set_author(self.group.owners[0])
            page.versions[-1].set_title('Punctuation Help')


class WikiPage(QonPersistent, Watchable, qon.karma.HasKarma, IHasBlog):

    persistenceVersion = 4

    def __init__(self, wiki, name=''):
        Watchable.__init__(self)
        qon.karma.HasKarma.__init__(self)
        self.wiki = wiki
        self.outbound_references = None
        self.inbound_references = None
        self.name = clean_page_name(name)
        self.versions = PersistentList()
        self.blog = Blog(self)
        self.locked_by_user = None
        self.__cached_html = PersistentCache(self._update_html_cache)
        self.__cached_html2 = PersistentCache(self._update_html2_cache)
        self.new_revision(force_new=1)

    def upgradeToVersion4(self):
        self.inbound_references = None
        self.version_upgrade_done()

    def upgradeToVersion3(self):
        self.__cached_html2 = PersistentCache(self._update_html2_cache)
        self.version_upgrade_done()

    def upgradeToVersion2(self):
        self.__cached_html = PersistentCache(self._update_html_cache)
        self.version_upgrade_done()

    def upgradeToVersion1(self):
        self.blog.ihb = self
        self.version_upgrade_done()

    def __repr__(self):
        return '<%s object at 0x%x: %s>' % (
            self.__module__ + '.' + self.__class__.__name__,
            id(self),
            self.name or "*no name*")

    def new_revision(self, set_date=True, author=None, title='', raw='', force_new=0):
        """Create a new revision for this page.

        Checks to make sure that the new text is actually different from the
        latest revision. If it's not, don't bother creating a new revision.
        """
        if force_new or (self.versions[-1].get_raw() != raw):
            w = WikiVersion(page=self, author=author, title=title, raw=raw)
            if set_date:
                w.set_date(datetime.utcnow())
            self.versions.append(w)
            self.watchable_changed(w.date)

            if author:
                author.karma_activity_credit()

            # before invalidating referring pages, we want to update the html
            # cache, which has the side effect of updating the outbound references.
            self.invalidate_html_cache()
            unused_html = self.get_cached_html()

            # may seem useless for new pages, but we could be creating
            # a new page that was referred to from another page somewhere
            self._invalidate_referring_pages()

            self._p_changed = 1

    def _invalidate_referring_pages(self, all_groups=0):
        """Invalidate HTML cache of pages which refer to this one."""
        # we changed default behavior to not scan all groups when invalidating.
        # this means that cross-group links to new pages created after this
        # change will not be accurate until the page(s) linking to the new
        # page are themselves modified.
        refs = self.wiki.references_to(self, all_groups=all_groups)
        for p in refs:
            p.invalidate_html_cache()

    def latest_edit_by(self, user):
        """Return latest edit by user, or None."""
        rvers = self.versions[:]
        rvers.reverse()
        for version in rvers:
            if version.author is user:
                return version
        return None

    def get_comments(self):
        """Return list of comments (BlogItems)."""
        blog_item = self.blog.get_item(0)
        if blog_item:
            return blog_item.get_comments()
        else:
            return []

    def get_revision(self, rev_id):
        """Return revision index rev_id or None."""
        rev_id = max(0, rev_id)
        try:
            rev = self.versions[rev_id]
        except IndexError:
            rev = None
        return rev

    def revision_index(self, version):
        """Return revision index of version, or raise ValueError."""
        return self.versions.index(version)

    def merge_revisions(self, base, old, new):
        """Merge the newest revision with an older revision, based on base.

        Returns (merged text, exit_code) or None.
        Base may be -1 to signify empty text.
        Exit code is 0 for no conflicts, or 1 if conflicts exist.
        """
        if len(self.versions) < 2:
            return None

        if base == -1:
            base_text = ''
        else:
            base_text = self.versions[base].get_raw()

        old_text = self.versions[old].get_raw()
        new_text = self.versions[new].get_raw()

        merger = Merger(base_text, old_text, new_text)
        merged = merger.merge('Revision %d' % base,
            'Revision %d' % old,
            'Revision %d' % new,
            )

        if not merged:
            return None

        exit_code = 0
        if merger.has_conflicts():
            exit_code = 1

        return (merged, exit_code)

    def watchable_name(self):
        # return self.wiki.group.name + ' ' + self.versions[-1].title
        return self.versions[-1].title or self.name

    def watchable_changed(self, now=None):
        # wiki changed, too
        Watchable.watchable_changed(self, now)
        self.wiki.watchable_changed(now)

    def watchable_modified_date(self):
        return self.watchable_last_change()

    def last_modified(self):
        sys.stderr.write('WARNING: using deprecated qon.wiki.WikiPage.last_modified.')
        return self.watchable_last_change()

    def who_has_lock(self):
        return self.locked_by_user

    def can_edit(self, user):
        """A page is editable only by a logged-in user who is allowed to edit
        within the group, and only if the page is not locked by someone else
        (the lock holder and group managers may still edit)."""

        # user must be logged in to edit
        if not user:
            return False

        # check lock
        if (self.locked_by_user) and (self.locked_by_user is not user) and (not self.can_manage(user)):
            return False

        if self.wiki.group.can_edit(user):
            return True

        return False

    def can_show(self):
        """Return False if this item should be suppressed due to feedback score."""
        if self.get_karma_score() < qon.karma.min_karma_to_show:
            return False
        return True

    def can_lock(self, user):
        """For now, let only a group owner lock/unlock a page. In the future,
        we may want to consider allowing the original page author to
        lock/unlock as well."""
        return self.wiki.group.is_owner(user)

    def lock(self, user):
        if self.can_lock(user):
            self.locked_by_user = user

    def unlock(self, user):
        if self.can_lock(user):
            self.locked_by_user = None

    def can_get_karma_from(self, other):
        return other is not None

    # HTML cache methods

    def add_html_dependency(self, target):
        """Adds target as something self depends on for its HTML cache."""
        self.__cached_html.add_dependency(target)
        self.__cached_html2.add_dependency(target)

    def invalidate_html_cache(self):
        self.__cached_html.flush()
        self.__cached_html2.flush()

    def get_cached_html(self):
        return self.__cached_html.get()

    def get_cached_html2(self):
        return self.__cached_html2.get()

    def _update_html_cache(self):
        v = self.versions[-1]
        html = v.raw_to_html(v.get_raw())

        # take this opportunity to update the page's outbound references
        if hasattr(v, '_v_references'):
            self.set_outbound_references(v._v_references)
            del v._v_references

        return html

    def _update_html2_cache(self):
        v = self.versions[-1]
        html = v.raw_to_html(v.get_raw(), suppress_tooltip=1)
        return html

    def disable_cache(self):
        self.__cached_html.disable_cache()
        self.__cached_html2.disable_cache()

    def cache_disabled(self):
        return self.__cached_html.cache_disabled() or self.__cached_html2.cache_disabled()

    def get_ref(self):
        """Return a reference (group, page_name) to this page, for use in
        outbound/inbound references."""
        return (self.wiki.group, self.name)

    def set_outbound_references(self, new_out_refs):
        """Record new outbound references."""

        # filter non-existent cross-group page refs out of new_out_refs.
        # this interacts with the change that no longer scans all groups
        # for references to new pages: if a cross-group link existed to a new
        # page, this method (pre-filtering) would have neglected to add the
        # inbound link from the cross-group reference, even if both pages had
        # been edited.
        l = []
        for r in new_out_refs:
            group_name, page_name = r
            if not group_name:
                l.append(r)
            else:
                page = _ref_to_page(r, self.wiki.group)
                if page:
                    l.append(r)
        new_out_refs = l

        # get old outbound refs
        old_out_refs = self.outbound_references or []

        # get two lists: items that used to be outbound references but
        # are no longer (old_not_new), and new outbound references that
        # weren't there before (new_not_old)
        old_not_new, new_not_old = xor_lists(old_out_refs, new_out_refs)

        # pre-fill reference to me
        me_ref = self.get_ref()

        # invalidate inbound references of pages that we no longer refer to
        for ref in old_not_new:
            page = _ref_to_page(ref, self.wiki.group)
            if page:    # added by Alex
                page.remove_inbound_reference(me_ref)

        # add inbound references for pages we've added outbound links to
        for ref in new_not_old:
            page = _ref_to_page(ref, self.wiki.group)
            if page:    # could be ref to new page
                page.add_inbound_reference(me_ref)

        # record new outbound references
        self.outbound_references = PersistentList()
        self.outbound_references.extend(new_out_refs)

    def remove_inbound_reference(self, ref):
        if self.inbound_references is not None:
            if ref in self.inbound_references:
                self.inbound_references.remove(ref)

    def add_inbound_reference(self, ref):
        if self.inbound_references is not None:
            if ref not in self.inbound_references:
                self.inbound_references.append(ref)

    # IHasBlog methods not implemented by other base classes

    def can_manage(self, user):
        """Who can manage this blog? Group owners."""
        return self.wiki.group.is_owner(user)

    def can_read(self, user):
        return self.wiki.can_read(user)

    def can_delete_item(self, item):
        """Can't delete item 0, which holds page comments."""
        if self.blog.get_item(0) is item:
            return False
        return True

    def can_create_item(self):
        """Users aren't allowed to create new topics in wiki pages."""
        return False

    def is_accepted(self):
        return self.wiki.group.is_accepted()

    def get_owners(self):
        return self.wiki.group.get_owners()

    def is_owner(self, user):
        return self.wiki.group.is_owner(user)

    def get_title(self):
        # this is here and in BlogItem
        return self.versions[-1].title or self.name

    def get_blog(self):
        return self.blog

    def get_wiki(self):
        return self.wiki

    def get_name(self):
        return self.name

    def get_all_owners(self):
        return self.get_owners()

    def get_all_blogs(self):
        return [self.blog]

    def get_member_list(self):
        return self.wiki.group.get_member_list()
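

# Illustrative sketch (not part of the original module): set_outbound_references()
# relies on xor_lists() to split the old and new reference lists into the entries
# that were dropped and the entries that were added. The helper below shows the
# assumed semantics with plain list comprehensions; the real xor_lists() lives
# elsewhere in qon, and this name is hypothetical.
def _diff_reference_lists(old_refs, new_refs):
    """Return (old_not_new, new_not_old) for two lists of (group, name) refs."""
    old_not_new = [r for r in old_refs if r not in new_refs]
    new_not_old = [r for r in new_refs if r not in old_refs]
    return (old_not_new, new_not_old)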


class BlogItem(QonPersistent, qon.karma.HasKarma, Watchable, HasComments):

    persistenceVersion = 6

    def __init__(self, blog, author, title, summary, main='', dont_watch=0):
        qon.karma.HasKarma.__init__(self)
        Watchable.__init__(self)
        HasComments.__init__(self)
        self.blog = blog
        self.__deleted = 0
        self.author = author
        self.title = title
        self.__summary = CompressedText(summary)
        if main:
            self.__main = CompressedText(main)
        else:
            self.__main = None
        self.__cached_html_summary = PersistentCache(self._update_html_cache)

        # history is a string, showing diffs as items are edited
        self.history = None

        self.date = datetime.utcnow()
        self.modified = None

        # for a comment, will point to parent blogitem upon add_comment(); otherwise None
        self.parent_blogitem = None

        if dont_watch:
            # comments aren't watchable
            self.not_watchable = 1
        else:
            # for watchable items only (not comments)
            self.__user_access = ConflictAvoidingOOBTree()

    def upgradeToVersion6(self):
        self.history = None

    def upgradeToVersion5(self):
        self.title = iso_8859_to_utf_8(self.title)

    def upgradeToVersion4(self):
        self.__deleted = self.deleted
        del self.deleted

        # upgrade HasComments
        HasComments._upgradeToVersion1(self)

        # elim __main's CompressedText
        if not self.get_main():
            self.__main = None

        self.version_upgrade_done()

    def upgradeToVersion3(self):
        if not self.not_watchable:
            self.__user_access = ConflictAvoidingOOBTree()

        # get rid of old Readers attribute
        if hasattr(self, '_BlogItem__readers'):
            del self.__readers

        self.version_upgrade_done()

    def upgradeToVersion2(self):
        # do self.parent_blogitem 2005-03-17
        self.parent_blogitem = None
        comments = HasComments.get_all_comments(self)
        for item in comments:
            item.parent_blogitem = self     # point comments back to self
        self.version_upgrade_done()

    def upgradeToVersion1(self):
        # compress text
        self.__summary = CompressedText(self.summary)
        self.__main = CompressedText(self.main)
        del self.summary
        del self.main

        # create cache
        self.__cached_html_summary = PersistentCache(self._update_html_cache)

        self.version_upgrade_done()

    def is_deleted(self, use_attr=False):
        if use_attr or not self.parent_blogitem:
            return self.__deleted
        return self.parent_blogitem.get_comment_flags(self)

    def set_deleted(self, val):
        self.__deleted = bool(val)

        # cache/copy deleted attribute into parent's comment_flags
        # for fast lookup, to avoid reading each comment from the db
        if self.parent_blogitem:
            self.parent_blogitem.set_comment_flags(self, bool(val))

        # alex added so that recent items cache for parent blog gets marked as dirty
        self.watchable_changed()

    def set_deleted_note(self, note):
        self._deleted_note = note

    def get_deleted_note(self):
        return getattr(self, '_deleted_note', None)

    def can_read(self, user):
        return self.blog.ihb.can_read(user)

    def can_edit(self, user):
        return (self.author is user) or self.can_manage(user)

    def can_delete(self, user):
        """Return True if user can delete this item."""
        # managers can always delete
        if self.can_manage(user):
            return True

        # authors can only delete if there are no undeleted comments
        if self.num_comments() == 0:
            return self.author is user

        return False

    def can_manage(self, user):
        """Return True if user can manage this item (usually a group owner)."""
        return self.blog.ihb and self.blog.ihb.can_manage(user)

    def can_show(self):
        """Return False if this item should be suppressed due to feedback score."""
        if self.get_karma_score() < qon.karma.min_karma_to_show:
            return False
        if self.author.get_karma_score() < qon.karma.min_author_karma:
            return False
        return True

    def why_cant_show(self):
        """Only really useful when can_show() == False.

        Returns the reason for an item not being shown.
        Return value is ('item' | 'user', fbscore).
        """
        if self.get_karma_score() < qon.karma.min_karma_to_show:
            return ('item', self.get_karma_score())
        if self.author.get_karma_score() < qon.karma.min_author_karma:
            return ('user', self.author.get_karma_score())
        return ()

    def last_modified(self, consider_comments=True):
        if consider_comments:
            # this Watchable changes whenever a comment is added
            dt = self.watchable_last_change()
            if dt is never:
                dt = self.modified or self.date
            return dt
        else:
            return self.modified or self.date

    def new_comment(self, author, title, summary, main=''):
        """Create a new comment item and return it."""
        comment = BlogItem(blog=self.blog,
            author=author,
            title=title,
            summary=summary,
            main=main,
            dont_watch=1)

        # Check to see if this new comment is a duplicate
        # of the previous comment. If so, ignore it, since
        # it's probably unintended, and just return None.
        if HasComments.is_duplicate(self, comment):
            comment = None
        else:
            self.add_comment(comment)

            # avoid 'bogus new to me'
            comment.read_item(author, datetime.utcnow())

            author.karma_activity_credit()

        return comment

    def notify_karma_changed(self):
        """Called by HasKarma.add_karma."""
        # also delegate to watchable_changed
        # self.watchable_changed()    # removed by alex to keep blogitems from boldfacing when left feedback
        if self.blog and hasattr(self.blog, 'notify_karma_changed'):
            self.blog.notify_karma_changed()

    def add_comment(self, comment):
        comment.parent_blogitem = self
        HasComments.add_comment(self, comment)
        self.watchable_changed(comment.date)

    def watchable_name(self):
        return self.title

    def watchable_changed(self, now=None):
        # tells blog it has changed, too
        Watchable.watchable_changed(self, now)
        if self.blog:
            self.blog.watchable_changed(now)

    def watchable_modified_date(self):
        return self.last_modified()

    def can_get_karma_from(self, other):
        return other is not self.author

    def get_summary(self):
        return self.__summary.get_raw()

    def set_summary(self, raw):
        self._log_summary_change()
        self.__summary.set_raw(raw)
        self.invalidate_html_cache()

    def get_main(self):
        if not self.__main:
            return ''
        return self.__main.get_raw()

    def set_main(self, raw):
        if not self.__main:
            self.__main = CompressedText()
        self.__main.set_raw(raw)

    def _log_summary_change(self):
        import qon.log
        if hasattr(self.blog.ihb, 'get_user_id'):
            qon.log.edit_info('SetSummary\t%s\n%s' % (self.blog.ihb.get_user_id(), self.get_summary()))
        else:
            qon.log.edit_info('SetSummary2\t%s\n%s' % (self.blog.ihb.get_name(), self.get_summary()))

    # HTML cache methods

    def add_html_dependency(self, target):
        """Adds target as something self depends on for its HTML cache."""
        self.__cached_html_summary.add_dependency(target)

    def invalidate_html_cache(self):
        self.__cached_html_summary.flush()

    def get_cached_html(self):
        return self.__cached_html_summary.get().get_raw()

    def _update_html_cache(self):
        from qon.ui.blocks.wiki import rst_to_html
        return CompressedText(str(rst_to_html(self.get_summary(),
            wiki=self.blog.ihb.get_wiki(),
            container=self)))

    def disable_cache(self):
        self.__cached_html_summary.disable_cache()

    def cache_disabled(self):
        return self.__cached_html_summary.cache_disabled()

    def read_item(self, user, now=None):
        """Notice that user has accessed this item.

        If we are a comment, we pass this on to our parent item, to catch it
        up based on the comment's submission date.
        """
        if not hasattr(self, "_BlogItem__user_access"):
            # we don't keep track of user access -- we're a comment,
            # so just pass this on to our parent.
            if self.parent_blogitem:
                return self.parent_blogitem.read_item(user, self.date)
            return

        if not user:
            return

        now = now or datetime.utcnow()
        user_oid = unpack_oid(user._p_oid)
        dt, count = self.__user_access.get(user_oid, (never, 0))
        now = max(now, dt)

        # increment hit count
        self.__user_access[user_oid] = (now, count + 1)

    def has_read_item(self, user, updated=None):
        if not hasattr(self, "_BlogItem__user_access"):
            return False
        if not user:
            return True

        dt, count = self._get_user_access(user)
        if count == 0:
            return False

        if updated:
            return dt >= updated
        else:
            return True

    def last_read(self, user):
        """Return datetime when user last read this item."""
        dt, count = self._get_user_access(user)
        return dt

    def _get_user_access(self, user):
        if not hasattr(self, "_BlogItem__user_access"):
            return (never, 0)

        user_oid = unpack_oid(user._p_oid)
        return self.__user_access.get(user_oid, (never, 0))

    def item_views(self):
        """Returns (number of views, number of readers)."""
        views = 0
        readers = 0
        for user_oid, (dt, count) in self.__user_access.iteritems():
            if count > 0:   # alex added if on 2006-10-09
                readers += 1
                views += count
        return (views, readers)

    def get_pageview_counts_per_user(self):
        """Return a list of (user, count) tuples."""
        return_list = []
        for user_oid, (dt, count) in self.__user_access.iteritems():
            if count > 0:
                return_list.append((get_oid(pack_oid(user_oid)), count))
        return return_list

    def get_title(self):
        # this is here and in WikiPage
        return self.title
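

# Illustrative sketch (not part of the original module): BlogItem.__user_access
# maps an unpacked user OID to a (last_read_datetime, view_count) tuple. The
# plain-dict mock below mirrors how read_item()/has_read_item() update and query
# that mapping; the function and variable names here are assumptions for
# demonstration only, and a plain dict stands in for ConflictAvoidingOOBTree.
def _demo_user_access_tracking():
    access = {}
    never_read = datetime.min       # stands in for the module-level `never`

    def read(user_oid, now):
        dt, count = access.get(user_oid, (never_read, 0))
        access[user_oid] = (max(now, dt), count + 1)

    def has_read(user_oid, updated=None):
        dt, count = access.get(user_oid, (never_read, 0))
        if count == 0:
            return False
        if updated:
            return dt >= updated
        return True

    read(42, datetime.utcnow())
    return has_read(42)             # True once at least one read is recorded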