def group_set_owners_by_user_id(user, group, owner_ids):
    """Replace group's owners with the users named by owner_ids.

    Ids that do not resolve to a user are silently skipped; duplicate ids
    are considered only once, preserving first-seen order.

    Raises AccessError if user may not manage group.
    """
    if not group_can_manage(user, group):
        raise AccessError
    user_db = get_user_database()
    users = []
    seen = {}
    for _id in owner_ids:
        # membership test via `in` (dict.has_key is deprecated)
        if _id not in seen:
            u = user_db.get_user(_id)
            if u:
                users.append(u)
            seen[_id] = 1
    get_group_database().set_group_owners(group, users)
def add_member(self, user):
    """Add user as member.

    Removes user from invited_users list if applicable.
    Calls self.notify_members_changed if it exists.
    """
    # retire any outstanding invitation keyed directly on the user object
    if user in self.invited_users:
        del self.invited_users[user]
        get_group_database().invitations.remove_all_user_invitations(user, self)

    # ...and any invitations keyed on one of the user's e-mail addresses
    if isinstance(user, HasEmail):
        for addr in user.email_list():
            if addr in self.invited_users:
                del self.invited_users[addr]

    self.__members.add_member(user)
    self.notify_members_changed()
def internal_method(self, login_bundle, data):
    """For internal use only.

    data: struct
        method: string
        params: struct

    Raises xmlrpclib.Fault on malformed data, bad login, or a caller
    who is not a member of the api-admin group.
    """
    # NOTE: the original had a second bare triple-quoted string after the
    # docstring (a dead statement); its contents are merged above.
    cur_user = self._check_login(login_bundle)

    # sanity check args before proceeding in order to provide a
    # sanitized error message (dict.has_key is deprecated; use `in`)
    if type(data) is not dict or 'method' not in data or 'params' not in data:
        raise xmlrpclib.Fault(FAULT_NO_ACCESS, 'Invalid data.')
    if type(data['params']) is not dict:
        raise xmlrpclib.Fault(FAULT_NO_ACCESS, 'Invalid data.')

    # check api-admin membership
    group = get_group_database().get_group('api-admin')
    if group and not group.is_member(cur_user):
        raise xmlrpclib.Fault(FAULT_INVALID_LOGIN,
            'User must be member of http://www.ned.com/group/api-admin/')

    if data['method'] in ['_cookie_login']:
        return self._internal_cookie_to_login(data['params'])

    raise xmlrpclib.Fault(FAULT_NO_ACCESS, 'Invalid access.')
def check_ip_monitor(user, ipaddress):
    """Called from ui whenever a user signs in.

    Notifies us if a user signs in under an IP address she has never
    signed in under before, and that IP address is in the monitored list.
    The list of monitored IPs and the emails of folks to be notified are
    kept in a workspace page in sitedev called admin_ip_monitor.
    """
    # first check if the user is signing in under a new ip address.
    # if not, don't bother continuing
    for known_ip, _times in user.get_ip_addresses().iteritems():
        if ipaddress == known_ip:
            return

    # now get the special page; monitoring is best-effort, so a missing
    # group/page simply disables it (narrowed from a bare except, which
    # also swallowed KeyboardInterrupt/SystemExit)
    try:
        fullpage = get_group_database()['sitedev'].get_wiki().pages['admin_ip_monitor'].versions[-1].get_raw()
    except Exception:
        return

    # and parse it for ips and emails (raw strings so the backslash
    # escapes reach the regex engine intact)
    ips_text, emails_text = fullpage.split("Notify:")
    ips = re.compile(r'(\d+\.\d+\.\d+\.\d+)').findall(ips_text)
    emails = re.compile(r'([a-zA-Z0-9_\-]+@[a-zA-Z0-9_\-]+\.[a-zA-Z0-9_\-]+)').findall(emails_text)

    # if the ipaddress is being monitored, then send out an alert email
    # (the unused `import smtplib` was removed; sendmail does the work)
    if emails and (ipaddress in ips):
        message = '%s (%s) signed in from %s' % (user.get_user_id(), user.display_name(), ipaddress)
        sendmail("IP Monitor Alert", message, emails)
def get_page_template(name, format):
    """Attempt to fetch contents of page 'name' from template group.

    format must be 'text' or 'html'. Returns None if page does not exist
    or is empty. If 'html' format is requested, returns htmltext object.

    By convention, template page names start with '_live_tmpl_' and this
    function will prepend this prefix automatically.
    """
    prefix = '_live_tmpl_'
    if not name.startswith(prefix):
        name = prefix + name

    try:
        page = get_group_database()['sitedev'].wiki.pages[name]
    except KeyError:
        return None

    # a cleared page can still carry cached html, so decide emptiness
    # from the raw text of the latest revision
    raw = page.versions[-1].get_raw()
    if not raw:
        return None

    if format == 'html':
        return quixote.html.htmltext(page.get_cached_html())
    return raw
def karma_given_by(user):
    """Return a list of all HasKarma items which have received karma from user."""
    from qon.base import get_group_database, get_user_database

    results = []

    def record(item):
        # remember (item, points) whenever user has given this item karma
        points = item.karma_points_from(user)
        if points:
            results.append((item, points))

    def scan_blog(blog):
        # a blog contributes its items plus every comment on each item
        for entry in blog.get_items():
            record(entry)
            for comment in entry.get_all_comments():
                record(comment)

    # users themselves can hold karma, plus their personal blogs
    for uid, a_user in get_user_database().root.iteritems():
        record(a_user)
        scan_blog(a_user.blog)

    # group blogs, wiki pages, and each page's discussion blog
    for gid, a_group in get_group_database().root.iteritems():
        scan_blog(a_group.blog)
        for pid, page in a_group.wiki.pages.iteritems():
            record(page)
            scan_blog(page.blog)

    return results
def group_purge_unsponsored():
    """Remove groups which have not been sponsored by qon.group.Group._time_to_sponsor."""
    now = datetime.utcnow()
    deadline = qon.group.Group._time_to_sponsor
    for pending in get_group_database().mod_queue.pending_items():
        # purge anything that has sat in the moderation queue too long
        if now - pending.date > deadline:
            group_purge(pending)
def most_read_items(self):
    """Return list of most-widely-read discussion items and user news items."""
    _days_cutoff = 3

    # serve the memoized list if we already computed it
    if self.__most_read_items:
        return self.__most_read_items

    candidates = []

    # discussions
    for group_id, group in get_group_database().root.iteritems():
        candidates.extend(group.blog.items_with_reader_counts())
    get_connection().cacheGC()

    # user news
    for user_id, user in get_user_database().root.iteritems():
        candidates.extend(user.blog.items_with_reader_counts())
    get_connection().cacheGC()

    # weed out items that haven't been updated in the last 3 days
    # (or else this list doesn't change much, and shows very old content)
    cutoff_date = datetime.utcnow() - timedelta(days=_days_cutoff)
    candidates = [c for c in candidates
                  if not c[1].is_deleted()
                  and c[1].last_modified(consider_comments=True) > cutoff_date]

    # keep only the top N, highest counts first
    candidates.sort()
    candidates = candidates[-self._watched_items_count:]
    candidates.reverse()

    self.__most_read_items = candidates
    return candidates
def user_new(email):
    """Create a new user account for email and send the welcome notices.

    Returns (email, message), where message is the welcome e-mail body.
    """
    # create user and get the initial password in plaintext.
    email = _unicode_fix(email)
    user, password = get_user_database().new_user_from_email(email)
    transaction_commit(None, 'NewUser')  # moved from user.ptl/NewUserForm.commit()

    # send email
    e = url_quote(email)
    p = url_quote(password)
    s = url_quote("Sign in")
    # NOTE(review): the auto_login_url expression had its concatenation
    # operators replaced by '******' (apparently a secret-scrubbing tool),
    # which is a syntax error; reconstructed from the quoted e/p/s values.
    message = _(_new_user_message) % dict(
        email=email,
        password=password,
        auto_login_url=messages.login_url + "?email=" + e + "&password=" + p +
            "&submit-login=" + s + "&from_url=Y")
    extra_headers = ['Content-Type: text/html']  # because of the href
    sendmail("Welcome to ned.com", message, [email], extra_headers=extra_headers)

    # send pm using _live_tmpl_pm_new_user in sitedev as the template
    template_text = qon.util.get_page_template('pm_new_user', format='text')
    if template_text:
        message_anon_send(user, "Welcome to ned.com!", template_text, True)

    # add orientation page to all new users' watch lists; best effort --
    # a missing help group/page must not break account creation
    try:
        orientation = get_group_database()['help'].get_wiki().pages['start_here']
        user.get_watch_list().watch_item(orientation)
    except Exception:
        pass

    qon.search.searchengine.notify_new_user(user)
    return (email, message)
def resolve_page_name(refname):
    """Return (Group, clean_name, skip).

    Group is None if missing or invalid. Skip is non-zero if refname was
    /group-name/Page Name, and points to the second slash.
    """
    group_name = None
    skip_index = 0
    parsed_full_name = False

    # full form: /group_name/page name
    if refname.startswith('/'):
        end = refname.find('/', 1)
        if end != -1:
            group_name = refname[1:end].lower()
            skip_index = end + 1
            page_name = clean_page_name(refname[skip_index:])
            parsed_full_name = True

    if not parsed_full_name:
        page_name = clean_page_name(refname)

    # look up group by name; unknown names yield None
    group = None
    if group_name:
        try:
            group = get_group_database()[group_name]
        except KeyError:
            group = None

    return (group, page_name, skip_index)
def delete_all_tags(i_am_sure=False):
    """Destructively reset every tag store: global, per-group, and per-user.

    Does nothing unless i_am_sure is true.
    """
    if not i_am_sure:
        return

    # globally
    del get_tags_database().tags
    get_tags_database().tags = OOBTree.OOBTree()

    # global reverse mapping
    del get_tagged_item_database().root
    get_tagged_item_database().root = OOBTree.OOBTree()

    # for each group, ensure tags are empty
    for group_id, group in get_group_database().root.iteritems():
        del group.tags
        group.tags = OOBTree.OOBTree()

    # and for each user that actually has a non-empty tag store
    for user_id, user in get_user_database().root.iteritems():
        if hasattr(user, "tags") and len(user.tags) > 0:
            del user.tags
            user.tags = OOBTree.OOBTree()

    # get_transaction().commit()
def get_wiki(self):
    """Users don't have wikis; return Community-General's wiki (or None)."""
    fallback_group = get_group_database().root.get('community-general', None)
    return fallback_group.wiki if fallback_group else None
def upgrade_invalidate_html_caches():
    """Invalidate all HTML caches"""
    from base import get_group_database

    # walk every page of every group's wiki and drop its cached html
    for group in get_group_database().root.values():
        for page in group.wiki.pages.values():
            page.invalidate_html_cache()
    transaction_commit()
def references_to(self, page, all=1, all_groups=1): """Return pages which refer to page. If all is false, returns first match.""" # if page has up-to-date inbound_references if hasattr(page, 'inbound_references') and page.inbound_references is not None: matching = [] sorted_refs = _sort_refs(page.inbound_references, self.group) for ref in sorted_refs: if all: matching.append(_ref_to_page(ref, self.group)) else: return _ref_to_page(ref, self.group) return matching # otherwise recompute inbound_references matching = [] for n, p in self.pages.iteritems(): if not p.outbound_references: continue for group, name in p.outbound_references: # For same-group references, group is None if group is None and (page.name == name): if all: matching.append(p) else: return p # now look through all other groups if all_groups: for group_id, group in get_group_database().root.iteritems(): wiki = group.get_wiki() for n, p in wiki.pages.iteritems(): if not p.outbound_references: continue for refgroup, refname in p.outbound_references: if refgroup is self.group and (page.name == refname): if all: matching.append(p) else: return p # convert to references format in_refs = [p.get_ref() for p in matching] # sort references for storage in_refs = _sort_refs(in_refs, self.group) # record inbound_references in page, since we just recomputed it all page.inbound_references = PersistentList(in_refs) # return sorted references matching = [_ref_to_page(ref, self.group) for ref in in_refs] return matching
def upgrade_refresh_html_caches():
    """Refresh all html caches."""
    from base import get_group_database
    group_db = get_group_database()
    for g in group_db.root.values():
        for p in g.wiki.pages.values():
            # called for its caching side effect; the returned html was
            # previously bound to an unused local
            p.get_cached_html()
    transaction_commit()
def group_atom_id(self, login_bundle, group_id):
    """Given a short group id (e.g. 'community-general') return the
    corresponding group's atom_tag, which is required by other API calls.

    Returns None if no such group exists.
    """
    # validate the caller's credentials first
    self._check_login(login_bundle)
    group = get_group_database().get_group(group_id)
    return atom_id(group) if group else None
def get_all_groups(self, login_bundle):
    """Return array of all groups (atom_tags).

    List may include groups that are not active or not accessible by user.
    """
    # login check is for its side effects (authentication) only
    self._check_login(login_bundle)
    root = get_group_database().root
    return [atom_id(group) for group_id, group in root.iteritems()]
def add_group_to_wiki_page_blog():
    """Add group reference to each instance of Blog in each WikiPage"""
    from base import get_group_database

    for group in get_group_database().root.values():
        for page in group.wiki.pages.values():
            # only fill in the back-reference where it is missing/unset
            if not page.blog.ihb:
                page.blog.ihb = group
def recalculate_recent_activity(self, user):
    """EXPENSIVE recalculation of all recent user activity.

    Rebuilds the cached per-user lists of recent blog items, blog
    comments, wiki pages, wiki comments, and personal-blog comments by
    scanning every active group and every user blog.  Results are stored
    on private attributes; nothing is returned.
    """
    import blog

    # source material: blogs/wikis of all active groups, plus every
    # user's personal blog
    active_groups = get_group_database().active_groups()
    users = get_user_database().root.values()
    group_blogs = [g.blog for g in active_groups]
    group_wikis = [g.wiki for g in active_groups]
    user_blogs = [u.blog for u in users]

    # recent blog items; reverse() flips the helper's ordering --
    # presumably to newest-first, TODO confirm against blog module
    recent = PersistentList([(i.date, i)
        for i in blog.recent_items_by_author(group_blogs, user, count=self._recent_count)])
    recent.reverse()
    self.__recent_blog_items = recent

    # recent blog comments, stored as (date, comment, parent-item)
    recent = PersistentList([(i.date, i, parent)
        for i, parent in blog.recent_comments_by_author(group_blogs, user, count=self._recent_count)])
    recent.reverse()
    self.__recent_blog_comments = recent

    # recent wiki pages, keyed by each page's last-change timestamp
    pages = []
    for wiki in group_wikis:
        pages.extend([(p.watchable_last_change(), p)
            for p in wiki.recent_edits_by_author(user, count=self._recent_count)])
    pages.reverse()
    self.__recent_wiki_pages = PersistentList(pages)
    del pages

    # recent wiki comments, stored as (date, comment, page)
    comments = []
    for wiki in group_wikis:
        comments.extend([(c.date, c, p)
            for p, c in wiki.recent_comments_by_author(user, count=self._recent_count)])
    comments.reverse()
    self.__recent_wiki_comments = PersistentList(comments)
    del comments

    # recent personal comments (on user blogs rather than group blogs)
    recent = PersistentList([(i.date, i, parent)
        for i, parent in blog.recent_comments_by_author(user_blogs, user, count=self._recent_count)])
    recent.reverse()
    self.__recent_personal_comments = recent

    # drop the large temporaries promptly -- this walk touches a lot of
    # persistent objects
    del users
    del active_groups
    del group_blogs
    del user_blogs
def upgrade_inbound_refs():
    """Recreate all pages' inbound_references."""
    from base import get_group_database
    group_db = get_group_database()
    for g in group_db.root.values():
        for p in g.wiki.pages.values():
            # references_to() recomputes and stores p.inbound_references
            # as a side effect; its return value (previously bound to an
            # unused local) is not needed
            g.wiki.references_to(p)
        # commit and log progress after each group
        transaction_commit()
        print(g.get_user_id())
def upgrade_invalidate_outbound_refs():
    """Invalidate all outbound references due to type change."""
    from base import get_group_database

    for group in get_group_database().root.values():
        for page in group.wiki.pages.values():
            # clearing outbound refs stales the rendered html too
            page.outbound_references = None
            page.invalidate_html_cache()
    transaction_commit()
def upgrade_wiki_raw_text_format():
    """Compress all old revisions."""
    from base import get_group_database
    import transaction

    for group in get_group_database().root.values():
        for page in group.wiki.pages.values():
            # round-trip each revision through set_raw() so it is
            # re-stored in the new format
            for revision in page.versions:
                revision.set_raw(revision.get_raw())
            # subtransaction commit per page to bound memory use
            transaction.commit(True)
def total_group_pms(self):
    """Get the number of group pms, and the number of recipients of all group pms

    group_pms, group_pm_recipients = self.total_group_pms()
    """
    group_pms, num_recipients = (0, 0)
    # loop variable renamed from the misleading `user_id` -- this
    # iterates the *group* database
    for group_id, group in get_group_database().root.iteritems():
        # hoisted: get_total_group_pms() was previously called twice per group
        pms = group.get_total_group_pms()
        group_pms += pms
        num_recipients += pms * group.get_num_members()
    return (group_pms, num_recipients)
def is_member(self, user_or_email, slow=False):
    """True if user_or_email resolves to a user who is a member of this group."""
    user = get_user_database().resolve_user(user_or_email)
    if not user:
        # user not in database
        return False
    if slow:
        return self.__members.is_member(user)
    # group_db can do a faster membership check than my own list of members
    return self in get_group_database().member_groups(user)
def group_create(user, **kwargs):
    """Create a group. See qon.group_db.GroupDB.create_group for arguments.

    Raises AccessError if user is missing or may not create groups.
    Returns the created group (or whatever create_group returned).
    """
    group_db = get_group_database()
    if not user or not group_db.can_create_group(user):
        raise AccessError

    # reuse the handle fetched above (was a second get_group_database() call);
    # the dead `if 0:` blog-announcement block was removed
    group = group_db.create_group(**kwargs)
    if group:
        group_begin_sponsorship(group)
        transaction_commit(None, 'CreateGroup')  # moved from group/form.ptl/NewGroupForm.commit()
        qon.search.searchengine.notify_new_group(group)
    return group
def add_invitation(self, user_or_email, inviter):
    """Invite user_or_email to join group.

    Note that invitations can be to any object or to e-mails, though
    usually here they are e-mails.

    Raises NotEnoughPrivileges if inviter may not invite.
    """
    # don't add invitation if user_or_email is already a member or an owner
    existing = get_user_database().resolve_user(user_or_email)
    if existing:
        if self.is_member(existing):
            return
        # use hasattr because is_owner is not a HasMembership method
        if hasattr(self, 'is_owner') and self.is_owner(existing):
            return

    if not self.__members.can_invite(inviter):
        raise NotEnoughPrivileges

    self.invited_users[user_or_email] = inviter
    # tell GroupDB about the invitation
    get_group_database().invitations.add_invitation(user_or_email, self)
def sole_owned_group_members(self, user):
    """Return list of group members that are solely owned by user."""
    solo = []
    for group in get_group_database().owned_groups(user):
        if not self.is_member(group):
            continue
        # group qualifies when no *other* owner of it is also one of our
        # members (manual flag loop replaced with any())
        other_owner_present = any(
            owner != user and self.is_member(owner)
            for owner in group.owners)
        if not other_owner_present:
            solo.append(group)
    return solo
def group_decay_inactive_karma():
    """Decay karma of inactive blog items."""
    decayed = []
    for group_id, group in get_group_database().root.iteritems():
        decayed += group.blog.decay_inactive_items()
        decayed += group.wiki.decay_inactive_items()
    transaction_commit(None, 'DecayInactive')

    # notify the search engine that the items' karma changed.
    # The reason we do the notification here, rather than in blog.decay_inactive_items() itself.
    # is that it's nice keeping all the search engine notifications localized
    # to this file, rather than sprinkled in the kernel.
    for item in decayed:
        qon.search.searchengine.notify_karma_given(item)
def calc_karma_total_blogitems(self):
    """Compute karma totals for all discussion topics and comments.

    Walks every group discussion and every user's personal news blog,
    accumulating plus/minus karma for topics and for comments, plus a
    per-author (positive, negative) comment-karma breakdown.  Results
    are stored on the karma-stats object and logged; nothing returned.
    """
    def process_topic(item):
        # accumulate topic-level karma into the shared totals dict
        totals['topic_plus'] += item.karma_plus_received()
        totals['topic_minus'] += item.karma_minus_received()

    def process_comment(comment):
        totals['comment_plus'] += comment.karma_plus_received()
        totals['comment_minus'] += comment.karma_minus_received()

        # capture comment karma for user: a positive total and a negative total
        author_id = comment.author.get_user_id()
        score = comment.get_karma_score()
        if score != 0:
            # get current totals
            _tot = user_comment_karma.get(author_id, (0, 0))
            if score > 0:
                user_comment_karma[author_id] = (_tot[0] + score, _tot[1])
            elif score < 0:
                user_comment_karma[author_id] = (_tot[0], _tot[1] + score)

    totals = dict(topic_plus=0, topic_minus=0, comment_plus=0, comment_minus=0)
    # author_id -> (positive total, negative total)
    user_comment_karma = {}

    # group discussions (loop var is named user_id but iterates groups)
    for user_id, group in get_group_database().root.iteritems():
        for item in group.blog.get_all_items():
            process_topic(item)
            for comment in item.get_all_comments():
                process_comment(comment)
    # release ZODB object-cache pressure after the big walk
    get_connection().cacheGC()

    # personal news
    for user_id, user in get_user_database().root.iteritems():
        for item in user.get_blog().get_all_items():
            process_topic(item)
            for comment in item.get_all_comments():
                process_comment(comment)
    get_connection().cacheGC()

    self.__karma_stats.total_topic = (totals['topic_plus'], totals['topic_minus'])
    self.__karma_stats.total_comment = (totals['comment_plus'], totals['comment_minus'])
    qon.log.stats_info('KarmaStats\ttopic:%d,%d' % self.__karma_stats.total_topic)
    qon.log.stats_info('KarmaStats\tcomment:%d,%d' % self.__karma_stats.total_comment)
    self.__karma_stats.user_content_totals = OOBTree.OOBTree(user_comment_karma)
def karma_total_page(self):
    """Returns (total positive, total negative)."""
    # serve the memoized totals if already computed
    cached = self.__karma_stats.total_page
    if cached is not None:
        return cached

    plus, minus = 0, 0
    for group_id, group in get_group_database().root.iteritems():
        for name, page in group.wiki.pages.iteritems():
            plus += page.karma_plus_received()
            minus += page.karma_minus_received()

    self.__karma_stats.total_page = (plus, minus)
    qon.log.stats_info('KarmaStats\tpage:%d,%d' % self.__karma_stats.total_page)
    return self.__karma_stats.total_page