def save(self, editor, newtext, rev, **kw):
    """Auto-create a user's group subpages when the user saves his own
    homepage, and deny edits to those subpages by non-admins.

    Same interface as Permissions.save: returns False to deny the save,
    otherwise defers to the base class.
    """
    request = self.request
    username = request.user.name
    pagename = editor.page_name

    if grouppage_autocreate and username == pagename:
        # create group pages when a user saves his own homepage
        for page in grouppages:
            grouppagename = "%s/%s" % (username, page)
            grouppage = Page(request, grouppagename)
            if not grouppage.exists():
                text = """\
#acl %(username)s:read,write,delete,revert
 * %(username)s
""" % locals()
                # BUGFIX: use a distinct name for the group-page editor.
                # The original rebound `editor` here, so the base-class
                # save at the end received the last group-page editor
                # instead of the caller's editor.
                group_editor = PageEditor(request, grouppagename)
                group_editor._write_file(text)

    parts = pagename.split('/')
    if len(parts) == 2:
        # Saving a subpage: deny if it is a protected group subpage and
        # the user lacks auto-admin rights on it.
        subpage = parts[1]
        if subpage in grouppages and not self.admin(pagename):
            return False

    # No problem to save if my base class agrees
    return Permissions.save(self, editor, newtext, rev, **kw)
def save(self, editor, newtext, rev, **kw):
    """Score newtext with the Bayes classifier, let the base class save,
    retrain when a training page ("HamPages"/"SpamPages") is saved, and
    force-revert saves whose score marks them as suspected spam.

    Returns the base-class save result, or False when the saved page was
    reverted as suspected spam.
    """
    self.open_spamdb(editor.request)
    score = self.sbayes.score(newtext)
    save_result = Permissions.save(self, editor, newtext, rev, **kw)
    if save_result and editor.page_name in ("HamPages", "SpamPages"):
        # A training page changed: rebuild the classifier from it.
        self.retrain(editor.request)
        return save_result
    if score < self.ham_cutoff:
        # File checks out spamwise. Return the default save result.
        return save_result
    if not save_result:
        # Nothing was written, so there is nothing to revert.
        return save_result
    # Now the fun begins. We scored the page and found that it is
    # either possible or probable spam. However, we saved it. (We
    # wanted to do that so we would have a copy to score later.) We
    # need to revert the save and send the URL of the suspect page
    # to the users in AdminGroup. To make matters worse, the user
    # may have write permission but not revert permission. So we
    # have to force the reversion. That requires a bit of
    # cut-n-paste from wikiaction.do_revert.
    self.force_revert(editor.page_name, editor.request)
    # BUGFIX: the original fell off the end here and implicitly returned
    # None; return False explicitly to signal the save was rejected
    # (backward-compatible: both are falsy).
    return False
def save(self, editor, newtext, rev, **kw):
    """Score newtext for spam, save via the base class, retrain on the
    training pages, and force-revert saves scored as suspected spam.

    Returns the base-class save result, or False when the saved page was
    reverted as suspected spam.
    """
    self.open_spamdb(editor.request)
    score = self.sbayes.score(newtext)
    save_result = Permissions.save(self, editor, newtext, rev, **kw)
    if save_result and editor.page_name in ("HamPages", "SpamPages"):
        # A training page changed: rebuild the classifier from it.
        self.retrain(editor.request)
        return save_result
    if score < self.ham_cutoff:
        # Scored as ham; keep the save as-is.
        return save_result
    if not save_result:
        # Nothing was written, so there is nothing to revert.
        return save_result
    # Suspected spam that was actually saved: undo it forcefully (the
    # author may not hold revert permission himself).
    self.force_revert(editor.page_name, editor.request)
    # BUGFIX: the original implicitly returned None here; return False
    # explicitly to signal rejection (backward-compatible: both falsy).
    return False
def admin(self, pagename):
    """Grant admin rights on a page's subtree to its owner and to the
    page's own AdminGroup, when enabled via AutoAdminGroup membership.

    Falls back to the base-class 'admin' check when no auto-admin rule
    matches (or when there is no request.page, e.g. via xmlrpc).
    """
    try:
        request = self.request
        has_member = request.dicts.has_member
        username = request.user.name
        # BUGFIX: do not rebind the `pagename` argument — the fallback
        # call below must receive the caller's page name, not the name
        # of the page currently attached to the request.
        current_pagename = request.page.page_name
        mainpage = current_pagename.split('/')[0]
        # The page owner is auto-admin on his own subtree if he is
        # listed in AutoAdminGroup.
        if username == mainpage and has_member('AutoAdminGroup', username):
            return True
        # Members of <mainpage>/AdminGroup are admins if that group is
        # itself listed in AutoAdminGroup.
        groupname = "%s/AdminGroup" % mainpage
        if has_member(groupname, username) and has_member('AutoAdminGroup', groupname):
            return True
    except AttributeError:
        pass  # when we get called from xmlrpc, there is no request.page
    return Permissions.__getattr__(self, 'admin')(pagename)
def admin(self, pagename):
    """Grant admin rights on a page's subtree to its owner and to the
    page's own AdminGroup, when enabled via AutoAdminGroup membership.

    Falls back to the base-class 'admin' check when no auto-admin rule
    matches (or when there is no request.page, e.g. via xmlrpc).
    """
    try:
        request = self.request
        groups = request.groups
        username = request.user.name
        # BUGFIX: do not rebind the `pagename` argument — the fallback
        # call below must receive the caller's page name, not the name
        # of the page currently attached to the request.
        current_pagename = request.page.page_name
        mainpage = current_pagename.split('/')[0]
        # The page owner is auto-admin on his own subtree if he is
        # listed in AutoAdminGroup.
        if username == mainpage and username in groups.get(u'AutoAdminGroup', []):
            return True
        # Members of <mainpage>/AdminGroup are admins if that group is
        # itself listed in AutoAdminGroup.
        group_name = "%s/AdminGroup" % mainpage
        if (username in groups.get(group_name, []) and
                group_name in groups.get(u'AutoAdminGroup', [])):
            return True
    except AttributeError:
        pass  # when we get called from xmlrpc, there is no request.page
    return Permissions.__getattr__(self, 'admin')(pagename)
def admin(self, pagename):
    """Auto-admin check: the page owner, or members of the page's own
    AdminGroup, get admin rights when enabled through AutoAdminGroup.

    Falls back to the base-class 'admin' check otherwise (including the
    xmlrpc case, where request.page does not exist).
    """
    try:
        request = self.request
        groups = request.groups
        username = request.user.name
        # BUGFIX: keep the `pagename` argument intact — the original
        # rebound it here, so the base-class fallback below saw
        # request.page's name instead of the caller's argument.
        current_pagename = request.page.page_name
        mainpage = current_pagename.split('/')[0]
        if username == mainpage and username in groups.get(
                u'AutoAdminGroup', []):
            return True
        group_name = "%s/AdminGroup" % mainpage
        if (username in groups.get(group_name, []) and
                group_name in groups.get(u'AutoAdminGroup', [])):
            return True
    except AttributeError:
        pass  # when we get called from xmlrpc, there is no request.page
    return Permissions.__getattr__(self, 'admin')(pagename)
class SecurityPolicy(Permissions):
    """ Extend the default security policy with antispam feature """

    def save(self, editor, newtext, rev, **kw):
        """Refuse a save whose *added* text matches a spam blacklist.

        Blacklist regexes are fetched from the BadContent/LocalBadContent
        pages, compiled once, and cached on request.cfg.cache keyed by the
        newest page mtime.  Only text that is new relative to revision
        `rev` (plus the edit comment) is matched, so existing spam on a
        page does not block unrelated edits.  Raises editor.SaveError on
        a match; otherwise defers to Permissions.save.
        """
        BLACKLISTPAGES = ["BadContent", "LocalBadContent"]
        # The blacklist pages themselves must stay editable, or nobody
        # could ever add a pattern that matches an existing pattern.
        if not editor.page_name in BLACKLISTPAGES:
            request = editor.request

            # Start timing of antispam operation
            request.clock.start('antispam')

            blacklist = []
            latest_mtime = 0
            for pn in BLACKLISTPAGES:
                do_update = (pn != "LocalBadContent" and
                             request.cfg.interwikiname != 'MoinMaster')  # MoinMaster wiki shall not fetch updates from itself
                blacklist_mtime, blacklist_entries = getblacklist(request, pn, do_update)
                blacklist += blacklist_entries
                latest_mtime = max(latest_mtime, blacklist_mtime)

            if blacklist:
                # Recompile the regex cache only when missing or older
                # than the newest blacklist page.
                invalid_cache = not getattr(request.cfg.cache, "antispam_blacklist", None)
                if invalid_cache or request.cfg.cache.antispam_blacklist[0] < latest_mtime:
                    mmblcache = []
                    for blacklist_re in blacklist:
                        try:
                            mmblcache.append(re.compile(blacklist_re, re.I))
                        except re.error, err:
                            # A broken pattern is skipped (logged), so one
                            # bad entry cannot disable the whole blacklist.
                            logging.error("Error in regex '%s': %s. Please check the pages %s." % (
                                blacklist_re, str(err), ', '.join(BLACKLISTPAGES)))
                    # Cache as (mtime, [compiled patterns]).
                    request.cfg.cache.antispam_blacklist = (latest_mtime, mmblcache)

                from MoinMoin.Page import Page

                oldtext = ""
                if rev > 0: # rev is the revision of the old page
                    page = Page(request, editor.page_name, rev=rev)
                    oldtext = page.get_raw_body()

                # Line-set difference: only lines added by this edit
                # (keepends=1 so identical lines compare exactly).
                newset = frozenset(newtext.splitlines(1))
                oldset = frozenset(oldtext.splitlines(1))
                difference = newset - oldset
                # The edit comment is also user-supplied, so scan it too.
                addedtext = kw.get('comment', u'') + u''.join(difference)

                for blacklist_re in request.cfg.cache.antispam_blacklist[1]:
                    match = blacklist_re.search(addedtext)
                    if match:
                        # Log error and raise SaveError, PageEditor should handle this.
                        _ = editor.request.getText
                        msg = _('Sorry, can not save page because "%(content)s" is not allowed in this wiki.') % {
                            'content': wikiutil.escape(match.group())}
                        logging.info(msg)
                        raise editor.SaveError(msg)
            request.clock.stop('antispam')

        # No problem to save if my base class agree
        return Permissions.save(self, editor, newtext, rev, **kw)
def __init__(self, user):
    """Bind the policy to *user* via the base class.

    The Bayes classifier is not loaded here; `sbayes` starts as None and
    is populated on demand (see open_spamdb, called from save).
    """
    Permissions.__init__(self, user)
    # Lazy: avoids opening the spam database for requests that never
    # score any text.
    self.sbayes = None