Example #1
    def save(self, editor, newtext, rev, **kw):
        request = self.request
        username = request.user.name
        pagename = editor.page_name

        if grouppage_autocreate and username == pagename:
            # create group pages when a user saves his own homepage
            for page in grouppages:
                grouppagename = "%s/%s" % (username, page)
                grouppage = Page(request, grouppagename)
                if not grouppage.exists():
                    text = """\
#acl %(username)s:read,write,delete,revert
 * %(username)s
""" % locals()
                    editor = PageEditor(request, grouppagename)
                    editor._write_file(text)

        parts = pagename.split('/')
        if len(parts) == 2:
            subpage = parts[1]
            if subpage in grouppages and not self.admin(pagename):
                return False

        # No problem to save if my base class agrees
        return Permissions.save(self, editor, newtext, rev, **kw)
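For context, here is a minimal sketch of how a policy like this might be wired up in a MoinMoin 1.x wikiconfig.py. The names grouppage_autocreate and grouppages are free variables in the save() method above, so they are shown here as module-level settings; the concrete subpage names are hypothetical.

from MoinMoin.security import Permissions
from MoinMoin.Page import Page
from MoinMoin.PageEditor import PageEditor

grouppage_autocreate = True                    # auto-create the group subpages
grouppages = ['FriendsGroup', 'EditorsGroup']  # hypothetical subpage names

class SecurityPolicy(Permissions):
    def save(self, editor, newtext, rev, **kw):
        # ... body as in the example above ...
        return Permissions.save(self, editor, newtext, rev, **kw)

# The policy is then typically activated in the Config class of
# wikiconfig.py with:  SecurityPolicy = SecurityPolicy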
Example #2
    def save(self, editor, newtext, rev, **kw):
        request = self.request
        username = request.user.name
        pagename = editor.page_name

        if grouppage_autocreate and username == pagename:
            # create group pages when a user saves his own homepage
            for page in grouppages:
                grouppagename = "%s/%s" % (username, page)
                grouppage = Page(request, grouppagename)
                if not grouppage.exists():
                    text = """\
#acl %(username)s:read,write,delete,revert
 * %(username)s
""" % locals()
                    editor = PageEditor(request, grouppagename)
                    editor._write_file(text)

        parts = pagename.split('/')
        if len(parts) == 2:
            subpage = parts[1]
            if subpage in grouppages and not self.admin(pagename):
                return False

        # No problem to save if my base class agrees
        return Permissions.save(self, editor, newtext, rev, **kw)
Example #3
def deletecomment(macro, delkey, delpasswd):
    # Deletes a comment with given index and password
    
    request = Globs.macro.request
    formatter = Globs.macro.formatter
    datapagename = Globs.datapagename
    _ = request.getText
    
    if Params.encryptpass:
        from MoinMoin import user
        delpasswd = user.encodePassword(delpasswd)
    
    pg = PageEditor( request, datapagename )
    pagetext = pg.get_raw_body()
    
    regex = ur"""
(?P<comblock>
    ^[\{]{3}\n
    ^(?P<icon>[^\n]*)\n
    ^(?P<name>[^\n]*)\n
    ^(?P<date>[^\n]*)[\n]+
    ^(?P<text>
        \s*.*?
        (?=[\}]{3})
    )[\}]{3}[\n]*
    ^[#]{2}PASSWORD[ ](?P<passwd>[^\n]*)[\n]*
    ^[#]{2}LOGINUSER[ ](?P<loginuser>[^\n]*)[\n$]*
)"""

    pattern = re.compile(regex, re.UNICODE + re.MULTILINE + re.VERBOSE + re.DOTALL)
    commentitems = pattern.findall(pagetext)
    
    for item in commentitems:
        
        if delkey == item[3].strip():
            comauthor = item[2]
            if Globs.admin or (request.user.valid and request.user.name == comauthor) or delpasswd == item[5]:
                newpagetext = pagetext.replace(item[0], '', 1)
                
                action = 'SAVE'
                comment = 'Deleted comment by "%s"' % comauthor
                trivial = 1
                pg._write_file(newpagetext, action, u'PageComment modification at %s' % Globs.curpagename)
                addLogEntry(request, 'COMDEL', Globs.curpagename, comment)
                
                msg = _('The comment is deleted.')
                
                # send notification mails
                if Params.notify:
                    msg = msg + commentNotify(comment, trivial)
                
                message(msg)
                
                return
            else:
                message(_('Sorry, wrong password.'))
                return
                
    message(_('No such comment'))
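For reference, a sketch of the comment block layout that the regular expression above appears to expect; this is inferred from the pattern itself, not taken from the PageComment macro's documentation, and all values are made up.

# Hedged sketch: a data-page comment block shaped the way the regex expects.
sampleblock = u"""{{{
smiley.png
SomeUser
2007-01-01 12:00:00
The comment text goes here.
}}}
##PASSWORD 098f6bcd4621d373cade4e832627b4f6
##LOGINUSER SomeUser
"""
# pattern.findall() yields one tuple per matching block; the loop above uses
#   item[0] = whole block (comblock), item[2] = name, item[3] = date
#   (compared against delkey), and item[5] = the stored password.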
Example #4
 def test_update_needed(self):
     """ test update check) """
     test_data1 = u'does not matter'
     test_data2 = u'something else'
     page_name = u'Caching_TestPage'
     page = PageEditor(self.request, page_name)
     page._write_file(test_data1)
     cache = caching.CacheEntry(self.request, page, 'test_key', 'item')
     cache.update(test_data1)
     assert not cache.needsUpdate(page._text_filename())
     time.sleep(3) # XXX fails without, due to mtime granularity
     page = PageEditor(self.request, page_name)
     page._write_file(test_data2)
     assert cache.needsUpdate(page._text_filename())
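The sleep(3) works around coarse file system mtime resolution: needsUpdate() presumably decides staleness by comparing modification times, so two writes landing in the same timestamp tick would look identical. A minimal sketch of that idea, as an assumption about the mechanism rather than MoinMoin's actual implementation:

import os

def needs_update_sketch(cache_path, source_path):
    # Stale if the cache file is missing, or if the source file was
    # modified at or after the time the cache entry was last written.
    if not os.path.exists(cache_path):
        return True
    return os.path.getmtime(source_path) >= os.path.getmtime(cache_path)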
Example #5
 def force_revert(self, pagename, request):
     rev = int(request.form['rev'][0])
     revstr = '%08d' % rev
     oldpg = Page(request, pagename, rev=rev)
     pg = PageEditor(request, pagename)
     _ = request.getText
     msg = _("Thank you for your changes. Your attention to detail is appreciated.")
     try:
         pg._write_file(oldpg.get_raw_body(),
                        action="SAVE/REVERT",
                        extra=revstr)
         pg.clean_acl_cache()
     except pg.SaveError, msg:
         pass
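The extra=revstr argument appears to record which revision was reverted to in the edit log; '%08d' simply zero-pads the revision number to eight digits:

rev = 42
revstr = '%08d' % rev   # -> '00000042'

Note also that except pg.SaveError, msg: is Python 2 syntax, consistent with the rest of this MoinMoin code; Python 3 would spell it except pg.SaveError as msg:.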
Example #6
 def test_update_needed(self):
     """ test update check) """
     test_data1 = u'does not matter'
     test_data2 = u'something else'
     page_name = u'Caching_TestPage'
     page = PageEditor(self.request, page_name)
     page._write_file(test_data1)
     cache = caching.CacheEntry(self.request, page, 'test_key', 'item')
     cache.update(test_data1)
     assert not cache.needsUpdate(page._text_filename())
     time.sleep(3)  # XXX fails without, due to mtime granularity
     page = PageEditor(self.request, page_name)
     page._write_file(test_data2)
     assert cache.needsUpdate(page._text_filename())
Example #7
 def force_revert(self, pagename, request):
     rev = int(request.form['rev'][0])
     revstr = '%08d' % rev
     oldpg = Page(request, pagename, rev=rev)
     pg = PageEditor(request, pagename)
     _ = request.getText
     msg = _(
         "Thank you for your changes. Your attention to detail is appreciated."
     )
     try:
         pg._write_file(oldpg.get_raw_body(),
                        action="SAVE/REVERT",
                        extra=revstr)
         pg.clean_acl_cache()
     except pg.SaveError, msg:
         pass
Example #8
def getblacklist(request, pagename, do_update):
    """ Get blacklist, possibly downloading new copy

    @param request: current request (request instance)
    @param pagename: bad content page name (unicode)
    @rtype: list
    @return: list of blacklisted regular expressions
    """
    from MoinMoin.PageEditor import PageEditor
    p = PageEditor(request, pagename, uid_override="Antispam subsystem")
    mymtime = wikiutil.version2timestamp(p.mtime_usecs())
    if do_update:
        tooold = time.time() - 1800
        failure = caching.CacheEntry(request,
                                     "antispam",
                                     "failure",
                                     scope='wiki')
        fail_time = failure.mtime()  # only update if no failure in last hour
        if (mymtime < tooold) and (fail_time < tooold):
            logging.info(
                "%d *BadContent too old, have to check for an update..." %
                tooold)
            import xmlrpclib
            import socket

            timeout = 15  # time out for reaching the master server via xmlrpc
            old_timeout = socket.getdefaulttimeout()
            socket.setdefaulttimeout(timeout)

            master_url = request.cfg.antispam_master_url
            master = xmlrpclib.ServerProxy(master_url)
            try:
                # Get BadContent info
                master.putClientInfo('ANTISPAM-CHECK', request.url)
                response = master.getPageInfo(pagename)

                # It seems that response is always a dict
                if isinstance(response, dict) and 'faultCode' in response:
                    raise WikirpcError("failed to get BadContent information",
                                       response)

                # Compare date against local BadContent copy
                masterdate = response['lastModified']

                if isinstance(masterdate, datetime.datetime):
                    # for python 2.5
                    mydate = datetime.datetime(
                        *tuple(time.gmtime(mymtime))[0:6])
                else:
                    # for python <= 2.4.x
                    mydate = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))

                logging.debug("master: %s mine: %s" % (masterdate, mydate))
                if mydate < masterdate:
                    # Get new copy and save
                    logging.info("Fetching page from %s..." % master_url)
                    master.putClientInfo('ANTISPAM-FETCH', request.url)
                    response = master.getPage(pagename)
                    if isinstance(response, dict) and 'faultCode' in response:
                        raise WikirpcError("failed to get BadContent data",
                                           response)
                    p._write_file(response)
                    mymtime = wikiutil.version2timestamp(p.mtime_usecs())
                else:
                    failure.update(
                        "")  # we didn't get a modified version, this avoids
                    # permanent polling for every save when there
                    # is no updated master page

            except (socket.error, xmlrpclib.ProtocolError), err:
                logging.error(
                    'Timeout / socket / protocol error when accessing %s: %s' %
                    (master_url, str(err)))
                # update cache to wait before the next try
                failure.update("")

            except (xmlrpclib.Fault, ), err:
                logging.error('Fault on %s: %s' % (master_url, str(err)))
                # update cache to wait before the next try
                failure.update("")

            except Error, err:
                # In case of Error, we log the error and use the local BadContent copy.
                logging.error(str(err))
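Before comparing against the master's lastModified value, the local mtime has to be converted to whichever type the master returned, otherwise the mydate < masterdate comparison is meaningless. A small worked sketch of the two branches (the timestamp is illustrative):

import time
import datetime
import xmlrpclib   # Python 2; the Python 3 equivalent lives in xmlrpc.client

mymtime = 1199145600.0   # hypothetical local mtime: 2008-01-01 00:00:00 UTC

# Branch taken when the master returned a datetime.datetime:
mydate = datetime.datetime(*tuple(time.gmtime(mymtime))[0:6])
# -> datetime.datetime(2008, 1, 1, 0, 0)

# Branch taken when the master returned an xmlrpclib.DateTime:
mydate_old = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))
# -> DateTime '20080101T00:00:00'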
Example #9
def getblacklist(request, pagename, do_update):
    """ Get blacklist, possibly downloading new copy

    @param request: current request (request instance)
    @param pagename: bad content page name (unicode)
    @rtype: list
    @return: list of blacklisted regular expressions
    """
    from MoinMoin.PageEditor import PageEditor
    p = PageEditor(request, pagename, uid_override="Antispam subsystem")
    mymtime = wikiutil.version2timestamp(p.mtime_usecs())
    if do_update:
        tooold = time.time() - 1800
        failure = caching.CacheEntry(request, "antispam", "failure", scope='wiki')
        fail_time = failure.mtime() # only update if no failure in last hour
        if (mymtime < tooold) and (fail_time < tooold):
            logging.info("%d *BadContent too old, have to check for an update..." % tooold)
            import xmlrpclib
            import socket

            timeout = 15 # time out for reaching the master server via xmlrpc
            old_timeout = socket.getdefaulttimeout()
            socket.setdefaulttimeout(timeout)

            master_url = request.cfg.antispam_master_url
            master = xmlrpclib.ServerProxy(master_url)
            try:
                # Get BadContent info
                master.putClientInfo('ANTISPAM-CHECK', request.url)
                response = master.getPageInfo(pagename)

                # It seems that response is always a dict
                if isinstance(response, dict) and 'faultCode' in response:
                    raise WikirpcError("failed to get BadContent information",
                                       response)

                # Compare date against local BadContent copy
                masterdate = response['lastModified']

                if isinstance(masterdate, datetime.datetime):
                    # for python 2.5
                    mydate = datetime.datetime(*tuple(time.gmtime(mymtime))[0:6])
                else:
                    # for python <= 2.4.x
                    mydate = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))

                logging.debug("master: %s mine: %s" % (masterdate, mydate))
                if mydate < masterdate:
                    # Get new copy and save
                    logging.info("Fetching page from %s..." % master_url)
                    master.putClientInfo('ANTISPAM-FETCH', request.url)
                    response = master.getPage(pagename)
                    if isinstance(response, dict) and 'faultCode' in response:
                        raise WikirpcError("failed to get BadContent data", response)
                    p._write_file(response)
                    mymtime = wikiutil.version2timestamp(p.mtime_usecs())
                else:
                    failure.update("") # we didn't get a modified version, this avoids
                                       # permanent polling for every save when there
                                       # is no updated master page

            except (socket.error, xmlrpclib.ProtocolError), err:
                logging.error('Timeout / socket / protocol error when accessing %s: %s' % (master_url, str(err)))
                # update cache to wait before the next try
                failure.update("")

            except (xmlrpclib.Fault, ), err:
                logging.error('Fault on %s: %s' % (master_url, str(err)))
                # update cache to wait before the next try
                failure.update("")

            except Error, err:
                # In case of Error, we log the error and use the local BadContent copy.
                logging.error(str(err))
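As I read the guard at the top of this function, the "failure" cache entry doubles as a retry throttle: failure.update("") is touched after every failed or fruitless fetch, so the master is only contacted again once both the local BadContent copy and the last failure are older than the cut-off. A minimal sketch of that logic under those assumptions:

import time

RETRY_INTERVAL = 1800   # seconds; matches the "tooold" cut-off above

def should_contact_master(local_mtime, last_failure_mtime, now=None):
    # Only ask the master when the local copy is stale AND the last failed
    # (or fruitless) attempt is old enough, so errors do not trigger a
    # lookup on every single page save.
    now = time.time() if now is None else now
    cutoff = now - RETRY_INTERVAL
    return local_mtime < cutoff and last_failure_mtime < cutoff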
Example #10
def getblacklist(request, pagename, do_update):
    """ Get blacklist, possibly downloading new copy

    @param request: current request (request instance)
    @param pagename: bad content page name (unicode)
    @rtype: list
    @return: list of blacklisted regular expressions
    """
    from MoinMoin.PageEditor import PageEditor
    p = PageEditor(request, pagename, uid_override="Antispam subsystem")
    invalidate_cache = False
    if do_update:
        tooold = time.time() - 3600
        mymtime = wikiutil.version2timestamp(p.mtime_usecs())
        failure = caching.CacheEntry(request, "antispam", "failure")
        fail_time = failure.mtime() # only update if no failure in last hour
        if (mymtime < tooold) and (fail_time < tooold):
            dprint("%d *BadContent too old, have to check for an update..." % tooold)
            import xmlrpclib

            # TODO replace following with import socket when we require py 2.3
            # also change the call / exception names accordingly
            from MoinMoin.support import timeoutsocket

            timeout = 15 # time out for reaching the master server via xmlrpc
            old_timeout = timeoutsocket.getDefaultSocketTimeout()
            timeoutsocket.setDefaultSocketTimeout(timeout)
            
            # For production code
            uri = "http://moinmaster.wikiwikiweb.de:8000/?action=xmlrpc2"
            # For testing (use your test wiki as BadContent source)
            ##uri = "http://localhost/main/?action=xmlrpc2"
            master = xmlrpclib.ServerProxy(uri)

            try:
                # Get BadContent info
                master.putClientInfo('ANTISPAM-CHECK',
                                     request.http_host+request.script_name)
                response = master.getPageInfo(pagename)

                # It seems that response is always a dict
                if isinstance(response, dict) and 'faultCode' in response:
                    raise WikirpcError("failed to get BadContent information",
                                       response)
                
                # Compare date against local BadContent copy
                masterdate = response['lastModified']
                mydate = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))
                dprint("master: %s mine: %s" % (masterdate, mydate))
                if mydate < masterdate:
                    # Get new copy and save
                    dprint("Fetching page from master...")
                    master.putClientInfo('ANTISPAM-FETCH',
                                         request.http_host + request.script_name)
                    response = master.getPage(pagename)
                    if isinstance(response, dict) and 'faultCode' in response:
                        raise WikirpcError("failed to get BadContent data",
                                           response)
                    p._write_file(response)

                invalidate_cache = True

            except (timeoutsocket.Timeout, timeoutsocket.error, xmlrpclib.ProtocolError), err:
                # Log the error
                # TODO: check if this does not fill the logs!
                dprint('Timeout / socket / protocol error when accessing'
                       ' moinmaster: %s' % str(err))
                # update cache to wait before the next try
                failure.update("")

            except Error, err:
                # In case of Error, we log the error and use the local
                # BadContent copy.
                dprint(str(err))

            # set back socket timeout
            timeoutsocket.setDefaultSocketTimeout(old_timeout)
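The TODO above (replace the bundled timeoutsocket module with the standard library socket module) is exactly what the newer variants in Examples #8 and #9 do. A minimal sketch of that stdlib approach, with the restore step in a try/finally so the previous default timeout always comes back:

import socket

timeout = 15                                  # seconds for reaching the master
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
    pass   # ... xmlrpclib calls against the master wiki would go here ...
finally:
    socket.setdefaulttimeout(old_timeout)     # always restore the previous default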
Example #11
def deletecomment(macro, delkey, delpasswd):
    # Deletes a comment with given index and password

    request = Globs.macro.request
    formatter = Globs.macro.formatter
    datapagename = Globs.datapagename
    _ = request.getText

    if Params.encryptpass:
        from MoinMoin import user
        delpasswd = user.encodePassword(delpasswd)

    pg = PageEditor(request, datapagename)
    pagetext = pg.get_raw_body()

    regex = ur"""
(?P<comblock>
    ^[\{]{3}\n
    ^(?P<icon>[^\n]*)\n
    ^(?P<name>[^\n]*)\n
    ^(?P<date>[^\n]*)[\n]+
    ^(?P<text>
        \s*.*?
        (?=[\}]{3})
    )[\}]{3}[\n]*
    ^[#]{2}PASSWORD[ ](?P<passwd>[^\n]*)[\n]*
    ^[#]{2}LOGINUSER[ ](?P<loginuser>[^\n]*)[\n$]*
)"""

    pattern = re.compile(regex,
                         re.UNICODE + re.MULTILINE + re.VERBOSE + re.DOTALL)
    commentitems = pattern.findall(pagetext)

    for item in commentitems:

        if delkey == item[3].strip():
            comauthor = item[2]
            if Globs.admin or (request.user.valid and request.user.name
                               == comauthor) or delpasswd == item[5]:
                newpagetext = pagetext.replace(item[0], '', 1)

                action = 'SAVE'
                comment = 'Deleted comment by "%s"' % comauthor
                trivial = 1
                pg._write_file(
                    newpagetext, action,
                    u'PageComment modification at %s' % Globs.curpagename)
                addLogEntry(request, 'COMDEL', Globs.curpagename, comment)

                msg = _('The comment is deleted.')

                # send notification mails
                if Params.notify:
                    msg = msg + commentNotify(comment, trivial)

                message(msg)

                return
            else:
                message(_('Sorry, wrong password.'))
                return

    message(_('No such comment'))