Example #1
    def buildPageSets(self):
        """ Calculates which pages should go into which package. """
        request = self.request

        all_pages = set(request.rootpage.getPageList())
        packaged_pages = set()

        languages = i18n.wikiLanguages()
        pageset_names = i18n.strings.pagesets
        pageSets = {}
        for lang in languages:
            def trans(text, request=request, lang=lang, **kw):
                return i18n.getText(text, request, lang, **kw)

            try:
                lang_long = languages[lang]['x-language-in-english']
                lang_long = lang_long.replace('/', '_').replace(' ', '_')
            except KeyError:
                lang_long = lang

            for pageset_name in pageset_names:
                pageset_orig = set(getattr(i18n.strings, pageset_name))
                pageset_trans = set([trans(pn) for pn in pageset_orig])
                key = u"%s--%s" % (lang_long, pageset_name)
                pageset = pageset_trans
                if lang != 'en':
                    pageset -= pageset_orig
                if pageset:
                    print key, len(pageset)
                    pageSets[key] = pageset
                    packaged_pages |= pageset

        not_packaged_pages = all_pages - packaged_pages
        pageSets['00_needs_fixing'] = not_packaged_pages
        return pageSets
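
To make the set arithmetic above concrete, here is a toy sketch with invented page names (not taken from any wiki): for a non-English language, subtracting the original English titles from the translated ones keeps only the pages that actually have a translated title.

# Toy illustration of the `pageset -= pageset_orig` step; the names are made up.
pageset_orig = set([u'RecentChanges', u'FindPage'])
pageset_trans = set([u'AktuelleAenderungen', u'FindPage'])   # 'FindPage' was not translated

pageset = pageset_trans - pageset_orig
# pageset is now set([u'AktuelleAenderungen']); untranslated titles are dropped,
# so they do not get packaged a second time under the non-English key.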
Example #2
class LegacyMonitor(AbstractMonitor):
    # global state
    dumping = False
    dump_file = None
    dumped = set()
    to_dump = set()
    hook_enabled = False

    def dump(cls, label):
        df = cls.dump_file or sys.stderr
        s = StringIO()
        print >> s, "\nDumping thread %s:" % (label, )
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            f = sys.exc_info()[2].tb_frame.f_back.f_back
        traceback.print_list(traceback.extract_stack(f, None), s)
        df.write(s.getvalue())

    dump = classmethod(dump)

    def dump_hook(cls, a, b, c):  # arguments are ignored
        if cls.dumping and sys.exc_info()[0] is None:
            thread = threading.currentThread()
            if thread in cls.to_dump:
                cls.dump(repr(thread))
                cls.to_dump.discard(thread)
                cls.dumped.add(thread)
                if not cls.to_dump:
                    cls.dumping = False

    dump_hook = classmethod(dump_hook)

    def trigger_dump(cls, dumpfile=None):
        cls.to_dump = set(threading.enumerate())
        if dumpfile is not None:
            cls.dump_file = dumpfile
        cls.dumping = True

    trigger_dump = classmethod(trigger_dump)

    def activate_hook(cls):
        sys.setprofile(cls.dump_hook)
        threading.setprofile(cls.dump_hook)
        cls.hook_enabled = True

    activate_hook = classmethod(activate_hook)

    def hook_enabled(cls):
        return cls.hook_enabled

    hook_enabled = classmethod(hook_enabled)
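
A minimal usage sketch for the monitor above, assuming the class is importable as LegacyMonitor and that the application already has several threads running; the dump happens lazily, the next time the profile hook fires in each thread.

import sys

LegacyMonitor.activate_hook()            # install dump_hook via sys.setprofile / threading.setprofile
LegacyMonitor.trigger_dump(sys.stderr)   # mark every live thread for a stack dump
# Each marked thread writes its traceback to sys.stderr from within dump_hook;
# once to_dump is empty, dumping flips back to False and the hook goes quiet.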
Example #3
def get_available_actions(config, page, user):
        """ Get a list of actions available on a particular page
        for a particular user.

        The set does not contain actions whose names start with a lower-case letter.
        Themes use this set to display the actions to the user.

        @param config: a config object (for the per-wiki actions)
        @param page: the page to which the actions should apply
        @param user: the user who wants to apply an action
        @rtype: set
        @return: set of available actions
        """
        if not user.may.read(page.page_name):
            return []


        actions = get_names(config)

        # Filter out non-UI actions (names starting with a lower-case letter)
        actions = [action for action in actions if not action[0].islower()]

        # Filter actions by page type, acl and user state
        excluded = []
        if (page.isUnderlayPage() and not page.isStandardPage()) or \
                not user.may.write(page.page_name) or \
                not user.may.delete(page.page_name):
                # Prevent modification of underlay only pages, or pages
                # the user can't write and can't delete
                excluded = [u'RenamePage', u'DeletePage', ] # AttachFile must NOT be here!
        return set([action for action in actions if not action in excluded])
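
A hypothetical call site for the function above; request.cfg, page, and request.user are assumed to come from the surrounding MoinMoin request context and are not defined here.

actions = get_available_actions(request.cfg, page, request.user)
if u'DeletePage' in actions:
    pass  # a theme would render its "Delete Page" menu entry here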
Example #4
def page_change(change_type, request, page, subscribers, **kwargs):
    """Sends notification about page being changed in some way"""

    # send notifications to all subscribers
    if subscribers:
        recipients = set()

        for lang in subscribers:
            _ = lambda text: request.getText(text, lang=lang)
            jids = [u.jid for u in subscribers[lang] if u.jid]
            names = [u.name for u in subscribers[lang] if u.jid]
            msg = notification.page_change_message(change_type, request, page,
                                                   lang, **kwargs)
            page_url = request.getQualifiedURL(page.url(request))
            url = {'url': page_url, 'description': _("Changed page")}
            data = {
                'action': change_type,
                'subject': _('Page changed'),
                'url_list': [url],
                'text': msg['text'],
                'diff': msg.get('diff', ''),
                'comment': msg.get('comment', ''),
                'editor': msg['editor'],
                'old_name': msg.get('old_name', ''),
                'page_name': msg.get('page_name', ''),
                'revision': msg.get('revision', '')
            }

            result = send_notification(request, jids, data)

            if result:
                recipients.update(names)

        if recipients:
            return notification.Success(recipients)
Example #5
def handle_file_attached(event):
    """Handles event sent when a file is attached to a page"""

    names = set()
    request = event.request
    page = Page(request, event.pagename)
    subscribers = page.getSubscribers(request, return_users=1)
    notification.filter_subscriber_list(event, subscribers, True)
    recipients = []

    for lang in subscribers:
        recipients.extend(subscribers[lang])

    attachlink = request.getQualifiedURL(getAttachUrl(event.pagename, event.filename, request))
    pagelink = request.getQualifiedURL(page.url(request, {}))

    for lang in subscribers.keys():
        _ = lambda text: request.getText(text, lang=lang)
        data = notification.attachment_added(request, _, event.pagename, event.filename, event.size)
        links = [{'url': attachlink, 'description': _("Attachment link")},
                  {'url': pagelink, 'description': _("Page link")}]

        jids = [usr.jid for usr in subscribers[lang]]
        data['url_list'] = links
        data['action'] = "file_attached"

        if send_notification(request, jids, data):
            names.update(recipients)

    return notification.Success(names)
Example #6
def page_change(change_type, request, page, subscribers, **kwargs):
    """Sends notification about page being changed in some way"""

    # send notifications to all subscribers
    if subscribers:
        recipients = set()

        for lang in subscribers:
            _ = lambda text: request.getText(text, lang=lang)
            jids = [u.jid for u in subscribers[lang] if u.jid]
            names = [u.name for u in subscribers[lang] if u.jid]
            msg = notification.page_change_message(change_type, request, page, lang, **kwargs)
            page_url = request.getQualifiedURL(page.url(request))
            url = {'url': page_url, 'description': _("Changed page")}
            data = {'action': change_type, 'subject': _('Page changed'),
                            'url_list': [url], 'text': msg['text'], 'diff': msg.get('diff', ''),
                            'comment': msg.get('comment', ''), 'editor': msg['editor'],
                            'old_name': msg.get('old_name', ''), 'page_name': msg.get('page_name', ''),
                            'revision': msg.get('revision', '')}

            result = send_notification(request, jids, data)

            if result:
                recipients.update(names)

        if recipients:
            return notification.Success(recipients)
Example #7
def page_change(change_type, request, page, subscribers, **kwargs):
    """Sends notification about page being changed in some way"""

    # send notifications to all subscribers
    if subscribers:
        recipients = set()

        for lang in subscribers:
            _ = lambda text: request.getText(text, lang=lang)
            jids = [u.jid for u in subscribers[lang] if u.jid]
            names = [u.name for u in subscribers[lang] if u.jid]
            msg = notification.page_change_message(change_type, request, page, lang, **kwargs)
            page_url = request.getQualifiedURL(page.url(request))
            url = {"url": page_url, "description": _("Changed page")}
            data = {
                "action": change_type,
                "subject": _("Page changed"),
                "url_list": [url],
                "text": msg["text"],
                "diff": msg.get("diff", ""),
                "comment": msg.get("comment", ""),
                "editor": msg["editor"],
                "old_name": msg.get("old_name", ""),
                "page_name": msg.get("page_name", ""),
                "revision": msg.get("revision", ""),
            }

            result = send_notification(request, jids, data)

            if result:
                recipients.update(names)

        if recipients:
            return notification.Success(recipients)
Example #8
def get_available_actions(config, page, user):
    """ Get a list of actions available on a particular page
        for a particular user.

        The set does not contain actions whose names start with a lower-case letter.
        Themes use this set to display the actions to the user.

        @param config: a config object (for the per-wiki actions)
        @param page: the page to which the actions should apply
        @param user: the user who wants to apply an action
        @rtype: set
        @return: set of available actions
        """
    if not user.may.read(page.page_name):
        return []

    actions = get_names(config)

    # Filter out non-UI actions (names starting with a lower-case letter)
    actions = [action for action in actions if not action[0].islower()]

    # Filter actions by page type, acl and user state
    excluded = []
    if (page.isUnderlayPage() and not page.isStandardPage()) or \
            not user.may.write(page.page_name) or \
            not user.may.delete(page.page_name):
        # Prevent modification of underlay only pages, or pages
        # the user can't write and can't delete
        excluded = [
            u'RenamePage',
            u'DeletePage',
        ]  # AttachFile must NOT be here!
    return set([action for action in actions if not action in excluded])
Example #9
    def mainloop(self):
        # self.options.wiki_url = 'localhost/'
        if self.options.wiki_url and '.' in self.options.wiki_url:
            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki."
            return

        self.init_request() # this request will work on a test wiki in tests/wiki/ directory
                            # we assume that there are current moinmaster pages there
        request = self.request

        if not ('tests/wiki' in request.cfg.data_dir.replace("\\", "/") and 'tests/wiki' in request.cfg.data_underlay_dir.replace("\\", "/")):
            import sys
            print sys.path
            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki."
            return

        print "Building page sets ..."
        pageSets = self.buildPageSets()

        print "Creating packages ..."
        package_path = os.path.join('tests', 'wiki', 'underlay', 'pages', 'LanguageSetup', 'attachments')
        try:
            # create attachment dir in case it is not there:
            os.mkdir(package_path)
        except OSError:
            pass
        generate_filename = lambda name: os.path.join(package_path, '%s.zip' % name)
        [self.packagePages(list(pages), generate_filename(name), "ReplaceUnderlay") for name, pages in pageSets.items()]

        print "Removing pagedirs of packaged pages ..."
        dontkill = set(['LanguageSetup'])
        [self.removePages(list(pages - dontkill)) for name, pages in pageSets.items()]

        print "Finished."
Example #10
def handle_file_attached(event):
    """Sends an email to super users that have subscribed to this event type"""

    names = set()
    from_address = event.request.cfg.mail_from
    request = event.request
    page = Page(request, event.pagename)

    subscribers = page.getSubscribers(request, return_users=1)
    notification.filter_subscriber_list(event, subscribers, False)
    recipients = []

    for lang in subscribers:
        recipients.extend(subscribers[lang])

    attachlink = request.getBaseURL() + getAttachUrl(event.pagename, event.filename, request)
    pagelink = request.getQualifiedURL(page.url(request, {}))

    for lang in subscribers:
        emails = []
        _ = lambda text: request.getText(text, lang=lang)

        links = _("Attachment link: %(attach)s\n" \
                  "Page link: %(page)s\n") % {'attach': attachlink, 'page': pagelink}

        data = notification.attachment_added(request, _, event.pagename, event.filename, event.size)
        data['text'] = data['text'] + links

        emails = [usr.email for usr in subscribers[lang]]

        if send_notification(request, from_address, emails, data):
            names.update(recipients)

    return notification.Success(names)
Example #11
def handle_page_change(event):
    """ Send email to all subscribers of given page.

    @param event: event to notify about
    @rtype: string
    @return: message, indicating success or errors.

    """
    comment = event.comment
    page = event.page
    request = event.request
    trivial = isinstance(event, ev.TrivialPageChangedEvent)
    subscribers = page.getSubscribers(request, return_users=1)
    mail_from = page.cfg.mail_from

    if subscribers:
        recipients = set()

        # get a list of old revisions, and append a diff
        revisions = page.getRevList()

        # send email to all subscribers
        for lang in subscribers:
            users = [u for u in subscribers[lang]
                     if event.name in u.email_subscribed_events]
            emails = [u.email for u in users]
            names = [u.name for u in users]
            data = prep_page_changed_mail(request, page, comment, lang, revisions, trivial)

            if send_notification(request, mail_from, emails, data):
                recipients.update(names)

        if recipients:
            return notification.Success(recipients)
Example #12
def handle_page_changed(event):
    """ Send email to all subscribers of given page.

    @param event: event to notify about
    @rtype: string
    @return: message, indicating success or errors.

    """
    comment = event.comment
    page = event.page
    request = event.request
    trivial = isinstance(event, ev.TrivialPageChangedEvent)
    old_page = None
    if isinstance(event, (ev.PageChangedEvent, ev.TrivialPageChangedEvent)):
        change_type = "page_changed"
    elif isinstance(event, ev.PageDeletedEvent):
        change_type = "page_deleted"
    elif isinstance(event, ev.PageRenamedEvent):
        change_type = "page_renamed"
        old_page = event.old_page
    subscribers = page.getSubscribers(request, return_users=1)
    mail_from = page.cfg.mail_from

    if subscribers:
        recipients = set()

        # get a list of old revisions, and append a diff
        revisions = page.getRevList()

        # send email to all subscribers
        for lang in subscribers:
            users = [
                u for u in subscribers[lang]
                if event.name in u.email_subscribed_events
            ]
            emails = [u.email for u in users]
            names = [u.name for u in users]
            data = prep_page_changed_mail(request,
                                          page,
                                          comment,
                                          lang,
                                          revisions,
                                          trivial=trivial,
                                          change_type=change_type,
                                          old_page=old_page)

            if send_notification(request, mail_from, emails, data):
                recipients.update(names)

        if recipients:
            return notification.Success(recipients)
Example #13
def get_names(config):
    """ Get a list of known actions.

    @param config: a config object
    @rtype: set
    @return: set of known actions
    """
    if not hasattr(config.cache, 'action_names'):
        actions = names[:]
        actions.extend(wikiutil.getPlugins('action', config))
        actions = set([action for action in actions
                      if not action in config.actions_excluded])
        config.cache.action_names = actions # remember it
    return config.cache.action_names
Example #14
    def buildPageSets(self):
        """ Calculates which pages should go into which package. """
        request = self.request

        all_pages = set(request.rootpage.getPageList())
        packaged_pages = set()

        languages = i18n.wikiLanguages()
        pageset_names = i18n.strings.pagesets
        pageSets = {}
        for lang in languages:

            def trans(text, request=request, lang=lang, **kw):
                return i18n.getText(text, request, lang, **kw)

            try:
                lang_long = languages[lang]['x-language-in-english']
                lang_long = lang_long.replace('/', '_').replace(' ', '_')
            except KeyError:
                lang_long = lang

            for pageset_name in pageset_names:
                pageset_orig = set(getattr(i18n.strings, pageset_name))
                pageset_trans = set([trans(pn) for pn in pageset_orig])
                key = u"%s--%s" % (lang_long, pageset_name)
                pageset = pageset_trans
                if lang != 'en':
                    pageset -= pageset_orig
                if pageset:
                    print key, len(pageset)
                    pageSets[key] = pageset
                    packaged_pages |= pageset

        not_packaged_pages = all_pages - packaged_pages
        pageSets['00_needs_fixing'] = not_packaged_pages
        return pageSets
Example #15
def get_names(config):
    """ Get a list of known actions.

    @param config: a config object
    @rtype: set
    @return: set of known actions
    """
    if not hasattr(config.cache, 'action_names'):
        actions = names[:]
        actions.extend(wikiutil.getPlugins('action', config))
        actions = set([
            action for action in actions
            if not action in config.actions_excluded
        ])
        config.cache.action_names = actions  # remember it
    return config.cache.action_names
Example #16
    def mainloop(self):
        # self.options.wiki_url = 'localhost/'
        if self.options.wiki_url and '.' in self.options.wiki_url:
            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki."
            return

        self.init_request(
        )  # this request will work on a test wiki in tests/wiki/ directory
        # we assume that there are current moinmaster pages there
        request = self.request

        if not ('tests/wiki' in request.cfg.data_dir.replace("\\", "/")
                and 'tests/wiki' in request.cfg.data_underlay_dir.replace(
                    "\\", "/")):
            import sys
            print sys.path
            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki."
            return

        print "Building page sets ..."
        pageSets = self.buildPageSets()

        print "Creating packages ..."
        package_path = os.path.join('tests', 'wiki', 'underlay', 'pages',
                                    'LanguageSetup', 'attachments')
        try:
            # create attachment dir in case it is not there:
            os.mkdir(package_path)
        except OSError:
            pass
        generate_filename = lambda name: os.path.join(package_path, '%s.zip' %
                                                      name)
        [
            self.packagePages(list(pages), generate_filename(name),
                              "ReplaceUnderlay")
            for name, pages in pageSets.items()
        ]

        print "Removing pagedirs of packaged pages ..."
        dontkill = set(['LanguageSetup'])
        [
            self.removePages(list(pages - dontkill))
            for name, pages in pageSets.items()
        ]

        print "Finished."
Example #17
def handle_file_changed(event):
    """Sends an email to users that have subscribed to this event type"""

    names = set()
    from_address = event.request.cfg.mail_from
    request = event.request
    page = Page(request, event.pagename)

    if isinstance(event, ev.FileAttachedEvent):
        notification_fn = notification.attachment_added
    elif isinstance(event, ev.FileRemovedEvent):
        notification_fn = notification.attachment_removed

    subscribers = page.getSubscribers(request, return_users=1)
    notification.filter_subscriber_list(event, subscribers, False)
    recipients = []

    for lang in subscribers:
        recipients.extend(subscribers[lang])

    attachlink = request.getQualifiedURL(
        getAttachUrl(event.pagename, event.filename, request))
    pagelink = request.getQualifiedURL(page.url(request, {}))

    for lang in subscribers:
        _ = lambda text: request.getText(text, lang=lang)

        links = _("Attachment link: %(attach)s\n" \
                  "Page link: %(page)s\n") % {'attach': attachlink, 'page': pagelink}

        data = notification_fn(request, _, event.pagename, event.filename,
                               event.size)
        data['text'] = data['text'] + links

        emails = [usr.email for usr in subscribers[lang]]

        if send_notification(request, from_address, emails, data):
            names.update(recipients)

    return notification.Success(names)
Example #18
    def buildPageSets(self):
        """ Calculates which pages should go into which package. """
        request = self.request
        pageSets = {}

        allPages = set(request.rootpage.getPageList())

        systemPages = wikidicts.Group(request, "SystemPagesGroup").members()

        for pagename in systemPages:
            if pagename.endswith("Group"):
                #print x + " -> " + repr(wikidicts.Group(request, x).members())
                self.gd.addgroup(request, pagename)

        langPages = set()
        for name, group in self.gd.dictdict.items():
            groupPages = set(group.members() + [name])
            name = name.replace("SystemPagesIn", "").replace("Group", "")
            pageSets[name] = groupPages
            langPages |= groupPages

        specialPages = set(["SystemPagesGroup"])

        masterNonSystemPages = allPages - langPages - specialPages

        moinI18nPages = set([x for x in masterNonSystemPages if x.startswith("MoinI18n")])

        nodistPages = moinI18nPages | set(["InterWikiMap", ])

        extraPages = masterNonSystemPages - nodistPages

        pageSets[ALL] = langPages

        for name in pageSets.keys():
            if name not in (u"English", ):
                pageSets[name] -= pageSets[u"English"]
                pageSets[name] -= nodistPages

        pageSets[EXTRA] = extraPages   # stuff that maybe should be in some language group
        pageSets[NODIST] = nodistPages # we don't want these in the dist archive
        return pageSets
Example #19
 def trigger_dump(cls, dumpfile=None):
     cls.to_dump = set(threading.enumerate())
     if dumpfile is not None:
         cls.dump_file = dumpfile
     cls.dumping = True
Example #20
File: SyncPages.py Project: aahlad/soar
 def __init__(self, pagename, request):
     self.request = request
     self.pagename = pagename
     self.page = PageEditor(request, pagename)
     self.status = []
     self.rollback = set()
Example #21
File: SyncPages.py Project: aahlad/soar
    def sync(self, params, local, remote):
        """ This method does the synchronisation work.
            Currently, it handles nearly all cases.
            The major missing part is rename handling.
            There are a few other cases left that have to be implemented:
                Wiki A    | Wiki B   | Remark
                ----------+----------+------------------------------
                exists    | non-     | Now the wiki knows that the page was renamed.
                with tags | existing | There should be an RPC method that asks
                          |          | for the new name (which could be recorded
                          |          | on page rename). Then the page is
                          |          | renamed in Wiki A as well and the sync
                          |          | is done normally.
                          |          | Every wiki retains a dict that maps
                          |          | (IWID, oldname) => newname and that is
                          |          | updated on every rename. oldname refers
                          |          | to the pagename known by the old wiki (can be
                          |          | gathered from tags).
                ----------+----------+-------------------------------
                exists    | any case | Try a rename search first, then
                          |          | do a sync without considering tags
                with tags | with non | to ensure data integrity.
                          | matching | Hmm, how do we detect this
                          | tags     | case if the unmatching tags are only
                          |          | on the remote side?
                ----------+----------+-------------------------------
        """
        _ = lambda x: x  # we will translate it later

        direction = params["direction"]
        if direction == BOTH:
            match_direction = direction
        else:
            match_direction = None

        local_full_iwid = packLine(
            [local.get_iwid(), local.get_interwiki_name()])
        remote_full_iwid = remote.iwid_full

        self.log_status(self.INFO,
                        _("Synchronisation started -"),
                        raw_suffix=" <<DateTime(%s)>>" %
                        self.page._get_local_timestamp())

        l_pages = local.get_pages()
        r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)

        if params["groupList"]:
            pages_from_groupList = set(local.getGroupItems(
                params["groupList"]))
            r_pages = SyncPage.filter(r_pages,
                                      pages_from_groupList.__contains__)
            l_pages = SyncPage.filter(l_pages,
                                      pages_from_groupList.__contains__)

        m_pages = [
            elem.add_missing_pagename(local, remote)
            for elem in SyncPage.merge(l_pages, r_pages)
        ]

        self.log_status(
            self.INFO,
            _("Got a list of %s local and %s remote pages. This results in %s pages to process."
              ), (str(len(l_pages)), str(len(r_pages)), str(len(m_pages))))

        if params["pageMatch"]:
            m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
            self.log_status(self.INFO, _("After filtering: %s pages"),
                            (str(len(m_pages)), ))

        class handle_page(rpc_aggregator.RPCYielder):
            def run(yielder, sp):
                # XXX add locking, acquire read-lock on sp
                if debug:
                    self.log_status(ActionClass.INFO,
                                    raw_suffix="Processing %r" % sp)

                local_pagename = sp.local_name
                if not self.request.user.may.write(local_pagename):
                    self.log_status(
                        ActionClass.WARN,
                        _("Skipped page %s because of no write access to local page."
                          ), (local_pagename, ))
                    return

                current_page = PageEditor(self.request,
                                          local_pagename)  # YYY direct access
                comment = u"Local Merge - %r" % (remote.get_interwiki_name()
                                                 or remote.get_iwid())

                tags = TagStore(current_page)

                matching_tags = tags.fetch(iwid_full=remote.iwid_full,
                                           direction=match_direction)
                matching_tags.sort()
                if debug:
                    self.log_status(ActionClass.INFO,
                                    raw_suffix="Tags: %r <<BR>> All: %r" %
                                    (matching_tags, tags.tags))

                # some default values for non matching tags
                normalised_name = None
                remote_rev = None
                local_rev = sp.local_rev  # merge against the newest version
                old_contents = ""

                if matching_tags:
                    newest_tag = matching_tags[-1]

                    local_change = newest_tag.current_rev != sp.local_rev
                    remote_change = newest_tag.remote_rev != sp.remote_rev

                    # handle some cases where we cannot continue for this page
                    if not remote_change and (direction == DOWN
                                              or not local_change):
                        return  # no changes done, next page
                    if sp.local_deleted and sp.remote_deleted:
                        return
                    if sp.remote_deleted and not local_change:
                        msg = local.delete_page(sp.local_name, comment)
                        if not msg:
                            self.log_status(ActionClass.INFO,
                                            _("Deleted page %s locally."),
                                            (sp.name, ))
                        else:
                            self.log_status(
                                ActionClass.ERROR,
                                _("Error while deleting page %s locally:"),
                                (sp.name, ), msg)
                        return
                    if sp.local_deleted and not remote_change:
                        if direction == DOWN:
                            return
                        yield remote.delete_page_pre(sp.remote_name,
                                                     sp.remote_rev,
                                                     local_full_iwid)
                        msg = remote.delete_page_post(yielder.fetch_result())
                        if not msg:
                            self.log_status(ActionClass.INFO,
                                            _("Deleted page %s remotely."),
                                            (sp.name, ))
                        else:
                            self.log_status(
                                ActionClass.ERROR,
                                _("Error while deleting page %s remotely:"),
                                (sp.name, ), msg)
                        return
                    if sp.local_mime_type != MIMETYPE_MOIN and not (
                            local_change ^ remote_change):
                        self.log_status(
                            ActionClass.WARN,
                            _("The item %s cannot be merged automatically but was changed in both wikis. Please delete it in one of both wikis and try again."
                              ), (sp.name, ))
                        return
                    if sp.local_mime_type != sp.remote_mime_type:
                        self.log_status(
                            ActionClass.WARN,
                            _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."
                              ), (sp.name, ))
                        return
                    if newest_tag.normalised_name != sp.name:
                        self.log_status(
                            ActionClass.WARN,
                            _("The item %s was renamed locally. This is not implemented yet. Therefore the full synchronisation history is lost for this page."
                              ), (sp.name, ))  # XXX implement renames
                    else:
                        normalised_name = newest_tag.normalised_name
                        local_rev = newest_tag.current_rev
                        remote_rev = newest_tag.remote_rev
                        old_contents = Page(
                            self.request,
                            local_pagename,
                            rev=newest_tag.current_rev).get_raw_body_str(
                            )  # YYY direct access
                else:
                    if (sp.local_deleted
                            and not sp.remote_rev) or (sp.remote_deleted
                                                       and not sp.local_rev):
                        return

                self.log_status(
                    ActionClass.INFO,
                    _("Synchronising page %s with remote page %s ..."),
                    (local_pagename, sp.remote_name))

                if direction == DOWN:
                    remote_rev = None  # always fetch the full page, ignore remote conflict check
                    patch_base_contents = ""
                else:
                    patch_base_contents = old_contents

                # retrieve remote contents diff
                if remote_rev != sp.remote_rev:
                    if sp.remote_deleted:  # ignore remote changes
                        current_remote_rev = sp.remote_rev
                        is_remote_conflict = False
                        diff = None
                        self.log_status(
                            ActionClass.WARN,
                            _("The page %s was deleted remotely but changed locally."
                              ), (sp.name, ))
                    else:
                        yield remote.get_diff_pre(sp.remote_name, remote_rev,
                                                  None, normalised_name)
                        diff_result = remote.get_diff_post(
                            yielder.fetch_result())
                        if diff_result is None:
                            self.log_status(
                                ActionClass.ERROR,
                                _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."
                                  ), (sp.remote_name, ))
                            return
                        is_remote_conflict = diff_result["conflict"]
                        assert diff_result["diffversion"] == 1
                        diff = diff_result["diff"]
                        current_remote_rev = diff_result["current"]
                else:
                    current_remote_rev = remote_rev
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        is_remote_conflict = wikiutil.containsConflictMarker(
                            old_contents.decode("utf-8"))
                    else:
                        is_remote_conflict = NotImplemented
                    diff = None

                # do not sync if the conflict is remote and local, or if it is local
                # and the page has never been synchronised
                if (sp.local_mime_type == MIMETYPE_MOIN
                        and wikiutil.containsConflictMarker(
                            current_page.get_raw_body())  # YYY direct access
                        and (remote_rev is None or is_remote_conflict)):
                    self.log_status(
                        ActionClass.WARN,
                        _("Skipped page %s because of a locally or remotely unresolved conflict."
                          ), (local_pagename, ))
                    return

                if remote_rev is None and direction == BOTH:
                    self.log_status(
                        ActionClass.INFO,
                        _("This is the first synchronisation between the local and the remote wiki for the page %s."
                          ), (sp.name, ))

                # calculate remote page contents from diff
                if sp.remote_deleted:
                    remote_contents = ""
                elif diff is None:
                    remote_contents = old_contents
                else:
                    remote_contents = patch(patch_base_contents,
                                            decompress(diff))

                if diff is None:  # only a local change
                    if debug:
                        self.log_status(
                            ActionClass.INFO,
                            raw_suffix="Only local changes for %r" % sp.name)
                    merged_text_raw = current_page.get_raw_body_str()
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        merged_text = merged_text_raw.decode("utf-8")
                elif local_rev == sp.local_rev:
                    if debug:
                        self.log_status(
                            ActionClass.INFO,
                            raw_suffix="Only remote changes for %r" % sp.name)
                    merged_text_raw = remote_contents
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        merged_text = merged_text_raw.decode("utf-8")
                else:
                    # this is guaranteed by a check above
                    assert sp.local_mime_type == MIMETYPE_MOIN
                    remote_contents_unicode = remote_contents.decode("utf-8")
                    # here, the actual 3-way merge happens
                    merged_text = diff3.text_merge(
                        old_contents.decode("utf-8"), remote_contents_unicode,
                        current_page.get_raw_body(), 1,
                        *conflict_markers)  # YYY direct access
                    if debug:
                        self.log_status(
                            ActionClass.INFO,
                            raw_suffix="Merging %r, %r and %r into %r" %
                            (old_contents.decode("utf-8"),
                             remote_contents_unicode,
                             current_page.get_raw_body(), merged_text))
                    merged_text_raw = merged_text.encode("utf-8")

                # generate binary diff
                diff = textdiff(remote_contents, merged_text_raw)
                if debug:
                    self.log_status(ActionClass.INFO,
                                    raw_suffix="Diff against %r" %
                                    remote_contents)

                # XXX upgrade to write lock
                try:
                    local_change_done = True
                    current_page.saveText(merged_text,
                                          sp.local_rev or 0,
                                          comment=comment)  # YYY direct access
                except PageEditor.Unchanged:
                    local_change_done = False
                except PageEditor.EditConflict:
                    local_change_done = False
                    assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"

                new_local_rev = current_page.get_real_rev(
                )  # YYY direct access

                def rollback_local_change():  # YYY direct local access
                    comment = u"Wikisync rollback"
                    rev = new_local_rev - 1
                    revstr = '%08d' % rev
                    oldpg = Page(self.request, sp.local_name, rev=rev)
                    pg = PageEditor(self.request, sp.local_name)
                    if not oldpg.exists():
                        pg.deletePage(comment)
                    else:
                        try:
                            savemsg = pg.saveText(oldpg.get_raw_body(),
                                                  0,
                                                  comment=comment,
                                                  extra=revstr,
                                                  action="SAVE/REVERT")
                        except PageEditor.Unchanged:
                            pass
                    return sp.local_name

                if local_change_done:
                    self.register_rollback(rollback_local_change)

                if direction == BOTH:
                    yield remote.merge_diff_pre(sp.remote_name, compress(diff),
                                                new_local_rev,
                                                current_remote_rev,
                                                current_remote_rev,
                                                local_full_iwid, sp.name)
                    try:
                        very_current_remote_rev = remote.merge_diff_post(
                            yielder.fetch_result())
                    except NotAllowedException:
                        self.log_status(
                            ActionClass.ERROR,
                            _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."
                              ), (sp.name, ))
                        return
                else:
                    very_current_remote_rev = current_remote_rev

                if local_change_done:
                    self.remove_rollback(rollback_local_change)

                # this is needed at least for direction both and cgi sync to standalone for immutable pages on both
                # servers. It is not needed for the opposite direction
                try:
                    tags.add(remote_wiki=remote_full_iwid,
                             remote_rev=very_current_remote_rev,
                             current_rev=new_local_rev,
                             direction=direction,
                             normalised_name=sp.name)
                except:
                    self.log_status(
                        ActionClass.ERROR,
                        _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."
                          ), (sp.name, ))
                    return

                if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(
                        merged_text):
                    self.log_status(ActionClass.INFO,
                                    _("Page %s successfully merged."),
                                    (sp.name, ))
                elif is_remote_conflict:
                    self.log_status(
                        ActionClass.WARN,
                        _("Page %s contains conflicts that were introduced on the remote side."
                          ), (sp.name, ))
                else:
                    self.log_status(ActionClass.WARN,
                                    _("Page %s merged with conflicts."),
                                    (sp.name, ))

                # XXX release lock

        rpc_aggregator.scheduler(remote.create_multicall_object, handle_page,
                                 m_pages, 8, remote.prepare_multicall)
Example #22
    def sync(self, params, local, remote):
        """ This method does the synchronisation work.
            Currently, it handles nearly all cases.
            The major missing part is rename handling.
            There are a few other cases left that have to be implemented:
                Wiki A    | Wiki B   | Remark
                ----------+----------+------------------------------
                exists    | non-     | Now the wiki knows that the page was renamed.
                with tags | existing | There should be an RPC method that asks
                          |          | for the new name (which could be recorded
                          |          | on page rename). Then the page is
                          |          | renamed in Wiki A as well and the sync
                          |          | is done normally.
                          |          | Every wiki retains a dict that maps
                          |          | (IWID, oldname) => newname and that is
                          |          | updated on every rename. oldname refers
                          |          | to the pagename known by the old wiki (can be
                          |          | gathered from tags).
                ----------+----------+-------------------------------
                exists    | any case | Try a rename search first, then
                          |          | do a sync without considering tags
                with tags | with non | to ensure data integrity.
                          | matching | Hmm, how do we detect this
                          | tags     | case if the unmatching tags are only
                          |          | on the remote side?
                ----------+----------+-------------------------------
        """
        _ = lambda x: x # we will translate it later

        direction = params["direction"]
        if direction == BOTH:
            match_direction = direction
        else:
            match_direction = None

        local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
        remote_full_iwid = remote.iwid_full

        self.log_status(self.INFO, _("Synchronisation started -"), raw_suffix=" <<DateTime(%s)>>" % self.page._get_local_timestamp())

        l_pages = local.get_pages()
        r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)

        if params["groupList"]:
            pages_from_groupList = set(local.getGroupItems(params["groupList"]))
            r_pages = SyncPage.filter(r_pages, pages_from_groupList.__contains__)
            l_pages = SyncPage.filter(l_pages, pages_from_groupList.__contains__)

        m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]

        self.log_status(self.INFO, _("Got a list of %s local and %s remote pages. This results in %s pages to process."),
                        (str(len(l_pages)), str(len(r_pages)), str(len(m_pages))))

        if params["pageMatch"]:
            m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
            self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), ))

        class handle_page(rpc_aggregator.RPCYielder):
            def run(yielder, sp):
                # XXX add locking, acquire read-lock on sp
                if debug:
                    self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % sp)

                local_pagename = sp.local_name
                if not self.request.user.may.write(local_pagename):
                    self.log_status(ActionClass.WARN, _("Skipped page %s because of no write access to local page."), (local_pagename, ))
                    return

                current_page = PageEditor(self.request, local_pagename) # YYY direct access
                comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())

                tags = TagStore(current_page)

                matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction)
                matching_tags.sort()
                if debug:
                    self.log_status(ActionClass.INFO, raw_suffix="Tags: %r <<BR>> All: %r" % (matching_tags, tags.tags))

                # some default values for non matching tags
                normalised_name = None
                remote_rev = None
                local_rev = sp.local_rev # merge against the newest version
                old_contents = ""

                if matching_tags:
                    newest_tag = matching_tags[-1]

                    local_change = newest_tag.current_rev != sp.local_rev
                    remote_change = newest_tag.remote_rev != sp.remote_rev

                    # handle some cases where we cannot continue for this page
                    if not remote_change and (direction == DOWN or not local_change):
                        return # no changes done, next page
                    if sp.local_deleted and sp.remote_deleted:
                        return
                    if sp.remote_deleted and not local_change:
                        msg = local.delete_page(sp.local_name, comment)
                        if not msg:
                            self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (sp.name, ))
                        else:
                            self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (sp.name, ), msg)
                        return
                    if sp.local_deleted and not remote_change:
                        if direction == DOWN:
                            return
                        yield remote.delete_page_pre(sp.remote_name, sp.remote_rev, local_full_iwid)
                        msg = remote.delete_page_post(yielder.fetch_result())
                        if not msg:
                            self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (sp.name, ))
                        else:
                            self.log_status(ActionClass.ERROR, _("Error while deleting page %s remotely:"), (sp.name, ), msg)
                        return
                    if sp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change):
                        self.log_status(ActionClass.WARN, _("The item %s cannot be merged automatically but was changed in both wikis. Please delete it in one of both wikis and try again."), (sp.name, ))
                        return
                    if sp.local_mime_type != sp.remote_mime_type:
                        self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (sp.name, ))
                        return
                    if newest_tag.normalised_name != sp.name:
                        self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full synchronisation history is lost for this page."), (sp.name, )) # XXX implement renames
                    else:
                        normalised_name = newest_tag.normalised_name
                        local_rev = newest_tag.current_rev
                        remote_rev = newest_tag.remote_rev
                        old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
                else:
                    if (sp.local_deleted and not sp.remote_rev) or (
                        sp.remote_deleted and not sp.local_rev):
                        return

                self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, sp.remote_name))

                if direction == DOWN:
                    remote_rev = None # always fetch the full page, ignore remote conflict check
                    patch_base_contents = ""
                else:
                    patch_base_contents = old_contents

                # retrieve remote contents diff
                if remote_rev != sp.remote_rev:
                    if sp.remote_deleted: # ignore remote changes
                        current_remote_rev = sp.remote_rev
                        is_remote_conflict = False
                        diff = None
                        self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (sp.name, ))
                    else:
                        yield remote.get_diff_pre(sp.remote_name, remote_rev, None, normalised_name)
                        diff_result = remote.get_diff_post(yielder.fetch_result())
                        if diff_result is None:
                            self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (sp.remote_name, ))
                            return
                        is_remote_conflict = diff_result["conflict"]
                        assert diff_result["diffversion"] == 1
                        diff = diff_result["diff"]
                        current_remote_rev = diff_result["current"]
                else:
                    current_remote_rev = remote_rev
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8"))
                    else:
                        is_remote_conflict = NotImplemented
                    diff = None

                # do not sync if the conflict is remote and local, or if it is local
                # and the page has never been synchronised
                if (sp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) # YYY direct access
                    and (remote_rev is None or is_remote_conflict)):
                    self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, ))
                    return

                if remote_rev is None and direction == BOTH:
                    self.log_status(ActionClass.INFO, _("This is the first synchronisation between the local and the remote wiki for the page %s."), (sp.name, ))

                # calculate remote page contents from diff
                if sp.remote_deleted:
                    remote_contents = ""
                elif diff is None:
                    remote_contents = old_contents
                else:
                    remote_contents = patch(patch_base_contents, decompress(diff))

                if diff is None: # only a local change
                    if debug:
                        self.log_status(ActionClass.INFO, raw_suffix="Only local changes for %r" % sp.name)
                    merged_text_raw = current_page.get_raw_body_str()
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        merged_text = merged_text_raw.decode("utf-8")
                elif local_rev == sp.local_rev:
                    if debug:
                        self.log_status(ActionClass.INFO, raw_suffix="Only remote changes for %r" % sp.name)
                    merged_text_raw = remote_contents
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        merged_text = merged_text_raw.decode("utf-8")
                else:
                    # this is guaranteed by a check above
                    assert sp.local_mime_type == MIMETYPE_MOIN
                    remote_contents_unicode = remote_contents.decode("utf-8")
                    # here, the actual 3-way merge happens
                    merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 1, *conflict_markers) # YYY direct access
                    if debug:
                        self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r into %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), merged_text))
                    merged_text_raw = merged_text.encode("utf-8")

                # generate binary diff
                diff = textdiff(remote_contents, merged_text_raw)
                if debug:
                    self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % remote_contents)

                # XXX upgrade to write lock
                try:
                    local_change_done = True
                    current_page.saveText(merged_text, sp.local_rev or 0, comment=comment) # YYY direct access
                except PageEditor.Unchanged:
                    local_change_done = False
                except PageEditor.EditConflict:
                    local_change_done = False
                    assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"

                new_local_rev = current_page.get_real_rev() # YYY direct access

                def rollback_local_change(): # YYY direct local access
                    comment = u"Wikisync rollback"
                    rev = new_local_rev - 1
                    revstr = '%08d' % rev
                    oldpg = Page(self.request, sp.local_name, rev=rev)
                    pg = PageEditor(self.request, sp.local_name)
                    if not oldpg.exists():
                        pg.deletePage(comment)
                    else:
                        try:
                            savemsg = pg.saveText(oldpg.get_raw_body(), 0, comment=comment, extra=revstr, action="SAVE/REVERT")
                        except PageEditor.Unchanged:
                            pass
                    return sp.local_name

                if local_change_done:
                    self.register_rollback(rollback_local_change)

                if direction == BOTH:
                    yield remote.merge_diff_pre(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
                    try:
                        very_current_remote_rev = remote.merge_diff_post(yielder.fetch_result())
                    except NotAllowedException:
                        self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, ))
                        return
                else:
                    very_current_remote_rev = current_remote_rev


                if local_change_done:
                    self.remove_rollback(rollback_local_change)

                # this is needed at least for direction both and cgi sync to standalone for immutable pages on both
                # servers. It is not needed for the opposite direction
                try:
                    tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name)
                except:
                    self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, ))
                    return

                if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text):
                    self.log_status(ActionClass.INFO, _("Page %s successfully merged."), (sp.name, ))
                elif is_remote_conflict:
                    self.log_status(ActionClass.WARN, _("Page %s contains conflicts that were introduced on the remote side."), (sp.name, ))
                else:
                    self.log_status(ActionClass.WARN, _("Page %s merged with conflicts."), (sp.name, ))

                # XXX release lock

        rpc_aggregator.scheduler(remote.create_multicall_object, handle_page, m_pages, 8, remote.prepare_multicall)
Example #23
import atexit
import sys

import py

rootdir = py.magic.autopath().dirpath()
moindir = rootdir.join("..")
sys.path.insert(0, str(moindir))

from MoinMoin.support.python_compatibility import set
from MoinMoin.web.request import TestRequest, Client
from MoinMoin.wsgiapp import Application, init
from MoinMoin._tests import maketestwiki, wikiconfig

coverage_modules = set()

try:
    """
    This code adds support for coverage.py (see
    http://nedbatchelder.com/code/modules/coverage.html).
    It prints a coverage report for the modules specified in all
    module globals (of the test modules) named "coverage_modules".
    """

    import coverage

    def report_coverage():
        coverage.stop()
        module_list = [sys.modules[mod] for mod in coverage_modules]
        module_list.sort()