Code example #1
File: mkpagepacks.py  Project: steveyen/moingo
    def packagePages(self, pagelist, filename, function):
        """ Puts pages from pagelist into filename and calls function on them on installation. """
        request = self.request
        try:
            os.remove(filename)
        except OSError:
            pass
        zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)

        cnt = 0
        script = [packLine(['MoinMoinPackage', '1']), ]

        for pagename in pagelist:
            pagename = pagename.strip()
            page = Page(request, pagename)
            if page.exists():
                cnt += 1
                script.append(packLine([function, str(cnt), pagename]))
                timestamp = wikiutil.version2timestamp(page.mtime_usecs())
                zi = zipfile.ZipInfo(filename=str(cnt), date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
                zi.compress_type = COMPRESSION_LEVEL
                zf.writestr(zi, page.get_raw_body().encode("utf-8"))
            else:
                # The page does not exist; skip it silently.
                pass

        script += [packLine(['Print', 'Installed MoinMaster page bundle %s.' % os.path.basename(filename)])]

        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
        zf.close()
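
The script list is written into the zip as a single text member (MOIN_PACKAGE_FILE). Below is a minimal sketch of the resulting script format, using a hypothetical pack_line stand-in for MoinMoin.packages.packLine (assumed to escape backslashes and the "|" separator, as the quoting tests at the end of this page suggest):

def pack_line(items, separator=u"|"):
    # Hypothetical stand-in for packLine: join fields with the separator,
    # escaping backslashes and literal separators first.
    return separator.join(item.replace(u"\\", u"\\\\").replace(separator, u"\\" + separator)
                          for item in items)

script = [pack_line([u"MoinMoinPackage", u"1"]),
          pack_line([u"AddRevision", u"1", u"FrontPage"]),
          pack_line([u"Print", u"Installed MoinMaster page bundle demo.zip."])]
print(u"\n".join(script))
# MoinMoinPackage|1
# AddRevision|1|FrontPage
# Print|Installed MoinMaster page bundle demo.zip.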
Code example #2
File: wikisync.py  Project: execgit/gwiki-with-moin
class MoinRemoteWiki(RemoteWiki):
    """ Used for MoinMoin wikis reachable via XMLRPC. """
    def __init__(self,
                 request,
                 interwikiname,
                 prefix,
                 pagelist,
                 user,
                 password,
                 verbose=False):
        self.request = request
        self.prefix = prefix
        self.pagelist = pagelist
        self.verbose = verbose
        _ = self.request.getText

        wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_interwiki(
            self.request, interwikiname, '')
        self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
        self.valid = not wikitag_bad
        self.xmlrpc_url = str(self.wiki_url + "?action=xmlrpc2")
        if not self.valid:
            self.connection = None
            return

        self.connection = self.createConnection()

        try:
            iw_list = self.connection.interwikiName()
        except socket.error:
            raise UnsupportedWikiException(
                _("The wiki is currently not reachable."))
        except xmlrpclib.Fault, err:
            raise UnsupportedWikiException("xmlrpclib.Fault: %s" % str(err))

        if user and password:
            token = self.connection.getAuthToken(user, password)
            if token:
                self.token = token
            else:
                raise NotAllowedException(_("Invalid username or password."))
        else:
            self.token = None

        self.remote_interwikiname = remote_interwikiname = iw_list[0]
        self.remote_iwid = remote_iwid = iw_list[1]
        self.is_anonymous = remote_interwikiname is None
        if not self.is_anonymous and interwikiname != remote_interwikiname:
            raise UnsupportedWikiException(
                _("The remote wiki uses a different InterWiki name (%(remotename)s)"
                  " internally than you specified (%(localname)s).") % {
                      "remotename": wikiutil.escape(remote_interwikiname),
                      "localname": wikiutil.escape(interwikiname)
                  })

        if self.is_anonymous:
            self.iwid_full = packLine([remote_iwid])
        else:
            self.iwid_full = packLine([remote_iwid, interwikiname])
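
A hedged sketch of how such a connection is opened and probed; createConnection (shown in a later example) wraps xmlrpclib.ServerProxy, and the URL here is illustrative only:

import xmlrpclib  # Python 2 module; Python 3 renamed it xmlrpc.client

xmlrpc_url = "http://example.com/wiki?action=xmlrpc2"  # illustrative URL
connection = xmlrpclib.ServerProxy(xmlrpc_url, allow_none=True)
# interwikiName() returns a pair [interwikiname_or_None, iwid]; a wiki that
# reports None for its InterWiki name is treated as anonymous above.
iw_list = connection.interwikiName()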
Code example #3
    def collectpackage(self, pagelist, fileobject, pkgname="", include_attachments=False):
        """ Expects a list of pages as an argument, and fileobject to be an open
        file object, which a zipfile will get written to.

        @param pagelist: pages to package
        @param fileobject: open file object to write to
        @param pkgname: optional file name, to prevent self packaging
        @param include_attachments: True if you want attachments collected
        @rtype: string or None
        @return: error message, if one happened
        """
        _ = self.request.getText
        COMPRESSION_LEVEL = zipfile.ZIP_DEFLATED

        pages = []
        for pagename in pagelist:
            pagename = wikiutil.normalize_pagename(pagename, self.request.cfg)
            if pagename:
                page = Page(self.request, pagename)
                if page.exists() and self.request.user.may.read(pagename):
                    pages.append(page)
        if not pages:
            return (_('No pages like "%s"!') % wikiutil.escape(pagelist))

        # Set zipfile output
        zf = zipfile.ZipFile(fileobject, "w", COMPRESSION_LEVEL)

        cnt = 0
        userid = user.getUserIdentification(self.request)
        script = [packLine(['MoinMoinPackage', '1']), ]

        for page in pages:
            cnt += 1
            files = _get_files(self.request, page.page_name)
            script.append(packLine(["AddRevision", str(cnt), page.page_name, userid, "Created by the PackagePages action."]))

            timestamp = wikiutil.version2timestamp(page.mtime_usecs())

            # avoid getting strange exceptions from zipfile in case of pre-1980 timestamps
            nineteeneighty = (10 * 365 + 3) * 24 * 3600 # 1970 + 10y + 3d
            timestamp = max(nineteeneighty, timestamp) # zip can not store timestamps before 1980

            zi = zipfile.ZipInfo(filename=str(cnt), date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
            zi.compress_type = COMPRESSION_LEVEL
            zf.writestr(zi, page.get_raw_body().encode("utf-8"))
            if include_attachments:
                for attname in files:
                    if attname != pkgname:
                        cnt += 1
                        zipname = "%d_attachment" % cnt
                        script.append(packLine(["AddAttachment", zipname, attname, page.page_name, userid, "Created by the PackagePages action."]))
                        filename = AttachFile.getFilename(self.request, page.page_name, attname)
                        zf.write(filename, zipname)
        script += [packLine(['Print', 'Thank you for using PackagePages!'])]

        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
        zf.close()
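
The pre-1980 clamp matters because zip stores DOS timestamps that cannot represent earlier dates, so writing an epoch-0 ZipInfo fails. A self-contained, timezone-safe sketch of the same workaround:

import io
import zipfile
from datetime import datetime

def safe_zipinfo(name, timestamp):
    # Clamp to 1980: the zip format cannot store earlier timestamps.
    dt = datetime.fromtimestamp(max(0, timestamp))
    if dt.year < 1980:
        dt = datetime(1980, 1, 2)
    return zipfile.ZipInfo(filename=name, date_time=dt.timetuple()[:6])

buf = io.BytesIO()
zf = zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED)
zf.writestr(safe_zipinfo("1", 0), u"page body".encode("utf-8"))  # epoch 0 would fail unclamped
zf.close()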
Code example #4
File: mkpagepacks.py  Project: Opngate/moinmoin
    def packagePages(self, pagelist, filename, function):
        """ Puts pages from pagelist into filename and calls function on them on installation. """
        request = self.request
        try:
            os.remove(filename)
        except OSError:
            pass
        # page LanguageSetup needs no packing!
        existing_pages = [
            pagename for pagename in pagelist
            if Page(request, pagename).exists() and pagename != 'LanguageSetup'
        ]
        if not existing_pages:
            return

        zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)

        script = [
            packLine(['MoinMoinPackage', '1']),
        ]

        cnt = 0
        for pagename in existing_pages:
            pagename = pagename.strip()
            page = Page(request, pagename)
            files = _get_files(request, pagename)
            for attname in files:
                cnt += 1
                zipname = "%d" % cnt
                script.append(
                    packLine([
                        "ReplaceUnderlayAttachment", zipname, attname, pagename
                    ]))
                attpath = AttachFile.getFilename(request, pagename, attname)
                zf.write(attpath, zipname)

            cnt += 1
            zipname = "%d" % cnt
            script.append(packLine([function, zipname, pagename]))
            timestamp = wikiutil.version2timestamp(page.mtime_usecs())
            zi = zipfile.ZipInfo(
                filename=zipname,
                date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
            zi.compress_type = COMPRESSION_LEVEL
            zf.writestr(zi, page.get_raw_body().encode("utf-8"))

        script += [
            packLine([
                'Print',
                'Installed MoinMaster page bundle %s.' %
                os.path.basename(filename)
            ])
        ]

        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
        zf.close()
Code example #5
File: mkpagepacks.py  Project: microcosmx/experiments
    def packagePages(self, pagelist, filename, function):
        """ Puts pages from pagelist into filename and calls function on them on installation. """
        request = self.request
        try:
            os.remove(filename)
        except OSError:
            pass
        # page LanguageSetup needs no packing!
        existing_pages = [
            pagename for pagename in pagelist if Page(request, pagename).exists() and pagename != "LanguageSetup"
        ]
        if not existing_pages:
            return

        zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)

        script = [packLine(["MoinMoinPackage", "1"])]

        fallback_timestamp = int(time.time())

        cnt = 0
        for pagename in existing_pages:
            pagename = pagename.strip()
            page = Page(request, pagename)
            files = _get_files(request, pagename)
            for attname in files:
                cnt += 1
                zipname = "%d" % cnt
                script.append(packLine(["ReplaceUnderlayAttachment", zipname, attname, pagename]))
                attpath = AttachFile.getFilename(request, pagename, attname)
                zf.write(attpath, zipname)

            cnt += 1
            zipname = "%d" % cnt
            script.append(packLine([function, zipname, pagename]))
            timestamp = wikiutil.version2timestamp(page.mtime_usecs())
            if not timestamp:
                # page.mtime_usecs() returns 0 for underlay pages
                timestamp = fallback_timestamp
            dt = datetime.fromtimestamp(timestamp)
            zi = zipfile.ZipInfo(filename=zipname, date_time=dt.timetuple()[:6])
            zi.compress_type = COMPRESSION_LEVEL
            zf.writestr(zi, page.get_raw_body().encode("utf-8"))

        script += [packLine(["Print", "Installed MoinMaster page bundle %s." % os.path.basename(filename)])]

        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
        zf.close()
Code example #6
File: mkpagepacks.py  Project: steveyen/moingo
    def packageCompoundInstaller(self, bundledict, filename):
        """ Creates a package which installs all other packages. """
        try:
            os.remove(filename)
        except OSError:
            pass
        zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)

        script = [packLine(['MoinMoinPackage', '1']), ]

        script += [packLine(["InstallPackage", "SystemPagesSetup", name + ".zip"])
                   for name in bundledict if name not in (NODIST, EXTRA, ALL, u"English")]
        script += [packLine(['Print', 'Installed all MoinMaster page bundles.'])]

        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
        zf.close()
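
With stub data, the compound installer script comes out as a few InstallPackage lines; pack_line is again a hypothetical packLine stand-in, and the NODIST/EXTRA/ALL exclusions are elided for brevity:

def pack_line(items, separator=u"|"):
    return separator.join(i.replace(u"\\", u"\\\\").replace(separator, u"\\" + separator)
                          for i in items)

bundledict = {u"German": None, u"French": None, u"English": None}
script = [pack_line([u"MoinMoinPackage", u"1"])]
script += [pack_line([u"InstallPackage", u"SystemPagesSetup", name + u".zip"])
           for name in sorted(bundledict) if name != u"English"]
script += [pack_line([u"Print", u"Installed all MoinMaster page bundles."])]
print(u"\n".join(script))
# MoinMoinPackage|1
# InstallPackage|SystemPagesSetup|French.zip
# InstallPackage|SystemPagesSetup|German.zip
# Print|Installed all MoinMaster page bundles.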
Code example #7
File: SyncPages.py  Project: steveyen/moingo
    def generate_log_table(self):
        """ Transforms self.status into a user readable table. """
        table_line = u"|| %(smiley)s || %(message)s%(raw_suffix)s ||"
        table = []

        for line in self.status:
            level, message, substitutions, raw_suffix = line
            if message:
                if substitutions:
                    macro_args = [message] + list(substitutions)
                    message = u"<<GetText2(|%s)>>" % (packLine(macro_args), )
                else:
                    message = u"<<GetText(%s)>>" % (message, )
            else:
                message = u""
            table.append(table_line % {"smiley": level[1],
                                       "message": message,
                                       "raw_suffix": raw_suffix.replace("\n", "<<BR>>")})

        return "\n".join(table)
Code example #8
def execute(pagename, request):
    pagename_header = '%s-%s.zip' % (pagename, datetime.now().isoformat()[:10])
    pagename_header = pagename_header.encode('ascii', 'ignore')

    request.content_type = 'application/zip'
    request.headers['Content-Disposition'] = \
        'attachment; filename="%s"' % pagename_header

    args = values_to_form(request.values)

    try:
        args = args['args'][0]
    except (KeyError, IndexError):
        args = u''

    pagelist, metakeys, _ = metatable_parseargs(request,
                                                args,
                                                get_all_keys=True)

    renameDict = dict()

    for page in pagelist:
        metas = get_metas(request,
                          page, ["gwikirename"],
                          abs_attach=False,
                          checkAccess=False)
        renameList = metas["gwikirename"]
        if renameList:
            renameDict[page] = renameList

    output = StringIO()
    zip = zipfile.ZipFile(output, "w", zipfile.ZIP_DEFLATED)

    userid = user.getUserIdentification(request)
    script = [
        packLine(['MoinMoinPackage', '1']),
    ]
    counter = 0

    for pagename in pagelist:
        counter += 1
        page = Page(request, pagename)
        timestamp = wikiutil.version2timestamp(page.mtime_usecs())
        # Underlay pages have an mtime of 0 (the epoch); zipfile in
        # Python 2.7 cannot store timestamps before 1980.
        if not timestamp:
            pagefile, rev, exists = page.get_rev()
            if rev == 99999999:
                # We should never get here
                log.error("Page %s neither in pages nor underlay, skipping." %
                          (pagename))
                continue
            timestamp = os.path.getctime(pagefile)
        pagetext = page.get_raw_body().encode("utf-8")
        filename = str(counter)
        zinfo = zipfile.ZipInfo(
            filename=filename,
            date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        zip.writestr(zinfo, pagetext)

        targetNameList = renameDict.get(pagename, [pagename])
        for targetName in targetNameList:
            script.append(
                packLine(["AddRevision", filename, targetName, userid, ""]))

        for attachment in _get_files(request, pagename):
            counter += 1
            sourcefile = AttachFile.getFilename(request, pagename, attachment)
            filename = str(counter) + "-attachment"
            zip.write(sourcefile, filename)
            script.append(
                packLine([
                    "AddAttachment", filename, attachment, pagename, userid, ""
                ]))

    zip.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
    zip.close()

    request.write(output.getvalue())
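
Note the rename fan-out above: a single stored page body can be installed under several target names, one AddRevision line per name. A stub illustration (pack_line is a hypothetical packLine stand-in):

def pack_line(items, separator=u"|"):
    return separator.join(i.replace(u"\\", u"\\\\").replace(separator, u"\\" + separator)
                          for i in items)

renameDict = {u"OldName": [u"NewName", u"AliasName"]}  # stub gwikirename metas
pagename, filename, userid = u"OldName", u"1", u"SomeUser"

script = [pack_line([u"AddRevision", filename, targetName, userid, u""])
          for targetName in renameDict.get(pagename, [pagename])]
print(u"\n".join(script))
# AddRevision|1|NewName|SomeUser|
# AddRevision|1|AliasName|SomeUser|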
Code example #9
File: test_packages.py  Project: steveyen/moingo
    def testQuoting(self):
        for line in ([':foo', 'is\\', 'ja|', u't|ü', u'baAzß'], [], ['', '']):
            assert line == unpackLine(packLine(line))
Code example #10
File: SyncPages.py  Project: steveyen/moingo
    def sync(self, params, local, remote):
        """ This method does the synchronisation work.
            Currently, it handles nearly all cases.
            The major missing part is rename handling.
            There are a few other cases left that have to be implemented:
                Wiki A    | Wiki B   | Remark
                ----------+----------+------------------------------
                exists    | non-     | Now the wiki knows that the page was renamed.
                with tags | existing | There should be an RPC method that asks
                          |          | for the new name (which could be recorded
                          |          | on page rename). Then the page is
                          |          | renamed in Wiki A as well and the sync
                          |          | is done normally.
                          |          | Every wiki retains a dict that maps
                          |          | (IWID, oldname) => newname and that is
                          |          | updated on every rename. oldname refers
                          |          | to the pagename known by the old wiki (can be
                          |          | gathered from tags).
                ----------+----------+-------------------------------
                exists    | any case | Try a rename search first, then
                          |          | do a sync without considering tags
                with tags | with non | to ensure data integrity.
                          | matching | Hmm, how do we detect this
                          | tags     | case if the unmatching tags are only
                          |          | on the remote side?
                ----------+----------+-------------------------------
        """
        _ = lambda x: x # we will translate it later

        direction = params["direction"]
        if direction == BOTH:
            match_direction = direction
        else:
            match_direction = None

        local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
        remote_full_iwid = remote.iwid_full

        self.log_status(self.INFO, _("Synchronisation started -"), raw_suffix=" <<DateTime(%s)>>" % self.page._get_local_timestamp())

        l_pages = local.get_pages()
        r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)

        if params["groupList"]:
            pages_from_groupList = set(local.getGroupItems(params["groupList"]))
            r_pages = SyncPage.filter(r_pages, pages_from_groupList.__contains__)
            l_pages = SyncPage.filter(l_pages, pages_from_groupList.__contains__)

        m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]

        self.log_status(self.INFO, _("Got a list of %s local and %s remote pages. This results in %s pages to process."),
                        (str(len(l_pages)), str(len(r_pages)), str(len(m_pages))))

        if params["pageMatch"]:
            m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
            self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), ))

        class handle_page(rpc_aggregator.RPCYielder):
            def run(yielder, sp):
                # XXX add locking, acquire read-lock on sp
                if debug:
                    self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % sp)

                local_pagename = sp.local_name
                if not self.request.user.may.write(local_pagename):
                    self.log_status(ActionClass.WARN, _("Skipped page %s because of no write access to local page."), (local_pagename, ))
                    return

                current_page = PageEditor(self.request, local_pagename) # YYY direct access
                comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())

                tags = TagStore(current_page)

                matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction)
                matching_tags.sort()
                if debug:
                    self.log_status(ActionClass.INFO, raw_suffix="Tags: %r <<BR>> All: %r" % (matching_tags, tags.tags))

                # some default values for non matching tags
                normalised_name = None
                remote_rev = None
                local_rev = sp.local_rev # merge against the newest version
                old_contents = ""

                if matching_tags:
                    newest_tag = matching_tags[-1]

                    local_change = newest_tag.current_rev != sp.local_rev
                    remote_change = newest_tag.remote_rev != sp.remote_rev

                    # handle some cases where we cannot continue for this page
                    if not remote_change and (direction == DOWN or not local_change):
                        return # no changes done, next page
                    if sp.local_deleted and sp.remote_deleted:
                        return
                    if sp.remote_deleted and not local_change:
                        msg = local.delete_page(sp.local_name, comment)
                        if not msg:
                            self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (sp.name, ))
                        else:
                            self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (sp.name, ), msg)
                        return
                    if sp.local_deleted and not remote_change:
                        if direction == DOWN:
                            return
                        yield remote.delete_page_pre(sp.remote_name, sp.remote_rev, local_full_iwid)
                        msg = remote.delete_page_post(yielder.fetch_result())
                        if not msg:
                            self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (sp.name, ))
                        else:
                            self.log_status(ActionClass.ERROR, _("Error while deleting page %s remotely:"), (sp.name, ), msg)
                        return
                    if sp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change):
                        self.log_status(ActionClass.WARN, _("The item %s cannot be merged automatically but was changed in both wikis. Please delete it in one of both wikis and try again."), (sp.name, ))
                        return
                    if sp.local_mime_type != sp.remote_mime_type:
                        self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (sp.name, ))
                        return
                    if newest_tag.normalised_name != sp.name:
                        self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full synchronisation history is lost for this page."), (sp.name, )) # XXX implement renames
                    else:
                        normalised_name = newest_tag.normalised_name
                        local_rev = newest_tag.current_rev
                        remote_rev = newest_tag.remote_rev
                        old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
                else:
                    if (sp.local_deleted and not sp.remote_rev) or (
                        sp.remote_deleted and not sp.local_rev):
                        return

                self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, sp.remote_name))

                if direction == DOWN:
                    remote_rev = None # always fetch the full page, ignore remote conflict check
                    patch_base_contents = ""
                else:
                    patch_base_contents = old_contents

                # retrieve remote contents diff
                if remote_rev != sp.remote_rev:
                    if sp.remote_deleted: # ignore remote changes
                        current_remote_rev = sp.remote_rev
                        is_remote_conflict = False
                        diff = None
                        self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (sp.name, ))
                    else:
                        yield remote.get_diff_pre(sp.remote_name, remote_rev, None, normalised_name)
                        diff_result = remote.get_diff_post(yielder.fetch_result())
                        if diff_result is None:
                            self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (sp.remote_name, ))
                            return
                        is_remote_conflict = diff_result["conflict"]
                        assert diff_result["diffversion"] == 1
                        diff = diff_result["diff"]
                        current_remote_rev = diff_result["current"]
                else:
                    current_remote_rev = remote_rev
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8"))
                    else:
                        is_remote_conflict = NotImplemented
                    diff = None

                # do not sync if the conflict is remote and local, or if it is local
                # and the page has never been synchronised
                if (sp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) # YYY direct access
                    and (remote_rev is None or is_remote_conflict)):
                    self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, ))
                    return

                if remote_rev is None and direction == BOTH:
                    self.log_status(ActionClass.INFO, _("This is the first synchronisation between the local and the remote wiki for the page %s."), (sp.name, ))

                # calculate remote page contents from diff
                if sp.remote_deleted:
                    remote_contents = ""
                elif diff is None:
                    remote_contents = old_contents
                else:
                    remote_contents = patch(patch_base_contents, decompress(diff))

                if diff is None: # only a local change
                    if debug:
                        self.log_status(ActionClass.INFO, raw_suffix="Only local changes for %r" % sp.name)
                    merged_text_raw = current_page.get_raw_body_str()
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        merged_text = merged_text_raw.decode("utf-8")
                elif local_rev == sp.local_rev:
                    if debug:
                        self.log_status(ActionClass.INFO, raw_suffix="Only remote changes for %r" % sp.name)
                    merged_text_raw = remote_contents
                    if sp.local_mime_type == MIMETYPE_MOIN:
                        merged_text = merged_text_raw.decode("utf-8")
                else:
                    # this is guaranteed by a check above
                    assert sp.local_mime_type == MIMETYPE_MOIN
                    remote_contents_unicode = remote_contents.decode("utf-8")
                    # here, the actual 3-way merge happens
                    merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 1, *conflict_markers) # YYY direct access
                    if debug:
                        self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r into %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), merged_text))
                    merged_text_raw = merged_text.encode("utf-8")

                # generate binary diff
                diff = textdiff(remote_contents, merged_text_raw)
                if debug:
                    self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % remote_contents)

                # XXX upgrade to write lock
                try:
                    local_change_done = True
                    current_page.saveText(merged_text, sp.local_rev or 0, comment=comment) # YYY direct access
                except PageEditor.Unchanged:
                    local_change_done = False
                except PageEditor.EditConflict:
                    local_change_done = False
                    assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"

                new_local_rev = current_page.get_real_rev() # YYY direct access

                def rollback_local_change(): # YYY direct local access
                    comment = u"Wikisync rollback"
                    rev = new_local_rev - 1
                    revstr = '%08d' % rev
                    oldpg = Page(self.request, sp.local_name, rev=rev)
                    pg = PageEditor(self.request, sp.local_name)
                    if not oldpg.exists():
                        pg.deletePage(comment)
                    else:
                        try:
                            savemsg = pg.saveText(oldpg.get_raw_body(), 0, comment=comment, extra=revstr, action="SAVE/REVERT")
                        except PageEditor.Unchanged:
                            pass
                    return sp.local_name

                if local_change_done:
                    self.register_rollback(rollback_local_change)

                if direction == BOTH:
                    yield remote.merge_diff_pre(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
                    try:
                        very_current_remote_rev = remote.merge_diff_post(yielder.fetch_result())
                    except NotAllowedException:
                        self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, ))
                        return
                else:
                    very_current_remote_rev = current_remote_rev

                if local_change_done:
                    self.remove_rollback(rollback_local_change)

                # this is needed at least for direction both and cgi sync to standalone for immutable pages on both
                # servers. It is not needed for the opposite direction
                try:
                    tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name)
                except:
                    self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, ))
                    return

                if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text):
                    self.log_status(ActionClass.INFO, _("Page %s successfully merged."), (sp.name, ))
                elif is_remote_conflict:
                    self.log_status(ActionClass.WARN, _("Page %s contains conflicts that were introduced on the remote side."), (sp.name, ))
                else:
                    self.log_status(ActionClass.WARN, _("Page %s merged with conflicts."), (sp.name, ))

                # XXX release lock

        rpc_aggregator.scheduler(remote.create_multicall_object, handle_page, m_pages, 8, remote.prepare_multicall)
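
The first early-exit check in run() packs four cases into one condition. A stub walk-through (the direction constants are illustrative stand-ins for the module's BOTH/DOWN values):

BOTH, DOWN = "both", "down"  # illustrative stand-ins

def needs_sync(local_change, remote_change, direction):
    # Mirrors the early exit above: nothing to do when the remote side is
    # unchanged and we either sync downwards only or the local side is
    # unchanged as well.
    if not remote_change and (direction == DOWN or not local_change):
        return False
    return True

assert not needs_sync(False, False, BOTH)  # no changes anywhere
assert not needs_sync(True, False, DOWN)   # local change irrelevant for DOWN
assert needs_sync(True, False, BOTH)       # push the local change
assert needs_sync(False, True, DOWN)       # fetch the remote change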
Code example #11
            self.token = None

        self.remote_interwikiname = remote_interwikiname = iw_list[0]
        self.remote_iwid = remote_iwid = iw_list[1]
        self.is_anonymous = remote_interwikiname is None
        if not self.is_anonymous and interwikiname != remote_interwikiname:
            raise UnsupportedWikiException(
                _(
                    "The remote wiki uses a different InterWiki name (%(remotename)s)"
                    " internally than you specified (%(localname)s)."
                )
                % {"remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)}
            )

        if self.is_anonymous:
            self.iwid_full = packLine([remote_iwid])
        else:
            self.iwid_full = packLine([remote_iwid, interwikiname])

    def createConnection(self):
        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=self.verbose)

    # Public methods
    def get_diff_pre(self, pagename, from_rev, to_rev, n_name=None):
        """ Returns the binary diff of the remote page named pagename, given
            from_rev and to_rev. Generates the call. """
        return "getDiff", (pagename, from_rev, to_rev, n_name)

    def get_diff_post(self, value):
        """ Processes the return value of the call generated by get_diff_pre. """
        if isinstance(value, xmlrpclib.Fault):
Code example #12
File: test_packages.py  Project: imosts/flume
    def testQuoting(self):
        for line in ([':foo', 'is\\', 'ja|', u't|ü', u'baAzß'], [], ['', '']):
            self.assertEqual(line, unpackLine(packLine(line)))
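
For reference, a hedged, self-contained reimplementation that makes the test above runnable standalone. The real helpers live in MoinMoin.packages and may differ in detail; this sketch only assumes the escaping behaviour the test demands, namely that backslash and the "|" separator round-trip:

# -*- coding: utf-8 -*-
def packLine(items, separator=u"|"):
    # Escape backslashes first, then literal separators, then join.
    return separator.join(item.replace(u"\\", u"\\\\").replace(separator, u"\\" + separator)
                          for item in items)

def unpackLine(line, separator=u"|"):
    if not line:
        return []  # an empty line packs an empty list
    items, current, escaped = [], [], False
    for ch in line:
        if escaped:
            current.append(ch)  # a character after a backslash is literal
            escaped = False
        elif ch == u"\\":
            escaped = True
        elif ch == separator:
            items.append(u"".join(current))
            current = []
        else:
            current.append(ch)
    items.append(u"".join(current))
    return items

for line in ([u":foo", u"is\\", u"ja|", u"t|ü", u"baAzß"], [], [u"", u""]):
    assert line == unpackLine(packLine(line))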