Code example #1
File: moin2doku.py  Project: mir3k/moin2doku
def convert_editlog(page, output = None, overwrite = False):
	pagedir = page.getPagePath()
	pagename = wikiname(pagedir)
	if not output:
		output = pagename
	edit_log = editlog.EditLog(request, page.getPagePath('edit-log'))

	changes = {}
	for log in edit_log:
		# not supported. perhaps add anyway?
		if log.action in ('ATTNEW', 'ATTDEL', 'ATTDRW'):
			continue

		# 1201095949  192.168.2.23    E   start   [email protected]
		author = log.hostname
		if log.userid:
			userdata = user.User(request, log.userid)
			if userdata.name:
				author = userdata.name

		try:
			action = {
				'SAVE' : 'E',
				'SAVENEW' : 'C',
				'SAVE/REVERT' : 'R',
			}[log.action]
		except KeyError:
			action = log.action

		mtime = str(log.ed_time_usecs / USEC)
		changes[mtime] = u"\t".join([mtime, log.addr, action, dw.cleanID(log.pagename), author, log.comment])

	# see if we have missing entries, try to recover
	page = Page(request, pagename)
	if len(page.getRevList()) != len(changes):
		print "RECOVERING edit-log, missing %d entries" % (len(page.getRevList()) - len(changes))
		for rev in page.getRevList():
			page = Page(request, pagename, rev = rev)
			mtime = page.mtime_usecs() / USEC

			if not mtime:
				pagefile, realrev, exists = page.get_rev(rev = rev)
				if os.path.exists(pagefile):
					mtime = int(os.path.getmtime(pagefile))
					print "Recovered %s: %s" % (rev, mtime)

			mtime = str(mtime)
			if not changes.has_key(mtime):
				changes[mtime] = u"\t".join([mtime, '127.0.0.1', '?', dw.cleanID(pagename), 'root', 'recovered entry'])
				print "ADDING %s" % mtime

	changes = sorted(changes.values())
	out_file = os.path.join(output_dir, 'meta', dw.metaFN(output, '.changes'))
	writefile(out_file, changes, overwrite = overwrite)
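The .changes file written above is DokuWiki's per-page metadata log: one tab-separated line per revision. A minimal sketch of one such line, using the "recovered entry" fields from the code above (the values are illustrative; USEC is assumed to be 1000000 as in the MoinMoin sources):

# Illustrative only: one tab-separated .changes line as assembled above
# (fields: mtime, IP address, action, page ID, author, comment).
mtime = str(1201095949)
line = u"\t".join([mtime, '127.0.0.1', '?', 'start', 'root', 'recovered entry'])
# line == u'1201095949\t127.0.0.1\t?\tstart\troot\trecovered entry'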
Code example #2
def execute(pagename, request):
    pagename_header = '%s-%s.zip' % (pagename, datetime.now().isoformat()[:10])
    pagename_header = pagename_header.encode('ascii', 'ignore')

    request.content_type = 'application/zip'
    request.headers['Content-Disposition'] = \
        'attachment; filename="%s"' % pagename_header

    args = values_to_form(request.values)

    try:
        args = args['args'][0]
    except (KeyError, IndexError):
        args = u''

    pagelist, metakeys, _ = metatable_parseargs(request,
                                                args,
                                                get_all_keys=True)

    renameDict = dict()

    for page in pagelist:
        metas = get_metas(request,
                          page, ["gwikirename"],
                          abs_attach=False,
                          checkAccess=False)
        renameList = metas["gwikirename"]
        if renameList:
            renameDict[page] = renameList

    output = StringIO()
    zip = zipfile.ZipFile(output, "w", zipfile.ZIP_DEFLATED)

    userid = user.getUserIdentification(request)
    script = [
        packLine(['MoinMoinPackage', '1']),
    ]
    counter = 0

    for pagename in pagelist:
        counter += 1
        page = Page(request, pagename)
        timestamp = wikiutil.version2timestamp(page.mtime_usecs())
        # Underlay pages are in epoch 0, zipfile in python 2.7 does
        # not support this.
        if not timestamp:
            pagefile, rev, exists = page.get_rev()
            if rev == 99999999:
                # We should never get here
                log.error("Page %s neither in pages or underlay, skipping." %
                          (pagename))
                continue
            timestamp = os.path.getctime(pagefile)
        pagetext = page.get_raw_body().encode("utf-8")
        filename = str(counter)
        zinfo = zipfile.ZipInfo(
            filename=filename,
            date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        zip.writestr(zinfo, pagetext)

        targetNameList = renameDict.get(pagename, [pagename])
        for targetName in targetNameList:
            script.append(
                packLine(["AddRevision", filename, targetName, userid, ""]))

        for attachment in _get_files(request, pagename):
            counter += 1
            sourcefile = AttachFile.getFilename(request, pagename, attachment)
            filename = str(counter) + "-attachment"
            zip.write(sourcefile, filename)
            script.append(
                packLine([
                    "AddAttachment", filename, attachment, pagename, userid, ""
                ]))

    zip.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
    zip.close()

    request.write(output.getvalue())
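The action streams the finished zip straight into the response body. A hedged, self-contained sketch of how a client could inspect that body (the helper name is made up for illustration; only the standard-library zipfile module is assumed):

# Hypothetical helper: list the members of the zip returned by execute(),
# including the MoinMoinPackage script added as the last entry.
import zipfile
from io import BytesIO

def list_package_members(zip_bytes):
    with zipfile.ZipFile(BytesIO(zip_bytes)) as zf:
        return zf.namelist()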
Code example #3
def convertfile(page, output=None, overwrite=False):
    pagedir = page.getPagePath()
    pagename = wikiname(pagedir)

    if not output:
        output = pagename

    print "Converting %s" % pagename

    if page.isUnderlayPage():
        print "underlay: %s" % page.request.cfg.data_underlay_dir
        print "underlay: %s" % request.cfg.data_underlay_dir
        print "SKIP UNDERLAY: %s" % pagename
        return False

    current_exists = page.exists()
    current_rev = page.current_rev()

    if convert_attic:
        revs = page.getRevList()
    else:
        revs = [current_rev]

    # Generate random ID Number for collision avoidance when attachments in Namespace have the same name
    randomID = random.randint(101, 999)

    for rev in revs:
        page = Page(request, pagename, rev=rev)
        pagefile, realrev, exists = page.get_rev(rev=rev)

        mtime = page.mtime_usecs() / USEC

        if not mtime:
            if os.path.exists(pagefile) != exists:
                raise Exception, "IT SHOULD NOT HAPPEN"

            if os.path.exists(pagefile):
                mtime = int(os.path.getmtime(pagefile))
                print "recovered %s: %s" % (rev, mtime)

            if not mtime:
                print "NO REVISION: for %s" % pagefile
                continue

        if rev == current_rev:
            out_file = os.path.join(output_dir, 'pages', dw.wikiFN(output))
            if not convert_attic and not exists:
                # when not converting attic, the current version may no longer exist; skip it
                continue
        else:
            out_file = os.path.join(output_dir, 'attic',
                                    dw.wikiFN(output, str(mtime)))

        content = moin2doku(pagename, page.get_raw_body(), randomID)
        if len(content) == 0:
            #			raise Exception, "No content"
            print "NO CONTENT: exists: %s,%s" % (exists,
                                                 os.path.exists(pagefile))

        writefile(out_file, content, overwrite=overwrite)
        copystat(pagefile, out_file)

    ID = dw.cleanID(output)
    copy_attachments(page, dw.getNS(ID), randomID)

    # convert edit-log, it's always present even if current page is not
    convert_editlog(page, output=output, overwrite=overwrite)

    # add to redirect.conf if filenames differ
    # and page must exist (no redirect for deleted pages)
    if redirect_conf and current_exists:
        # redirect dokuwiki plugin is quite picky
        # - it doesn't understand if entries are not lowercase
        # - it doesn't understand if paths are separated by forward slash
        old_page = pagename.lower().replace('/', ':').replace(' ', '_')
        if old_page != ID:
            redirect_map[old_page] = ID

    print "Converted %s as %s" % (pagename, dw.wikiFN(output))

    return True
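The redirect.conf key built near the end follows DokuWiki's page-ID conventions (lowercase, ':' as the namespace separator, '_' instead of spaces). A small standalone sketch of that normalisation; the function name is illustrative, not part of moin2doku:

# Hedged illustration of the redirect key normalisation used above.
def old_dokuwiki_name(moin_pagename):
    return moin_pagename.lower().replace('/', ':').replace(' ', '_')

print(old_dokuwiki_name('Main Page/Sub Page'))  # -> main_page:sub_page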
Code example #4
def convert_editlog(page, output=None, overwrite=False):
    pagedir = page.getPagePath()
    pagename = wikiname(pagedir)
    if not output:
        output = pagename
    edit_log = editlog.EditLog(request, page.getPagePath('edit-log'))

    changes = {}
    for log in edit_log:
        # not supported. perhaps add anyway?
        if log.action in ('ATTNEW', 'ATTDEL', 'ATTDRW'):
            continue

        # 1201095949  192.168.2.23    E   start   [email protected]
        author = log.hostname
        if log.userid:
            userdata = user.User(request, log.userid)
            if userdata.name:
                author = userdata.name

        try:
            action = {
                'SAVE': 'E',
                'SAVENEW': 'C',
                'SAVE/REVERT': 'R',
            }[log.action]
        except KeyError:
            action = log.action

        mtime = str(log.ed_time_usecs / USEC)
        changes[mtime] = u"\t".join([
            mtime, log.addr, action,
            dw.cleanID(log.pagename), author, log.comment
        ])

    # see if we have missing entries, try to recover
    page = Page(request, pagename)
    if len(page.getRevList()) != len(changes):
        print "RECOVERING edit-log, missing %d entries" % (
            len(page.getRevList()) - len(changes))
        for rev in page.getRevList():
            page = Page(request, pagename, rev=rev)
            mtime = page.mtime_usecs() / USEC

            if not mtime:
                pagefile, realrev, exists = page.get_rev(rev=rev)
                if os.path.exists(pagefile):
                    mtime = int(os.path.getmtime(pagefile))
                    print "Recovered %s: %s" % (rev, mtime)

            mtime = str(mtime)
            if not changes.has_key(mtime):
                changes[mtime] = u"\t".join([
                    mtime, '127.0.0.1', '?',
                    dw.cleanID(pagename), 'root', 'recovered entry'
                ])
                print "ADDING %s" % mtime

    changes = sorted(changes.values())
    out_file = os.path.join(output_dir, 'meta', dw.metaFN(output, '.changes'))
    writefile(out_file, changes, overwrite=overwrite)
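As in code example #1, a missing edit-log timestamp is recovered from the revision file's own modification time. A hedged standalone sketch of that fallback (the helper name is illustrative):

# Sketch of the mtime fallback above: use the revision file's own mtime
# when the edit-log timestamp is zero or missing.
import os

def recover_mtime(pagefile):
    if os.path.exists(pagefile):
        return int(os.path.getmtime(pagefile))
    return 0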
Code example #5
def execute(pagename, request):
    pagename_header = "%s-%s.zip" % (pagename, datetime.now().isoformat()[:10])
    pagename_header = pagename_header.encode("ascii", "ignore")

    request.content_type = "application/zip"
    request.headers["Content-Disposition"] = 'attachment; filename="%s"' % pagename_header

    args = values_to_form(request.values)

    try:
        args = args["args"][0]
    except (KeyError, IndexError):
        args = u""

    pagelist, metakeys, _ = metatable_parseargs(request, args, get_all_keys=True)

    renameDict = dict()

    for page in pagelist:
        metas = get_metas(request, page, ["gwikirename"], abs_attach=False, checkAccess=False)
        renameList = metas["gwikirename"]
        if renameList:
            renameDict[page] = renameList

    output = StringIO()
    zip = zipfile.ZipFile(output, "w", zipfile.ZIP_DEFLATED)

    userid = user.getUserIdentification(request)
    script = [packLine(["MoinMoinPackage", "1"])]
    counter = 0

    for pagename in pagelist:
        counter += 1
        page = Page(request, pagename)
        timestamp = wikiutil.version2timestamp(page.mtime_usecs())
        # Underlay pages are in epoch 0, zipfile in python 2.7 does
        # not support this.
        if not timestamp:
            pagefile, rev, exists = page.get_rev()
            if rev == 99999999:
                # We should never get here
                log.error("Page %s neither in pages or underlay, skipping." % (pagename))
                continue
            timestamp = os.path.getctime(pagefile)
        pagetext = page.get_raw_body().encode("utf-8")
        filename = str(counter)
        zinfo = zipfile.ZipInfo(filename=filename, date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        zip.writestr(zinfo, pagetext)

        targetNameList = renameDict.get(pagename, [pagename])
        for targetName in targetNameList:
            script.append(packLine(["AddRevision", filename, targetName, userid, ""]))

        for attachment in _get_files(request, pagename):
            counter += 1
            sourcefile = AttachFile.getFilename(request, pagename, attachment)
            filename = str(counter) + "-attachment"
            zip.write(sourcefile, filename)
            script.append(packLine(["AddAttachment", filename, attachment, pagename, userid, ""]))

    zip.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
    zip.close()

    request.write(output.getvalue())
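The epoch-0 workaround above exists because zip entries carry a DOS date that cannot represent times before 1980. A short hedged sketch of the ZipInfo construction used in the loop (the timestamp value is illustrative):

# Illustrative: build a ZipInfo from a Unix timestamp as done above; a zero
# (epoch) mtime would not be representable, hence the getctime() fallback.
import zipfile
from datetime import datetime

timestamp = 1201095949  # illustrative; the real code falls back to getctime()
zinfo = zipfile.ZipInfo(filename="1",
                        date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
zinfo.compress_type = zipfile.ZIP_DEFLATED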
Code example #6
File: moin2doku.py  Project: mir3k/moin2doku
def convertfile(page, output = None, overwrite = False):
	pagedir = page.getPagePath()
	pagename = wikiname(pagedir)

	if not output:
		output = pagename

	if page.isUnderlayPage():
		print "underlay: %s" % page.request.cfg.data_underlay_dir
		print "underlay: %s" % request.cfg.data_underlay_dir
		print "SKIP UNDERLAY: %s" % pagename
		return False

	current_exists = page.exists()
	current_rev = page.current_rev()

	if convert_attic:
		revs = page.getRevList()
	else:
		revs = [current_rev]

	# Generate random ID Number for collision avoidance when attachments in Namespace have the same name
	randomID = random.randint(101,999)

	for rev in revs:
		page = Page(request, pagename, rev = rev)
		pagefile, realrev, exists = page.get_rev(rev = rev)

		mtime = page.mtime_usecs() / USEC

		if not mtime:
			if os.path.exists(pagefile) != exists:
				raise Exception, "IT SHOULD NOT HAPPEN"

			if os.path.exists(pagefile):
				mtime = int(os.path.getmtime(pagefile))
				print "recovered %s: %s" % (rev, mtime)

			if not mtime:
				print "NO REVISION: for %s" % pagefile
				continue

		if rev == current_rev:
			out_file = os.path.join(output_dir, 'pages', dw.wikiFN(output))
			if not convert_attic and not exists:
				# when not converting attic, the current version may no longer exist; skip it
				continue
		else:
			out_file = os.path.join(output_dir, 'attic', dw.wikiFN(output, str(mtime)))

		content = moin2doku(pagename, page.get_raw_body(),randomID)
		if len(content) == 0:
#			raise Exception, "No content"
			print "NO CONTENT: exists: %s,%s" % (exists, os.path.exists(pagefile))

		writefile(out_file, content, overwrite = overwrite)
		copystat(pagefile, out_file)

	ID = dw.cleanID(output)
	copy_attachments(page, dw.getNS(ID),randomID)

	# convert edit-log, it's always present even if current page is not
	convert_editlog(page, output = output, overwrite = overwrite)

	# add to redirect.conf if filenames differ
	# and page must exist (no redirect for deleted pages)
	if redirect_conf and current_exists:
		# redirect dokuwiki plugin is quite picky
		# - it doesn't understand if entries are not lowercase
		# - it doesn't understand if paths are separated by forward slash
		old_page = pagename.lower().replace('/', ':').replace(' ', '_')
		if old_page != ID:
			redirect_map[old_page] = ID

	print "Converted %s as %s" % (pagename, dw.wikiFN(output))

	return True
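The copystat() call after each converted page presumably refers to shutil.copystat; a hedged sketch of what it contributes here (the parameter names are illustrative):

# Assuming copystat is shutil.copystat: copy the MoinMoin revision file's
# access and modification times onto the converted DokuWiki file, so attic
# revisions keep their original timestamps. Both paths must already exist.
import shutil

def preserve_times(moin_revision_file, dokuwiki_file):
    shutil.copystat(moin_revision_file, dokuwiki_file)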