def fetch_convert_content(prefix, content):
    """Convert wiki markup to Confluence storage format, prepending *prefix*.

    Args:
        prefix:  Wiki-markup banner placed before the page body (kept outside
                 any fallback escaping so it still renders as a macro).
        content: Raw wiki-markup body of the page.

    Returns:
        The storage-format XHTML produced by the remote converter.

    Raises:
        xmlrpc.client.Fault: for any conversion failure other than an
            unknown-macro migration error.
    """
    try:
        new_store_format = new_confluence_api.convertWikiToStorageFormat(
            prefix + content)
    except xmlrpc.client.Fault as e:
        if ('com.atlassian.confluence.content.render.xhtml.migration.exceptions.UnknownMacroMigrationException:'
                in e.faultString):
            # An unknown macro breaks conversion: neutralize the markup by
            # wrapping the RAW body in a {code} block, escaping any inner
            # {code} markers.  The prefix is added once, outside the block.
            # (The previous version had already folded the prefix into
            # `content`, so it was prepended twice and the first copy was
            # escaped inside the code block.)
            new_store_format = new_confluence_api.convertWikiToStorageFormat(
                prefix + '{code}\n'
                + content.replace('{code}', r'\{code\}')
                + '\n{code}')
        else:
            # Unrelated fault: preserve the original traceback.
            raise
    return new_store_format
def fetch_convert_content(prefix, content):
    """Convert wiki markup to Confluence storage format, prepending *prefix*.

    Args:
        prefix:  Wiki-markup banner placed before the page body (kept outside
                 any fallback escaping so it still renders as a macro).
        content: Raw wiki-markup body of the page.

    Returns:
        The storage-format XHTML produced by the remote converter.

    Raises:
        xmlrpc.client.Fault: for any conversion failure other than an
            unknown-macro migration error.
    """
    try:
        new_store_format = new_confluence_api.convertWikiToStorageFormat(
            prefix + content)
    except xmlrpc.client.Fault as e:
        if (
            "com.atlassian.confluence.content.render.xhtml.migration.exceptions.UnknownMacroMigrationException:"
            in e.faultString
        ):
            # An unknown macro breaks conversion: neutralize the markup by
            # wrapping the RAW body in a {code} block, escaping any inner
            # {code} markers.  The prefix is added once, outside the block.
            # (The previous version had already folded the prefix into
            # `content`, so it was prepended twice and the first copy was
            # escaped inside the code block.)
            new_store_format = new_confluence_api.convertWikiToStorageFormat(
                prefix
                + "{code}\n"
                + content.replace("{code}", r"\{code\}")
                + "\n{code}"
            )
        else:
            # Unrelated fault: preserve the original traceback.
            raise
    return new_store_format
def import_page(page_id, parent_id):
    """Import one exported page (JSON on disk) into the new Confluence space.

    Reads ``<DATA_DIR>/pages/<page_id>.json``, prepends an {info} banner
    recording the page's provenance (original URL, creator, last modifier),
    converts the wiki markup via ``fetch_convert_content`` and stores it
    under *parent_id* in ``NEW_SPACE_KEY``.

    If Confluence rejects the converted body with a WstxParsingException,
    retries once with the raw content wrapped in a {code} block; any other
    Fault is re-raised.
    """
    json_file_path = os.path.join(utils.DATA_DIR, "pages", page_id + ".json")
    if not os.path.isfile(json_file_path):
        logger.error("no file %s" % json_file_path)
        return
    # Page bodies contain CJK text — be explicit about the encoding instead
    # of relying on the platform default.
    with open(json_file_path, "r", encoding="utf-8") as page_file:
        page = json.load(page_file)
    prefix = u"""{info}\n本文档从旧 Wiki 导入,原 URL:%s\n\n原创建人:%s %s\n\n原最后更新人:%s %s\n{info}\n\n""" % (
        page["url"],
        page["creator"],
        dateutil.parser.parse(page["created"]).strftime("%Y-%m-%d %H:%M:%S"),
        page["modifier"],
        dateutil.parser.parse(page["modified"]).strftime("%Y-%m-%d %H:%M:%S"),
    )
    new_store_format = fetch_convert_content(prefix, page["content"])
    logger.debug("convert page: %s, size: %d" % (page_id, len(page["content"])))

    def _store(storage_format):
        # First attempt and fallback share every field except the body.
        new_confluence_api.storePage(
            {
                "space": NEW_SPACE_KEY,
                "parentId": parent_id,
                "title": page["title"],
                "content": storage_format,
                "modified": page["modified"],
            }
        )

    try:
        _store(new_store_format)
    except xmlrpc.client.Fault as e:
        if "com.ctc.wstx.exc.WstxParsingException" in e.faultString:
            # Converted body is not well-formed XML: fall back to the raw
            # content wrapped in a {code} block (inner markers escaped).
            new_store_format = new_confluence_api.convertWikiToStorageFormat(
                prefix
                + "{code}\n"
                + page["content"].replace("{code}", r"\{code\}")
                + "\n{code}"
            )
            logger.error(
                "cannot store converted content, retrying code-wrapped, page id: %s, page title: %s"
                % (page["id"], page["title"])
            )
            _store(new_store_format)
        else:
            # Unrelated fault: preserve the original traceback.
            raise
def import_page(page_id, parent_id):
    """Import one exported page (JSON on disk) into the new Confluence space.

    Reads ``<DATA_DIR>/pages/<page_id>.json``, prepends an {info} banner
    recording the page's provenance (original URL, creator, last modifier),
    converts the wiki markup via ``fetch_convert_content`` and stores it
    under *parent_id* in ``NEW_SPACE_KEY``.

    If Confluence rejects the converted body with a WstxParsingException,
    retries once with the raw content wrapped in a {code} block; any other
    Fault is re-raised.
    """
    json_file_path = os.path.join(utils.DATA_DIR, 'pages', page_id + '.json')
    if not os.path.isfile(json_file_path):
        logger.error('no file %s' % json_file_path)
        return
    # Page bodies contain CJK text — be explicit about the encoding instead
    # of relying on the platform default.
    with open(json_file_path, 'r', encoding='utf-8') as page_file:
        page = json.load(page_file)
    prefix = u"""{info}\n本文档从旧 Wiki 导入,原 URL:%s\n\n原创建人:%s %s\n\n原最后更新人:%s %s\n{info}\n\n""" % (
        page['url'],
        page['creator'],
        dateutil.parser.parse(page['created']).strftime('%Y-%m-%d %H:%M:%S'),
        page['modifier'],
        dateutil.parser.parse(page['modified']).strftime('%Y-%m-%d %H:%M:%S'),
    )
    new_store_format = fetch_convert_content(prefix, page['content'])
    logger.debug('convert page: %s, size: %d' % (page_id, len(page['content'])))

    def _store(storage_format):
        # First attempt and fallback share every field except the body.
        new_confluence_api.storePage({
            'space': NEW_SPACE_KEY,
            'parentId': parent_id,
            'title': page['title'],
            'content': storage_format,
            'modified': page['modified'],
        })

    try:
        _store(new_store_format)
    except xmlrpc.client.Fault as e:
        if 'com.ctc.wstx.exc.WstxParsingException' in e.faultString:
            # Converted body is not well-formed XML: fall back to the raw
            # content wrapped in a {code} block (inner markers escaped).
            new_store_format = new_confluence_api.convertWikiToStorageFormat(
                prefix + '{code}\n'
                + page['content'].replace('{code}', r'\{code\}')
                + '\n{code}')
            logger.error(
                'cannot store converted content, retrying code-wrapped, page id: %s, page title: %s' % (
                    page['id'], page['title']))
            _store(new_store_format)
        else:
            # Unrelated fault: preserve the original traceback.
            raise