def mainloop(self):
    """moin-package's main code.

    Builds a MoinMoin page package from --pages, --search, or (when
    neither is given) every non-system page of the wiki, and writes it
    to the mandatory --output file.  Exits via script.fatal() on any
    usage or packaging error.
    """
    # Initialize request
    self.init_request()
    request = self.request
    _ = self.request.getText

    # Check our command line args: --pages and --search are mutually
    # exclusive, --output is mandatory, and no selector means "full package".
    if self.options.pages and self.options.search:
        script.fatal(_("Options --pages and --search are mutually exclusive!"))
    elif not self.options.output:
        script.fatal(_("You must specify an output file!"))
    elif not self.options.pages and not self.options.search:
        script.log(_("No pages specified using --pages or --search, assuming full package."))

    include_attachments = bool(self.options.attachment)
    if include_attachments:
        script.log(_("All attachments included into the package."))

    # Sanity checks: never clobber an existing package file.
    if os.path.exists(self.options.output):
        script.fatal(_("Output file already exists! Cowardly refusing to continue!"))

    # Check for user: run the packaging as this user so ACLs are honoured.
    if self.options.package_user:
        request.user = user.User(request, name=self.options.package_user)

    # Import PackagePages here, as we now have an initalized request.
    from MoinMoin.action.PackagePages import PackagePages

    # Perform actual packaging.
    package = PackagePages(request.rootpage.page_name, request)
    packageoutput = open(self.options.output, "wb")
    try:
        if self.options.search:
            packagedata = package.collectpackage(
                package.searchpackage(request, self.options.search),
                packageoutput,
                include_attachments=include_attachments)
        elif self.options.pages:
            packagedata = package.collectpackage(
                self.options.pages.split(","),
                packageoutput,
                include_attachments=include_attachments)
        else:
            packagedata = package.collectpackage(
                request.rootpage.getPageList(
                    include_underlay=False,
                    filter=lambda name: not wikiutil.isSystemPage(request, name)),
                packageoutput,
                include_attachments=include_attachments)
    finally:
        # BUGFIX: the output file handle was previously never closed.
        packageoutput.close()

    if packagedata:
        script.fatal(packagedata)
def mainloop(self): """ moin-dump's main code. """ # Prepare output directory if not self.options.target_dir: script.fatal("you must use --target-dir=/your/output/path to specify the directory we write the html files to") outputdir = os.path.abspath(self.options.target_dir) try: os.mkdir(outputdir) script.log("Created output directory '%s'!" % outputdir) except OSError, err: if err.errno != errno.EEXIST: script.fatal("Cannot create output directory '%s'!" % outputdir)
def mainloop(self):
    """moin-package's main code.

    Packages pages selected by --pages or --search (or all non-system
    pages when neither option is given) into the file named by
    --output, optionally including attachments.  Aborts through
    script.fatal() on bad options or packaging errors.
    """
    # Initialize request
    self.init_request()
    request = self.request
    _ = self.request.getText

    # Check our command line args.
    if self.options.pages and self.options.search:
        script.fatal(_("Options --pages and --search are mutually exclusive!"))
    elif not self.options.output:
        script.fatal(_("You must specify an output file!"))
    elif not self.options.pages and not self.options.search:
        script.log(_("No pages specified using --pages or --search, assuming full package."))

    include_attachments = bool(self.options.attachment)
    if include_attachments:
        script.log(_("All attachments included into the package."))

    # Sanity checks: refuse to overwrite an existing output file.
    if os.path.exists(self.options.output):
        script.fatal(_("Output file already exists! Cowardly refusing to continue!"))

    # Check for user: impersonate for ACL checks during packaging.
    if self.options.package_user:
        request.user = user.User(request, name=self.options.package_user)

    # Import PackagePages here, as we now have an initalized request.
    from MoinMoin.action.PackagePages import PackagePages

    # Perform actual packaging.
    package = PackagePages(request.rootpage.page_name, request)
    packageoutput = open(self.options.output, "wb")
    try:
        if self.options.search:
            packagedata = package.collectpackage(
                package.searchpackage(request, self.options.search),
                packageoutput,
                include_attachments=include_attachments)
        elif self.options.pages:
            packagedata = package.collectpackage(
                self.options.pages.split(","),
                packageoutput,
                include_attachments=include_attachments)
        else:
            packagedata = package.collectpackage(
                request.rootpage.getPageList(
                    include_underlay=False,
                    filter=lambda name: not wikiutil.isSystemPage(request, name)),
                packageoutput,
                include_attachments=include_attachments)
    finally:
        # BUGFIX: close the package file — it was previously leaked.
        packageoutput.close()

    if packagedata:
        script.fatal(packagedata)
def _attachment(request, pagename, filename, outputdir, **kw):
    """Copy one page attachment into the static dump tree.

    The attachment is copied to
    ``<outputdir>/attachments/<quoted pagename>/<filename>`` and the
    relative URL of that copy is returned.  If the source attachment is
    not readable, nothing is copied and "" is returned.

    Exits via script.fatal() when the destination directory cannot be
    created or is not a directory.
    """
    filename = filename.encode(config.charset)
    source_dir = AttachFile.getAttachDir(request, pagename)
    source_file = os.path.join(source_dir, filename)
    dest_dir = os.path.join(outputdir, "attachments", wikiutil.quoteWikinameFS(pagename))
    dest_file = os.path.join(dest_dir, filename)
    # URL uses url_quote for the filename but the FS-quoted page name,
    # matching the on-disk layout created below.
    dest_url = "attachments/%s/%s" % (wikiutil.quoteWikinameFS(pagename), wikiutil.url_quote(filename))
    if os.access(source_file, os.R_OK):
        if not os.access(dest_dir, os.F_OK):
            try:
                os.makedirs(dest_dir)
            # BUGFIX: was a bare "except:" which also swallowed
            # KeyboardInterrupt/SystemExit; makedirs failures are OSError.
            except OSError:
                script.fatal("Cannot create attachment directory '%s'" % dest_dir)
        elif not os.path.isdir(dest_dir):
            script.fatal("'%s' is not a directory" % dest_dir)
        shutil.copyfile(source_file, dest_file)
        script.log('Writing "%s"...' % dest_url)
        return dest_url
    else:
        return ""
def mainloop(self):
    """moin-dump's main code (single-output-file variant).

    Reads an optional "pubdate =" line from self._input_file, selects the
    pages whose names match self._input_file as a regex (falling back to
    treating it as a literal page name), renders each one, and writes the
    rendered HTML of every page to self._output_file (each iteration
    overwrites the previous one — only the last page survives).
    """
    outputdir = '/'
    date_str = ''
    with open(self._input_file, 'r') as fd:
        for line in fd:
            if line.startswith("pubdate ="):
                # Assumes the line looks like "pubdate = <value>"; [10:]
                # skips "pubdate = " (one char past the "=") — TODO confirm
                # against actual input files.
                date_str = line[10:].strip()
                break

    # Insert config dir or the current directory to the start of the path.
    config_dir = self.options.config_dir
    if config_dir and os.path.isfile(config_dir):
        config_dir = os.path.dirname(config_dir)
    if config_dir and not os.path.isdir(config_dir):
        script.fatal("bad path given to --config-dir option")
    sys.path.insert(0, os.path.abspath(config_dir or os.curdir))

    self.init_request()
    request = self.request

    # fix script_root so we get relative paths in output html
    request.script_root = url_prefix_static

    # use this user for permissions checks
    request.user = user.User(request, name=self.options.dump_user)

    pages = request.rootpage.getPageList(user='')  # get list of all pages in wiki
    pages.sort()
    if self._input_file:  # did user request a particular page or group of pages?
        try:
            namematch = re.compile(self._input_file)
            pages = [page for page in pages if namematch.match(page)]
            if not pages:
                pages = [self._input_file]
        # BUGFIX: was a bare "except:"; only a bad regex pattern is the
        # expected failure here.
        except re.error:
            pages = [self._input_file]

    # Monkey-patch URL generation so intra-wiki links and attachment URLs
    # point at the static dump layout instead of the live wiki.
    wikiutil.quoteWikinameURL = lambda pagename, qfn=wikiutil.quoteWikinameFS: (qfn(pagename) + HTML_SUFFIX)
    AttachFile.getAttachUrl = lambda pagename, filename, request, **kw: _attachment(request, pagename, filename, outputdir, **kw)

    errlog = sys.stderr
    errcnt = 0

    page_front_page = wikiutil.getLocalizedPage(request, request.cfg.page_front_page).page_name
    page_title_index = wikiutil.getLocalizedPage(request, 'TitleIndex').page_name
    page_word_index = wikiutil.getLocalizedPage(request, 'WordIndex').page_name

    navibar_html = ''
    urlbase = request.url  # save wiki base url

    for pagename in pages:
        # we have the same name in URL and FS
        script.log('Writing "%s"...' % self._output_file)
        try:
            pagehtml = ''
            request.url = urlbase + pagename  # add current pagename to url base
            page = FSPage(request, pagename, formatter=CustomHtmlFormatter(request, store_pagelinks=1))
            request.page = page
            try:
                request.reset()
                pagehtml = request.redirectedOutput(page.send_page, count_hit=0, content_only=1, do_cache=0)
            # Deliberate catch-all: one broken page must not abort the
            # whole dump; the traceback is logged instead.
            except:
                errcnt = errcnt + 1
                print >> sys.stderr, "*** Caught exception while writing page!"
                print >> errlog, "~" * 78
                # BUGFIX: previously printed the bare name "file", which is
                # the Python 2 builtin file type here (the local variable of
                # that name was removed from this variant).
                print >> errlog, self._output_file  # page filename
                import traceback
                traceback.print_exc(None, errlog)
        finally:
            # Always emit the output file, even for a failed render
            # (pagehtml stays '' in that case).
            timestamp = time.strftime("%Y-%m-%d %H:%M")
            filepath = self._output_file
            fileout = codecs.open(filepath, 'w', config.charset)
            fileout.write(page_template % {
                'charset': config.charset,
                'pagename': pagename,
                'pagehtml': pagehtml,
                'logo_html': logo_html,
                'navibar_html': navibar_html,
                'timestamp': timestamp,
                'theme': request.cfg.theme_default,
                'date_str': date_str,
            })
            fileout.close()
elif self.options.uname: try: set_password(request, newpass, uname=self.options.uname, notify=notify, skip_invalid=skip_invalid, subject=subject, text_intro=text_intro, text_msg=text_msg, text_data=text_data) except Fault, err: print str(err) elif self.options.all_users: uids = sorted(getUserList(request)) total = len(uids) for nr, uid in enumerate(uids, start=1): log("%05d / %05d - processing uid %s" % (nr, total, uid)) try: set_password(request, newpass, uid=uid, notify=notify, skip_invalid=skip_invalid, subject=subject, text_intro=text_intro, text_msg=text_msg, text_data=text_data) except Fault, err: print str(err)
# NOTE(review): fragment — 'f', 'request', 'newpass', 'notify',
# 'skip_invalid', 'subject', 'text_intro', 'text_msg', 'set_password',
# 'getUserList', 'log' and 'Fault' are defined earlier, outside this view.
text_data = f.read().decode('utf-8')

# Dispatch on exactly one target selector: a single uid, a single user
# name, or every user in the wiki.
if self.options.uid:
    try:
        set_password(request, newpass, uid=self.options.uid,
                     notify=notify, skip_invalid=skip_invalid, subject=subject,
                     text_intro=text_intro, text_msg=text_msg, text_data=text_data)
    except Fault, err:
        # Report the failure but do not abort the script.
        print str(err)
elif self.options.uname:
    try:
        set_password(request, newpass, uname=self.options.uname,
                     notify=notify, skip_invalid=skip_invalid, subject=subject,
                     text_intro=text_intro, text_msg=text_msg, text_data=text_data)
    except Fault, err:
        print str(err)
elif self.options.all_users:
    uids = sorted(getUserList(request))
    total = len(uids)
    for nr, uid in enumerate(uids, start=1):
        # Progress line, 1-based counter against the total.
        log("%05d / %05d - processing uid %s" % (nr, total, uid))
        try:
            set_password(request, newpass, uid=uid,
                         notify=notify, skip_invalid=skip_invalid, subject=subject,
                         text_intro=text_intro, text_msg=text_msg, text_data=text_data)
        except Fault, err:
            # A failure for one user must not stop the bulk run.
            print str(err)
class PluginScript(MoinScript):
    """Purpose:
    ========
    This script imports the wiki page from given file into the wiki.

    Detailed Instructions:
    ======================
    General syntax: moin [options] import wikipage [wikipage-options]

    [options] usually should be:
        --config-dir=/path/to/cfg --wiki-url=http://wiki.example.org/ --page=Page
    """

    def __init__(self, argv, def_values):
        # Register the importer-specific command line options on top of
        # the common MoinScript ones.
        MoinScript.__init__(self, argv, def_values)
        self.parser.add_option('--acl', dest='acl', default='', metavar='ACL',
                               help='Set a specific ACL for the wiki page')
        self.parser.add_option('--author', dest='author', metavar='AUTHOR',
                               default='PageImporter',
                               help='Use AUTHOR for edit history / RecentChanges')
        self.parser.add_option('--comment', dest='comment', metavar='COMMENT',
                               default='',
                               help='COMMENT for edit history / RecentChanges')
        self.parser.add_option('--file', dest='file', default='', metavar='FILE',
                               help='Read the wiki page from the given file')
        # --no-backup stores False into revision_backup (default True).
        self.parser.add_option('--no-backup', dest='revision_backup', default=True,
                               action='store_false',
                               help="Suppress making a page backup per revision")
        self._update_option_help('--page', 'Name of the wiki page which should be imported')

    def mainloop(self):
        # Read the file named by --file and save its (decoded, rstripped)
        # content as the wiki page named by --page, optionally prefixed
        # with an #acl line.
        self.init_request()
        request = self.request
        # NOTE(review): presumably IAmRoot grants every permission so the
        # import bypasses ACL checks — its definition is not visible here.
        request.user.may = IAmRoot()

        # Both --page and --file are mandatory; fatal() exits the script.
        if not self.options.page:
            fatal('You must specify a wiki page name (--page=Page)!')
        if not self.options.file:
            fatal('You must specify a FILE to read from (--file=FILE)!')

        try:
            fileObj = open(self.options.file, 'rb')
        except IOError, err:
            fatal(str(err))

        # decodeUnknownInput guesses the encoding; trailing whitespace is
        # stripped so the page body ends cleanly.
        page_content = decodeUnknownInput(fileObj.read()).rstrip()
        fileObj.close()

        # An --acl value becomes a processing-instruction line prepended
        # to the page body.
        if not self.options.acl:
            acl = ''
        else:
            acl = '#acl %s\n' % self.options.acl
        comment = clean_input(self.options.comment)

        pe = PageEditor(request, self.options.page, do_editor_backup=0,
                        uid_override=self.options.author,
                        do_revision_backup=int(self.options.revision_backup))
        try:
            pe.saveText(acl + page_content, 0, comment=comment)
        except PageEditor.Unchanged:
            # Saving identical content is not an error — just note it.
            log("info: wikipage was not modified - ignored update.")
        except PageEditor.SaveError, err:
            log("error: %r" % err)
class PluginScript(script.MoinScript):
    """\
    Purpose:
    ========
    This tool allows you to dump MoinMoin wiki pages to static HTML files.

    Detailed Instructions:
    ======================
    General syntax: moin [options] export dump [dump-options]

    [options] usually should be:
        --config-dir=/path/to/my/cfg/
        --wiki-url=http://wiki.example.org/

    [dump-options] see below:
        0. You must run this script as owner of the wiki files, usually this is
           the web server user.

        1. To dump all the pages on the wiki to the directory '/mywiki'
           moin ... export dump --target-dir=/mywiki

        2. To dump all the pages readable by 'JohnSmith' on the wiki to the
           directory '/mywiki'
           moin ... export dump --target-dir=/mywiki --username JohnSmith
    """

    def __init__(self, argv=None, def_values=None):
        # Register the dump-specific options on top of the common ones.
        script.MoinScript.__init__(self, argv, def_values)
        self.parser.add_option(
            "-t", "--target-dir", dest="target_dir",
            help="Write html dump to DIRECTORY")
        self.parser.add_option(
            "-u", "--username", dest="dump_user",
            help="User the dump will be performed as (for ACL checks, etc)")

    def mainloop(self):
        """ moin-dump's main code. """
        # Prepare output directory
        if not self.options.target_dir:
            script.fatal("you must use --target-dir=/your/output/path to specify the directory we write the html files to")
        outputdir = os.path.abspath(self.options.target_dir)
        try:
            os.mkdir(outputdir)
            script.log("Created output directory '%s'!" % outputdir)
        except OSError, err:
            # Reuse an existing directory (EEXIST); anything else is fatal.
            if err.errno != errno.EEXIST:
                script.fatal("Cannot create output directory '%s'!" % outputdir)

        # Insert config dir or the current directory to the start of the path.
        config_dir = self.options.config_dir
        if config_dir and os.path.isfile(config_dir):
            config_dir = os.path.dirname(config_dir)
        if config_dir and not os.path.isdir(config_dir):
            script.fatal("bad path given to --config-dir option")
        sys.path.insert(0, os.path.abspath(config_dir or os.curdir))

        self.init_request()
        request = self.request

        # fix script_root so we get relative paths in output html
        request.script_root = url_prefix_static

        # use this user for permissions checks
        request.user = user.User(request, name=self.options.dump_user)

        pages = request.rootpage.getPageList(user='')  # get list of all pages in wiki
        pages.sort()
        if self.options.page:  # did user request a particular page or group of pages?
            try:
                # Treat --page as a regex first; fall back to using it as a
                # literal page name when it matches nothing or fails to compile.
                namematch = re.compile(self.options.page)
                pages = [page for page in pages if namematch.match(page)]
                if not pages:
                    pages = [self.options.page]
            # NOTE(review): bare except — probably intended for re.error only;
            # consider narrowing.
            except:
                pages = [self.options.page]

        # Monkey-patch URL generation so intra-wiki links and attachment
        # URLs point into the static dump layout (.html files on disk).
        wikiutil.quoteWikinameURL = lambda pagename, qfn=wikiutil.quoteWikinameFS: (
            qfn(pagename) + HTML_SUFFIX)
        AttachFile.getAttachUrl = lambda pagename, filename, request, **kw: _attachment(
            request, pagename, filename, outputdir, **kw)

        # Per-dump error log; a count decides whether to mention it at the end.
        errfile = os.path.join(outputdir, 'error.log')
        errlog = open(errfile, 'w')
        errcnt = 0

        page_front_page = wikiutil.getLocalizedPage(
            request, request.cfg.page_front_page).page_name
        page_title_index = wikiutil.getLocalizedPage(request, 'TitleIndex').page_name
        page_word_index = wikiutil.getLocalizedPage(request, 'WordIndex').page_name

        # Static navigation bar linking front page and the two indexes.
        navibar_html = ''
        for p in [page_front_page, page_title_index, page_word_index]:
            navibar_html += '[<a href="%s">%s</a>] ' % (
                wikiutil.quoteWikinameURL(p), wikiutil.escape(p))

        urlbase = request.url  # save wiki base url

        for pagename in pages:
            # we have the same name in URL and FS
            # NOTE(review): 'file' shadows the Python 2 builtin of the same
            # name for the rest of this method.
            file = wikiutil.quoteWikinameURL(pagename)
            script.log('Writing "%s"...' % file)
            try:
                pagehtml = ''
                request.url = urlbase + pagename  # add current pagename to url base
                page = Page.Page(request, pagename)
                request.page = page
                try:
                    request.reset()
                    pagehtml = request.redirectedOutput(page.send_page,
                                                        count_hit=0,
                                                        content_only=1)
                # Deliberate catch-all: one broken page must not abort the
                # whole dump; the traceback goes to error.log instead.
                except:
                    errcnt = errcnt + 1
                    print >> sys.stderr, "*** Caught exception while writing page!"
                    print >> errlog, "~" * 78
                    print >> errlog, file  # page filename
                    import traceback
                    traceback.print_exc(None, errlog)
            finally:
                # Always write the output file, even after a failed render
                # (pagehtml stays '' in that case).
                timestamp = time.strftime("%Y-%m-%d %H:%M")
                filepath = os.path.join(outputdir, file)
                fileout = codecs.open(filepath, 'w', config.charset)
                fileout.write(page_template % {
                    'charset': config.charset,
                    'pagename': pagename,
                    'pagehtml': pagehtml,
                    'logo_html': logo_html,
                    'navibar_html': navibar_html,
                    'timestamp': timestamp,
                    'theme': request.cfg.theme_default,
                })
                fileout.close()

        # copy FrontPage to "index.html"
        indexpage = page_front_page
        if self.options.page:
            indexpage = pages[0]  # index page has limited use when dumping specific pages, but create one anyway
        shutil.copyfile(
            os.path.join(outputdir, wikiutil.quoteWikinameFS(indexpage) + HTML_SUFFIX),
            os.path.join(outputdir, 'index' + HTML_SUFFIX))

        # NOTE(review): errlog is not closed if an exception escapes the
        # loop above — a try/finally (or 'with') would be safer.
        errlog.close()
        if errcnt:
            print >> sys.stderr, "*** %d error(s) occurred, see '%s'!" % (
                errcnt, errfile)