def pagelink(self, on, pagename='', anchor=None, css=None, **kw):
    """ Link to a page.

        formatter.text_python will use an optimized call with a page!=None
        parameter. DO NOT USE THIS YOURSELF OR IT WILL BREAK.

        See wikiutil.link_tag() for possible keyword parameters.
    """
    FormatterBase.pagelink(self, on, pagename, **kw)
    if on:
        attrs = {}
        # Warn about non-existing pages.
        if not os.path.exists(wikiutil.pageName2inputFile(pagename)):
            warning('linked "%s" page does not exist' % pagename)
        if pagename == config.general.indexpagename:
            pagename = 'index'  # FIXME:
        url = wikiutil.quoteWikinameURL(pagename + ".html")
        # Add the anchor fragment, if any.
        if anchor:
            url = "%s#%s" % (url, wikiutil.url_quote_plus(anchor))
        attrs['href'] = url
        if css:
            attrs['class'] = css
        markup = self._open('a', attr=attrs, **kw)
    else:
        markup = self._close('a')
    return markup
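# A minimal usage sketch of the open/text/close link protocol (assuming a
# concrete formatter instance is available as `formatter`; this mirrors how
# _word_repl() below emits a link, and the output shown is illustrative):
#
#   parts = [formatter.pagelink(1, 'SomePage', anchor='intro', css='current'),
#            formatter.text('Some Page'),
#            formatter.pagelink(0, 'SomePage')]
#   html = ''.join(parts)
#   # -> e.g. '<a class="current" href="SomePage.html#intro">Some Page</a>'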
def format(self):
    '''Formats self.wikitext and returns the content as HTML source
       (unicode).
    '''
    # Format the content.
    content = format_text(self.wikitext, self.pagename)

    # Process links to subpages in this category.
    subpages = []
    for fname, prag in all_pragmas.items():
        if self.pagename in prag.multiple('#category'):
            subpages.append(wikiutil.inputFile2pageName(fname))
    if subpages:
        lines = ['',
                 '<a name="category-subpages"></a>',
                 '<h1>%s</h1>' % (config.text.subpages % dict(value=self.pagename)),
                 '<div id="category-subpages">',
                 '<table>', '<tr>', '<td>', '<ul>']
        # Case-insensitive ascending sort.
        subpages.sort(key=lambda name: name.lower())
        # Lay the subpage links out in up to three columns of equal height.
        num_of_columns = 3
        m = len(subpages)
        p = m // num_of_columns
        if m % num_of_columns:
            p += 1
        for i in xrange(m):
            if i > 0 and i % p == 0:
                lines.extend(['</ul>', '</td>', '<td>', '<ul>'])
            lines.append('<li><a href="%s.html">%s</a>'
                         % (wikiutil.quoteWikinameURL(subpages[i]), subpages[i]))
        lines.extend(['</ul>', '</td>', '</tr>', '</table>', '</div>'])
        content += '\n'.join(lines)

    # Process categories this page belongs to.
    categories = self.pragmas.multiple('#category')
    if categories:
        categories.sort(key=lambda name: name.lower())
        for category in categories:
            fname = wikiutil.pageName2inputFile(category)
            wikiutil.assertFileNameCase(fname)
            assert os.path.exists(fname), '%s does not exist' % fname
        links = ['<a href="%s.html">%s</a>'
                 % (wikiutil.quoteWikinameURL(name), name)
                 for name in categories]
        if len(links) > 1:
            fmt = config.text.categories
        else:
            fmt = config.text.category
        lines = ['', '<p class="categories-links">']
        lines.append(fmt % dict(value=' | '.join(links)))
        lines.append('</p>')
        content += '\n'.join(lines)
    return content
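# A standalone sketch of the column-splitting math used above: n sorted items
# are cut into at most `columns` runs of ceil(n / columns) items each. The
# helper name is illustrative, not part of statwiki:

def split_into_columns(items, columns=3):
    """Return `items` grouped into at most `columns` equally tall runs."""
    if not items:
        return []
    per_column = len(items) // columns
    if len(items) % columns:
        per_column += 1
    return [items[i:i + per_column]
            for i in xrange(0, len(items), per_column)]

# split_into_columns(['A', 'B', 'C', 'D', 'E'])
# -> [['A', 'B'], ['C', 'D'], ['E']]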
def __init__(self, pagename):
    self.pagename = pagename
    filename = wikiutil.pageName2inputFile(pagename)
    assert os.path.exists(filename), '%s does not exist' % filename
    # XXX: all_pragmas is a global created by process(); we are safe to use
    # it here because no Content object is (and never can be) created before
    # process() is called.
    self.pragmas = all_pragmas[filename]
    # Content as wikitext. Used by .format() (may be modified before
    # calling it).
    self.wikitext = read(filename)
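# A minimal usage sketch (assuming process() has already populated the
# global all_pragmas, as the XXX note above requires; the page name is
# illustrative):
#
#   page = Content('FrontPage')
#   page.wikitext += u'\n\nExtra wikitext appended before rendering.'
#   html = page.format()   # unicode HTML fragment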
def _word_repl(self, word, text=None, css=None):
    """Handle WikiNames."""
    if not text:
        # TODO: if a simple, self-referencing link, emit it as plain text
        #if word == self.formatter.page.page_name:
        #    return self.formatter.text(word)
        text = word
    # Handle anchors.
    parts = word.split("#", 1)
    anchor = ""
    if len(parts) == 2:
        word, anchor = parts
    schemes = []
    if css:
        schemes.append(css)
    # Mark a link to the current page with the special "current" class.
    if word == self.formatter.pagename:
        schemes.append('current')
    if schemes:
        css = ' '.join(schemes)
    result = []
    # Ben's modification: render links to non-existing pages as plain
    # bracketed text instead of broken anchors.
    if not os.path.exists(wikiutil.pageName2inputFile(word)):
        if word != text:
            result.append('[' + word + ' ' + text + ']')
        else:
            result.append('[' + word + ']')
    else:
        # End of Ben's modification.
        result.append(self.formatter.pagelink(1, word, anchor=anchor, css=css))
        result.append(self.formatter.text(text))
        result.append(self.formatter.pagelink(0, word))
    return ''.join(result)
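# How the anchor split above behaves (plain Python, no statwiki helpers):
#
#   'SomePage#section'.split('#', 1)  -> ['SomePage', 'section']
#   'SomePage'.split('#', 1)          -> ['SomePage']
#
# so `word, anchor = parts` only fires when a '#' fragment is present, and
# only the first '#' is treated as the anchor separator.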
def main():
    usage = 'usage: %prog [options] [pagename ...]'
    version = '%%prog %s (%s)' % (__version__, __date__)
    optparser = OptionParser(usage=usage, version=version)
    optparser.add_option('-m', '--make', action='store_true',
                         help='build modified pages')
    optparser.add_option('-b', '--build', action='store_true',
                         help='build all pages')
    optparser.add_option('-c', '--clean', action='store_true',
                         help='remove html files')
    optparser.add_option('-s', '--synchronize', action='store_true',
                         help='upload modified files to the FTP server; wiki files are '
                              'not uploaded; subdirectories *ARE* uploaded; requires the '
                              'ftputil library; FTP has to be configured using the config '
                              'file; this switch can be combined with any of the above three')
    optparser.add_option('-f', '--force', action='store_true',
                         help='when used together with --synchronize, causes files and '
                              'directories that do not exist locally to be deleted from '
                              'the FTP server')
    optparser.add_option('-d', '--directory', dest='dir',
                         help='wiki directory, defaults to the current directory',
                         default='.')
    optparser.add_option('-g', '--config',
                         help='name of the config file relative to the wiki directory, '
                              'defaults to _statwiki.config',
                         default='_statwiki.config')
    options, args = optparser.parse_args()

    modes = [name for name, value in options.__dict__.items()
             if name in ('make', 'build', 'clean') and value]
    if len(modes) > 1:
        sys.exit('error: only one of the --make, --build and --clean switches '
                 'can be used at once')
    try:
        mode = modes[0]
    except IndexError:
        if options.synchronize:
            # If only --synchronize was specified, do nothing besides syncing.
            mode = 'idle'
        else:
            sys.exit('error: one of the --make, --build, --clean or --synchronize '
                     'switches must be specified; use --help for more information')

    os.chdir(options.dir)
    config.parse(wikiutil.fixFileNameCase(options.config))

    if args:
        # Add .wiki to the names if needed.
        wikipages = []
        for name in args:
            if not name.endswith('.wiki'):
                name = wikiutil.pageName2inputFile(name)
            wikipages.append(wikiutil.fixFileNameCase(name))
    else:
        wikipages = [x for x in os.listdir('.')
                     if x.endswith('.wiki') and not x.startswith('_')]

    if mode == 'clean':
        print 'Cleaning...'
        for filename in wikipages:
            pagename = wikiutil.inputFile2pageName(filename)
            try:
                os.unlink(wikiutil.pageName2outputFile(pagename))
                print wikiutil.pageName2outputFile(pagename)
            except OSError:
                pass
    elif mode == 'make':
        print 'Making...'
        # Rebuild only pages whose output is missing or older than the source.
        todo = []
        for filename in wikipages:
            ofilename = wikiutil.inputFile2outputFile(filename)
            if not os.path.exists(ofilename) or \
                    os.path.getmtime(ofilename) < os.path.getmtime(filename):
                todo.append(filename)
        process(todo)
    elif mode == 'build':
        print 'Building...'
        process(wikipages)

    if options.synchronize:
        print 'Synchronizing with %s...' % getattr(config.ftp, 'host', '???')
        try:
            host = config.ftp.host
        except AttributeError:
            sys.exit('cannot synchronize, configure the FTP server access first')
        # Import ftpsync only if --synchronize was specified so that ftputil
        # doesn't have to be installed when this option is not used.
        from ftpsync import synchronize
        synchronize(options.force)
    elif options.force:
        sys.exit('error: --force can only be used together with --synchronize')
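# Typical invocations, given the options defined above (shell commands shown
# as comments; the script name statwiki.py is an assumption):
#
#   python statwiki.py --build                 # rebuild every page
#   python statwiki.py --make FrontPage        # rebuild FrontPage if stale
#   python statwiki.py --make --synchronize    # rebuild, then FTP-upload
#   python statwiki.py --clean -d ~/mywiki     # delete generated .html files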
def process(wikipages):
    donepages = []

    # Load all pragmas.
    global all_pragmas  # used by Content objects too
    all_pragmas = Pragmas(force_update=wikipages)

    # Check for deleted pages.
    for filename, pragmas in all_pragmas.previous_items():
        if not os.path.exists(filename):
            # Process the categories so the page is removed from them.
            for category in pragmas.multiple('#category'):
                fname = wikiutil.pageName2inputFile(category)
                if fname not in wikipages:
                    wikipages.append(fname)
            all_pragmas.remove(filename)

    # Process the pages.
    while wikipages:
        filename = wikipages[0]
        del wikipages[0]
        assert os.path.exists(filename), '%s does not exist' % filename
        pagename = wikiutil.inputFile2pageName(filename)
        pragmas = all_pragmas[filename]
        prev_pragmas = all_pragmas.previous(filename)

        # Prepare the namespace for the generator script.
        globs = dict(pagename=pagename,
                     summary=pragmas.single('#summary', ''),
                     genconfig=config.generator)
        print filename

        # Process categories this page was added to.
        for category in pragmas.multiple('#category'):
            if category not in prev_pragmas.multiple('#category'):
                fname = wikiutil.pageName2inputFile(category)
                wikiutil.assertFileNameCase(fname)
                if fname not in donepages and fname not in wikipages:
                    wikipages.append(fname)

        # Process categories this page was removed from.
        for category in prev_pragmas.multiple('#category'):
            if category not in pragmas.multiple('#category'):
                fname = wikiutil.pageName2inputFile(category)
                if os.path.exists(fname):
                    wikiutil.assertFileNameCase(fname)
                    if fname not in donepages and fname not in wikipages:
                        wikipages.append(fname)

        # Load included pages.
        for inc in pragmas.multiple('#include'):
            args = inc.split()
            assert len(args) == 3 and args[1] == 'as', \
                '#include pragma syntax error'
            globs[args[2]] = Content(args[0])

        # Create the HTML using the generator script.
        globs.update(dict(
            content=Content(pagename),
            modify_time=time.strftime(config.general.timeformat,
                                      time.gmtime(os.path.getmtime(filename))),
            generate_time=time.strftime(config.general.timeformat,
                                        time.gmtime()),
            out=codecs.open(wikiutil.pageName2outputFile(pagename), 'wt',
                            encoding=config.charset)))
        try:
            for args in pragmas.multiple('#execute'):
                exec args in globs
            execfile(config.general.generator, globs)
        except:
            # Don't leave a half-written output file behind.
            globs['out'].close()
            os.remove(wikiutil.pageName2outputFile(pagename))
            raise
        globs['out'].close()

        if filename not in donepages:
            donepages.append(filename)

        # Add pages that include the current page to the processing queue.
        for filename, pragmas in all_pragmas.items():
            try:
                inc = pragmas.single('#include').split(None, 1)[0]
            except ValueError:
                continue
            if inc != pagename:
                continue
            if filename not in donepages and filename not in wikipages:
                assert os.path.exists(filename), '%s does not exist' % filename
                wikipages.append(filename)
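# A sketch of how pragmas drive process(), based on the parsing above
# (page names and the #execute statement are illustrative):
#
#   #summary A short page description.
#   #category HowTos
#   #include SideBar as sidebar
#   #execute pagename = pagename.upper()
#
# '#include SideBar as sidebar' splits into ['SideBar', 'as', 'sidebar'], so
# the generator script sees a Content('SideBar') object bound to the name
# `sidebar` in its namespace, alongside `content`, `modify_time`,
# `generate_time` and the open `out` file.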