def transform(self, text, entry, *args):
    """Wrap known acronyms in ``<abbr title="...">`` tags.

    :param text: HTML content to process
    :param entry: entry object, used for the filename in error messages
    :param args: optional whitelist restricting which acronyms apply
    """
    acros = self.acronyms
    if len(args) > 0:
        # restrict to explicitly requested acronyms (keys are compiled patterns)
        acros = dict(filter(lambda k: any(k[0] == v for v in args),
                            iteritems(acros)))

    try:
        abbr = re.compile(r'\b(%s)\b' % '|'.join(
            (pat.pattern for pat in acros)))
    except re.error as e:
        log.warn("acronyms: %s", e.args[0])
        # without a compiled pattern we cannot substitute anything; the
        # original fell through here and raised NameError on the unbound
        # `abbr`
        return text

    def repl(match):
        abbr = match.group(0)
        desc = acros.get(abbr, None)
        if desc is None:
            # keys are compiled regexes, not plain strings: find first match
            for pat in acros:
                if pat.match(abbr):
                    desc = acros.get(pat)
                    break
        return '<abbr title="%s">%s</abbr>' % (desc, abbr)

    try:
        return ''.join(Acrynomify(text, abbr, repl).result)
    except HTMLParseError:
        log.exception('could not acronymize ' + entry.filename)
        return text
def execute(cmd, ns, src, dest=None):
    """Execute `cmd` such as `yui-compressor %1 -o %2` in-place.
    If `dest` is none, you don't have to supply %2."""
    assert '%1' in cmd
    command = cmd.replace('%1', src)

    if dest:
        assert '%2' in command
        command = command.replace('%2', dest)
        target_dir = dirname(dest)
        if not isdir(target_dir):
            os.makedirs(target_dir)

    try:
        output = system(command, shell=True)
    except (AcrylamidException, OSError):
        log.exception("uncaught exception during execution")
        return

    if dest is not None:
        log.info('create %s', dest)
        return

    # no explicit destination: replace `src` with the command's output,
    # going through a temporary file
    fd, tmp = mkstemp()
    with io.open(fd, 'w', encoding='utf-8') as handle:
        handle.write(output)
    shutil.move(tmp, src)
    log.info('update %s', src)
def discover(directories, index, filterfunc=lambda filename: True):
    """Import and initialize modules from `directories` list.

    :param directories: list of directories
    :param index: index function"""

    def find(directories, filterfunc):
        """Discover and yield python modules (aka files that endswith .py) if
        `filterfunc` returns True for that filename."""

        for directory in directories:
            for root, dirs, files in os.walk(directory):
                for fname in files:
                    if fname.endswith('.py') and filterfunc(join(root, fname)):
                        yield join(root, fname)

    for filename in find(directories, filterfunc):
        # module name: the basename without extension; a package's
        # '<pkg>/__init__.py' collapses to '<pkg>' via rchop
        modname, ext = os.path.splitext(os.path.basename(
            rchop(filename, os.sep + '__init__.py')))
        fp, path, descr = imp.find_module(modname, directories)

        # files below PATH are namespaced as 'acrylamid.<dotted relative path>'
        prefix = commonprefix((PATH, filename))
        if prefix:
            modname = 'acrylamid.'
            modname += rchop(filename[len(prefix):].replace(os.sep, '.'), '.py')

        try:
            # reuse an already imported module if present
            mod = sys.modules[modname]
        except KeyError:
            try:
                mod = imp.load_module(modname, fp, path, descr)
            except (ImportError, SyntaxError, ValueError) as e:
                # broken plugin: log and skip, don't abort discovery
                log.exception('%r %s: %s', modname, e.__class__.__name__, e)
                continue
        index(mod)
def run(cmd, ns, src, dest=None):
    """Execute `cmd` such as `yui-compressor %1 -o %2` in-place.
    If `dest` is none, you don't have to supply %2."""
    assert '%1' in cmd
    command = cmd.replace('%1', src)

    if dest:
        assert '%2' in command
        command = command.replace('%2', dest)
        destination_dir = dirname(dest)
        if not isdir(destination_dir):
            os.makedirs(destination_dir)

    try:
        output = system(command, shell=True)
    except (AcrylamidException, OSError):
        log.exception("uncaught exception during execution")
        return

    if dest is None:
        # rewrite the source file via a temporary file
        fd, tmp = mkstemp()
        with io.open(fd, 'w', encoding='utf-8') as handle:
            handle.write(output)
        shutil.move(tmp, src)
        log.info('update %s', src)
    else:
        log.info('create %s', dest)
def transform(self, content, entry, *args):
    """Strip markup from `content`, returning the original content
    unchanged if parsing fails.

    :param content: HTML content to strip
    :param entry: entry object, used for the filename in error messages
    """
    try:
        return ''.join(Text(content, args).result)
    # `except Exception` instead of the original bare `except:` so
    # SystemExit and KeyboardInterrupt are not swallowed here
    except Exception:
        log.exception('could not strip ' + entry.filename)
        return content
def write(self, src, dest, force=False, dryrun=False):
    """Run the configured external command on `src` and write its output to
    `dest` (the destination gets ``self.target`` as extension).

    :param src: source file path
    :param dest: destination path template; its extension is replaced
    :param force: recompile even if `dest` is newer than `src`
    :param dryrun: forwarded to `mkfile`; do not actually write
    """
    dest = dest.replace(splitext(src)[-1], self.target)

    # skip when the destination is already newer than the source
    if not force and isfile(dest) and getmtime(dest) > getmtime(src):
        # NOTE(review): `ns` is not a local here -- presumably a
        # module-level namespace constant; confirm against the full file
        return event.skip(ns, dest)

    if isinstance(self.cmd, basestring):
        self.cmd = [self.cmd, ]

    tt = time.time()
    fd, path = mkstemp(dir=core.cache.cache_dir)
    # make destination group/world-readable as other files from Acrylamid
    os.chmod(path, os.stat(path).st_mode | stat.S_IRGRP | stat.S_IROTH)

    try:
        res = helpers.system(self.cmd + [src])
    except (OSError, AcrylamidException) as e:
        # drop a stale destination so a failed compile is not served
        if isfile(dest):
            os.unlink(dest)
        log.exception('%s: %s' % (e.__class__.__name__, e.args[0]))
    else:
        # stage the command output in the temp file, then hand it to mkfile
        with os.fdopen(fd, 'w') as fp:
            fp.write(res)
        with io.open(path, 'rb') as fp:
            mkfile(fp, dest, time.time()-tt, ns, force, dryrun)
    finally:
        # always remove the temporary staging file
        os.unlink(path)
def get_raw_gist(self, gistID):
    """Fetch the raw content of a gist from GitHub.

    :param gistID: the gist identifier
    :return: the gist body as returned by the server, or '' on failure
    """
    url = "https://raw.github.com/gist/%s" % (gistID)
    try:
        return get(url).read()
    except (URLError, HTTPError) as e:
        # lazy %-style arguments instead of eager string formatting, so the
        # message is only rendered if the record is actually emitted
        log.exception('Failed to access URL %s : %s', url, e)
        return ''
def transform(self, content, entry, *args):
    """Extract the first `maxparagraphs` paragraphs of `content` as an
    introduction, with a link pointing at the full entry.

    :param content: HTML content
    :param entry: entry providing optional `intro` options and `permalink`
    :param args: optional positional override for maxparagraphs
    """
    options = helpers.union(Introduction.defaults, self.conf.fetch('intro_'))

    # per-entry overrides, if the entry defines any
    try:
        options.update(entry.intro)
    except AttributeError:
        pass

    try:
        maxparagraphs = int(options.get('maxparagraphs') or args[0])
    except (IndexError, ValueError) as ex:
        if isinstance(ex, ValueError):
            log.warn('Introduction: invalid maxparagraphs argument %r',
                     options.get('maxparagraphs') or args[0])
        maxparagraphs = 1

    try:
        return ''.join(Introducer(
            content, maxparagraphs, self.env.path + entry.permalink,
            options).result)
    # `except Exception` instead of a bare `except:` so SystemExit and
    # KeyboardInterrupt propagate; the original's trailing unreachable
    # `return content` has been removed
    except Exception:
        log.exception('could not extract intro from ' + entry.filename)
        return content
def transform(self, text, entry, *args):
    """Wrap known acronyms in ``<abbr title="...">`` tags.

    :param text: HTML content to process
    :param entry: entry object, used for the filename in error messages
    :param args: optional whitelist restricting which acronyms apply
    """
    acros = self.acronyms
    if len(args) > 0:
        # restrict to explicitly requested acronyms (keys are compiled patterns)
        acros = dict(filter(lambda k: any(k[0] == v for v in args),
                            acros.items()))

    try:
        abbr = re.compile(r'\b(%s)\b' % '|'.join(
            (pat.pattern for pat in acros)))
    except re.error as e:
        log.warn("acronyms: %s", e.args[0])
        # bail out: the original fell through and hit a NameError because
        # `abbr` was never bound after a failed compile
        return text

    def repl(match):
        abbr = match.group(0)
        desc = acros.get(abbr, None)
        if desc is None:
            # keys are compiled regexes; look the description up by matching
            for pat in acros:
                if pat.match(abbr):
                    desc = acros.get(pat)
                    break
        return '<abbr title="%s">%s</abbr>' % (desc, abbr)

    try:
        return ''.join(Acrynomify(text, abbr, repl).result)
    except HTMLParseError:
        log.exception('could not acronymize ' + entry.filename)
        return text
def run(self):
    """Worker loop: pull (func, args, kwargs) tasks off the queue and run
    them forever; task exceptions are logged, never propagated."""
    while True:
        func, args, kargs = self.tasks.get()
        try:
            func(*args, **kargs)
        except Exception as e:
            # lazy %-args; keep the worker alive on any task failure
            log.exception('%s: %s', e.__class__.__name__, str(e))
        finally:
            # mark the task done even if logging itself raised, so
            # Queue.join() can never dead-lock on this worker
            self.tasks.task_done()
def init(self, conf, env, *args):
    """Build a Jinja2 environment whose filters expose a shell `system`
    filter, `str.split`, and the public callables of a few whitelisted
    python modules (templates cannot import modules themselves).

    :param conf: configuration dict; read for JINJA2_IMPORT
    :param env: environment; its engine is overlaid when it already is a
                Jinja2 Environment
    """

    def system(cmd, stdin=None):
        # run `cmd` through the shell and return its stripped output; on
        # failure log a warning and return the error message instead
        try:
            return defaultsystem(cmd, stdin, shell=True).strip()
        except (OSError, AcrylamidException) as e:
            log.warn('%s: %s' % (e.__class__.__name__, e.args[0]))
            return e.args[0]

    self.conf = conf
    self.env = env

    # jinja2 is limited and can't import any module
    import time, datetime, os.path
    modules = [time, datetime, os.path]

    # check config for imports
    confimports = conf.get('jinja2_import')
    if confimports and isinstance(confimports, list):
        for modname in confimports:
            try:
                modules.append(__import__(modname))
            except ImportError as e:
                log.exception('Failed loading user defined Jinja2 import: '
                              '%s (JINJA2_IMPORT = %s)' % (e, confimports))

    # urllib moved between python 2 and python 3
    if PY2K:
        import urllib
        modules += [urllib]
    else:
        import urllib.request, urllib.parse, urllib.error
        modules += [urllib.request, urllib.parse, urllib.error]

    # overlay an existing engine if possible, otherwise start fresh;
    # cache_size=0 disables template caching
    if isinstance(env.engine, Environment):
        self.jinja2_env = env.engine.overlay(cache_size=0)
    else:
        self.jinja2_env = Environment(cache_size=0)

    self.jinja2_env.filters['system'] = system
    self.jinja2_env.filters['split'] = str.split

    # swap out platform specific os.path name (posixpath , ntpath, riscospath)
    ospathmodname, os.path.__name__ = os.path.__name__, 'os.path'

    for mod in modules:
        for name in dir(mod):
            # skip private names and nested submodules
            if name.startswith('_') or isinstance(getattr(mod, name), types.ModuleType):
                continue
            # register as e.g. 'os.path.join' or 'time.strftime'
            self.jinja2_env.filters[mod.__name__ + '.' + name] = getattr(
                mod, name)

    # restore original os.path module name
    os.path.__name__ = ospathmodname
def init(self, conf, env, *args):
    """Build a Jinja2 environment whose filters expose a shell `system`
    filter, `str.split`, and the public callables of a few whitelisted
    python modules (templates cannot import modules themselves).

    :param conf: configuration dict; read for JINJA2_IMPORT
    :param env: environment; its engine is overlaid when it already is a
                Jinja2 Environment
    """

    def system(cmd, stdin=None):
        # run `cmd` through the shell and return its stripped output; on
        # failure log a warning and return the error message instead
        try:
            return defaultsystem(cmd, stdin, shell=True).strip()
        except (OSError, AcrylamidException) as e:
            log.warn('%s: %s' % (e.__class__.__name__, e.args[0]))
            return e.args[0]

    self.conf = conf
    self.env = env

    # jinja2 is limited and can't import any module
    import time, datetime, os.path
    modules = [time, datetime, os.path]

    # check config for imports
    confimports = conf.get('jinja2_import')
    if confimports and isinstance(confimports, list):
        for modname in confimports:
            try:
                modules.append(__import__(modname))
            except ImportError as e:
                log.exception('Failed loading user defined Jinja2 import: '
                              '%s (JINJA2_IMPORT = %s)' % (e, confimports))

    # urllib moved between python 2 and python 3
    if PY2K:
        import urllib
        modules += [urllib]
    else:
        import urllib.request, urllib.parse, urllib.error
        modules += [urllib.request, urllib.parse, urllib.error]

    # overlay an existing engine if possible, otherwise start fresh;
    # cache_size=0 disables template caching
    if isinstance(env.engine, Environment):
        self.jinja2_env = env.engine.overlay(cache_size=0)
    else:
        self.jinja2_env = Environment(cache_size=0)

    self.jinja2_env.filters['system'] = system
    self.jinja2_env.filters['split'] = str.split

    # swap out platform specific os.path name (posixpath , ntpath, riscospath)
    ospathmodname, os.path.__name__ = os.path.__name__, 'os.path'

    for mod in modules:
        for name in dir(mod):
            # skip private names and nested submodules
            if name.startswith('_') or isinstance(getattr(mod, name), types.ModuleType):
                continue
            # register as e.g. 'os.path.join' or 'time.strftime'
            self.jinja2_env.filters[mod.__name__ + '.' + name] = getattr(mod, name)

    # restore original os.path module name
    os.path.__name__ = ospathmodname
def autocompile(ws, conf, env):
    """Subcommand: autocompile -- automatically re-compiles when something
    in content-dir has changed and parallel serving files."""
    mtime = -1
    cmtime = getmtime('conf.py')

    # config content_extension originally defined as string, not a list
    exts = conf.get('content_extension', ['.txt', '.rst', '.md'])
    if isinstance(exts, string_types):
        whitelist = (exts, )
    else:
        whitelist = tuple(exts)

    while True:
        # pause the webserver while we may be compiling
        ws.wait = True

        # newest modification time across content, theme and static files
        ntime = max(
            max(
                getmtime(e)
                for e in readers.filelist(conf['content_dir'],
                                          conf['content_ignore'])
                if e.endswith(whitelist)),
            max(
                getmtime(p)
                for p in chain([
                    f for theme in conf['theme']
                    for f in readers.filelist(theme, conf['theme_ignore'])
                ], readers.filelist(conf['static'], conf['static_ignore']))))

        if mtime != ntime:
            try:
                compile(conf, env)
            except (SystemExit, KeyboardInterrupt):
                raise
            except Exception:
                log.exception("uncaught exception during auto-compilation")
            else:
                # reload config and reset environment only after a
                # successful compile
                conf = load(env.options.conf)
                env = Environment.new(env)

            event.reset()
            mtime = ntime

        ws.wait = False

        # conf.py itself changed: re-exec the whole process
        if cmtime != getmtime('conf.py'):
            log.info(' * Restarting due to change in conf.py')
            # Kill the webserver
            ws.shutdown()
            # Restart acrylamid
            os.execvp(sys.argv[0], sys.argv)

        time.sleep(1)
def autocompile(ws, conf, env):
    """Subcommand: autocompile -- automatically re-compiles when something
    in content-dir has changed and parallel serving files."""
    mtime = -1
    cmtime = getmtime('conf.py')

    # config content_extension originally defined as string, not a list
    exts = conf.get('content_extension',['.txt', '.rst', '.md'])
    if isinstance(exts, string_types):
        whitelist = (exts,)
    else:
        whitelist = tuple(exts)

    while True:
        # pause the webserver while we may be compiling
        ws.wait = True

        # newest modification time across content, theme and static files
        ntime = max(
            max(getmtime(e) for e in readers.filelist(
                conf['content_dir'], conf['content_ignore'])
                if e.endswith(whitelist)),
            max(getmtime(p) for p in chain(
                readers.filelist(conf['theme'], conf['theme_ignore']),
                readers.filelist(conf['static'], conf['static_ignore']))))

        if mtime != ntime:
            try:
                compile(conf, env)
            except (SystemExit, KeyboardInterrupt):
                raise
            except Exception:
                log.exception("uncaught exception during auto-compilation")
            else:
                # reload config and reset environment only after a
                # successful compile
                conf = load(env.options.conf)
                env = Environment.new(env)

            event.reset()
            mtime = ntime

        ws.wait = False

        # conf.py itself changed: re-exec the whole process
        if cmtime != getmtime('conf.py'):
            log.info(' * Restarting due to change in conf.py')
            # Kill the webserver
            ws.shutdown()
            # Restart acrylamid
            os.execvp(sys.argv[0], sys.argv)

        time.sleep(1)
def autocompile(ws, conf, env):
    """Subcommand: autocompile -- automatically re-compiles when something
    in content-dir has changed and parallel serving files."""
    mtime = -1
    cmtime = getmtime("conf.py")

    while True:
        # pause the webserver while we may be compiling
        ws.wait = True

        # newest modification time across text content, theme and static files
        ntime = max(
            max(getmtime(e) for e in readers.filelist(conf["content_dir"],
                conf["content_ignore"]) if istext(e)),
            max(
                getmtime(p)
                for p in chain(
                    readers.filelist(conf["theme"], conf["theme_ignore"]),
                    readers.filelist(conf["static"], conf["static_ignore"]),
                )
            ),
        )

        if mtime != ntime:
            try:
                compile(conf, env)
            except (SystemExit, KeyboardInterrupt):
                raise
            except Exception:
                log.exception("uncaught exception during auto-compilation")
            else:
                # reload config and reset environment only after a
                # successful compile
                conf = load(env.options.conf)
                env = Environment.new(env)

            event.reset()
            mtime = ntime

        ws.wait = False

        # conf.py itself changed: re-exec the whole process
        if cmtime != getmtime("conf.py"):
            log.info(" * Restarting due to change in conf.py")
            # Kill the webserver
            ws.shutdown()
            # Restart acrylamid
            os.execvp(sys.argv[0], sys.argv)

        time.sleep(1)
def load(conf): """Load and parse textfiles from content directory and optionally filter by an ignore pattern. Filenames ending with a known whitelist of extensions are processed. This function is *not* exception-tolerant. If Acrylamid could not handle a file it will raise an exception. It returns a tuple containing the list of entries sorted by date reverse (newest comes first) and other pages (unsorted). :param conf: configuration with CONTENT_DIR, CONTENT_EXTENSION and CONTENT_IGNORE set""" # list of Entry-objects reverse sorted by date. entries, pages, trans, drafts = [], [], [], [] # config content_extension originally defined as string, not a list exts = conf.get('content_extension', ['.txt', '.rst', '.md']) if isinstance(exts, string_types): whitelist = (exts, ) else: whitelist = tuple(exts) # collect and skip over malformed entries for path in filelist(conf['content_dir'], conf['content_ignore']): if path.endswith(whitelist): try: entry = Entry(path, conf) if entry.draft: drafts.append(entry) elif entry.type == 'entry': entries.append(entry) else: pages.append(entry) except AcrylamidException as e: log.exception('failed to parse file %s (%s)' % (path, e)) except: log.fatal('uncaught exception for ' + path) raise # sort by date, reverse return sorted(entries, key=lambda k: k.date, reverse=True), pages, trans, drafts
def transform(self, content, entry, *args):
    """Shorten `content` to at most `maxwords` words, appending a
    continue-reading link that points at the full entry."""
    options = helpers.union(Summarize.defaults, self.conf.fetch("summarize_"))

    # per-entry overrides, if the entry defines any
    try:
        options.update(entry.summarize)
    except AttributeError:
        pass

    # word limit: per-entry/config option first, then positional argument
    try:
        maxwords = int(options.get("maxwords") or args[0])
    except IndexError:
        maxwords = 100
    except ValueError:
        log.warn("Summarize: invalid maxwords argument %r",
                 options.get("maxwords") or args[0])
        maxwords = 100

    try:
        pieces = Summarizer(content, maxwords,
                            self.env.path + entry.permalink, options).result
        return "".join(pieces)
    except HTMLParseError:
        log.exception("could not summarize " + entry.filename)
        return content
def load(conf):
    """Read all text files below the content directory and parse them.

    Only filenames ending with one of the configured content extensions are
    processed; malformed entries are logged and skipped, any other error is
    fatal and re-raised.

    :param conf: configuration with CONTENT_DIR, CONTENT_EXTENSION and
                 CONTENT_IGNORE set
    :return: (entries sorted by date, newest first), pages, translations,
             drafts
    """
    entries, pages, trans, drafts = [], [], [], []

    # content_extension may be a single string or a list of extensions
    exts = conf.get('content_extension', ['.txt', '.rst', '.md'])
    whitelist = (exts,) if isinstance(exts, string_types) else tuple(exts)

    for path in filelist(conf['content_dir'], conf['content_ignore']):
        if not path.endswith(whitelist):
            continue
        try:
            entry = Entry(path, conf)
            # dispatch into drafts, blog entries and static pages
            if entry.draft:
                drafts.append(entry)
            elif entry.type == 'entry':
                entries.append(entry)
            else:
                pages.append(entry)
        except AcrylamidException as e:
            log.exception('failed to parse file %s (%s)' % (path, e))
        except:
            log.fatal('uncaught exception for ' + path)
            raise

    entries.sort(key=lambda entry: entry.date, reverse=True)
    return entries, pages, trans, drafts
def transform(self, content, entry, *args):
    """Insert soft hyphens into `content` using language specific
    hyphenation patterns.

    :param content: HTML content
    :param entry: entry object providing the language and filename
    :param args: optional minimum word length (defaults to 10)
    """
    if entry.lang != self.conf['lang']:
        # entry language differs from the blog default: build patterns
        try:
            hyphenate_word = build(entry.lang.replace('_', '-'))
        except HyphenPatternNotFound as e:
            log.warn(e.args[0])
            hyphenate_word = lambda x: [x]
    else:
        hyphenate_word = self.default

    # separate except clauses instead of comparing e.__class__.__name__
    # against the string 'ValueError'
    try:
        length = int(args[0])
    except IndexError:
        length = 10
    except ValueError:
        log.warn('Hyphenate: invalid length argument %r', args[0])
        length = 10

    try:
        return ''.join(Separator(content, hyphenate_word,
                                 length=length).result)
    except HTMLParseError:
        log.exception('could not hyphenate ' + entry.filename)
        return content
def transform(self, content, entry, *args):
    """Extract the first `maxparagraphs` paragraphs of `content` as an
    introduction, with a link pointing at the full entry.

    :param content: HTML content
    :param entry: entry providing optional `intro` options and `permalink`
    :param args: optional positional override for maxparagraphs
    """
    options = helpers.union(Introduction.defaults, self.conf.fetch('intro_'))

    # per-entry overrides, if the entry defines any
    try:
        options.update(entry.intro)
    except AttributeError:
        pass

    try:
        maxparagraphs = int(options.get('maxparagraphs') or args[0])
    except (IndexError, ValueError) as ex:
        if isinstance(ex, ValueError):
            log.warn('Introduction: invalid maxparagraphs argument %r',
                     options.get('maxparagraphs') or args[0])
        maxparagraphs = 1

    # unused `as e` binding and the unreachable trailing `return content`
    # of the original have been removed
    try:
        return ''.join(Introducer(
            content, maxparagraphs, self.env.path + entry.permalink,
            options).result)
    except HTMLParseError:
        log.exception('could not extract intro from ' + entry.filename)
        return content
def transform(self, content, entry, *args):
    """Insert soft hyphens into `content` using language specific
    hyphenation patterns.

    :param content: HTML content
    :param entry: entry object providing the language and filename
    :param args: optional minimum word length (defaults to 10)
    """
    if entry.lang != self.conf['lang']:
        # entry language differs from the blog default: build patterns
        try:
            hyphenate_word = build(entry.lang.replace('_', '-'))
        except HyphenPatternNotFound as e:
            log.warn(e.args[0])
            hyphenate_word = lambda x: [x]
    else:
        hyphenate_word = self.default

    # separate except clauses instead of comparing e.__class__.__name__
    # against the string 'ValueError'
    try:
        length = int(args[0])
    except IndexError:
        length = 10
    except ValueError:
        log.warn('Hyphenate: invalid length argument %r', args[0])
        length = 10

    try:
        return ''.join(
            Separator(content, hyphenate_word, length=length).result)
    # `except Exception` instead of a bare `except:` so SystemExit and
    # KeyboardInterrupt propagate
    except Exception:
        log.exception('could not hyphenate ' + entry.filename)
        return content
def transform(self, content, entry, *args):
    """Shorten `content` to at most `maxwords` words, appending a
    continue-reading link that points at the full entry.

    :param content: HTML content
    :param entry: entry providing optional `summarize` options and permalink
    :param args: optional positional override for maxwords
    """
    options = helpers.union(Summarize.defaults, self.conf.fetch('summarize_'))

    # per-entry overrides, if the entry defines any
    try:
        options.update(entry.summarize)
    except AttributeError:
        pass

    try:
        maxwords = int(options.get('maxwords') or args[0])
    except (IndexError, ValueError) as ex:
        if isinstance(ex, ValueError):
            log.warn('Summarize: invalid maxwords argument %r',
                     options.get('maxwords') or args[0])
        maxwords = 100

    try:
        return ''.join(
            Summarizer(content, maxwords, self.env.path + entry.permalink,
                       options).result)
    # `except Exception` instead of a bare `except:` so SystemExit and
    # KeyboardInterrupt propagate
    except Exception:
        log.exception('could not summarize ' + entry.filename)
        return content
def compile(conf, env):
    """The compilation process."""

    hooks.initialize(conf, env)
    hooks.run(conf, env, 'pre')

    if env.options.force:
        cache.clear(conf.get('cache_dir'))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(
        zip(['entrylist', 'pages', 'translations', 'drafts'],
            map(HashableList, readers.load(conf))))

    entrylist, pages = rv['entrylist'], rv['pages']
    translations, drafts = rv['translations'], rv['drafts']

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()
    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back to the full alias (before '+') as filter name
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        ns[fx].add(val)

    # include actual used filters to trigger modified state
    env.filters = HashableList(iterkeys(ns))

    for entry in chain(entrylist, pages, drafts):
        for v in _views:
            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data['conf']['filters']

            for fn in found:
                # the initialized filter object registered under this alias
                fx, _ = next((k for k in iteritems(ns) if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection down to what this view's condition accepts
        for var in 'entrylist', 'pages', 'translations', 'drafts':
            data[var] = HashableList(filter(v.condition, locals()[var])) \
                if v.condition else locals()[var]

        tt = time.time()

        for buf, path in v.generate(conf, env, data):
            try:
                helpers.mkfile(buf, path, time.time() - tt, ns=v.name,
                               force=env.options.force,
                               dryrun=env.options.dryrun)
            except UnicodeError:
                log.exception(path)
            finally:
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # wait for unfinished hooks
    hooks.shutdown()

    # run post hooks (blocks)
    hooks.run(conf, env, 'post')

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize('Configuration', hash(conf))
    helpers.memoize('Environment', hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'),
             event.count('update'),
             event.count('identical') + event.count('skip'),
             time.time() - ctime)
def Acryl():
    """The main function that dispatches the CLI.

    We use :class:`AcrylFormatter` as custom help formatter that ommits the
    useless list of available subcommands and their aliases. All flags from
    acrylamid --help are also available in subcommands altough not
    explicitely printed in their help."""

    parser = argparse.ArgumentParser(
        parents=[], formatter_class=AcrylFormatter
    )
    parser.add_argument("-v", "--verbose", action="store_const", dest="verbosity",
                        help="more verbose", const=log.SKIP, default=log.INFO)
    parser.add_argument("-q", "--quiet", action="store_const", dest="verbosity",
                        help="less verbose", const=log.WARN)
    parser.add_argument("-C", "--no-color", action="store_false", dest="colors",
                        help="disable color", default=True)
    parser.add_argument("--conf", dest="conf", help="alternate conf.py",
                        default="conf.py", metavar="/path/to/conf")
    parser.add_argument("--version", action="version",
                        version=colors.blue('Acrylamid ') + dist.version)

    subparsers = parser.add_subparsers(dest="parser")

    # a repeat yourself of default arguments but not visible on subcommand --help
    default = argparse.ArgumentParser(add_help=False)
    default.add_argument("-v", "--verbose", action="store_const", dest="verbosity",
                         help=argparse.SUPPRESS, const=log.SKIP, default=log.INFO)
    default.add_argument("-q", "--quiet", action="store_const", dest="verbosity",
                         help=argparse.SUPPRESS, const=log.WARN)
    default.add_argument("-C", "--no-color", action="store_false", dest="colors",
                         help=argparse.SUPPRESS, default=True)

    # --- gen params --- #
    generate = subparsers.add_parser('compile', help='compile blog',
                                     parents=[default])
    generate.add_argument("-f", "--force", action="store_true", dest="force",
                          help="clear cache before compilation", default=False)
    generate.add_argument("-n", "--dry-run", dest="dryrun", action='store_true',
                          help="show what would have been compiled", default=False)
    generate.add_argument("--ignore", dest="ignore", action="store_true",
                          help="ignore critical errors", default=False)
    generate.add_argument("--search", dest="search", action="store_true",
                          help="build search index", default=False)

    # --- webserver params --- #
    view = subparsers.add_parser('view', help="fire up built-in webserver",
                                 parents=[default])
    view.add_argument("-p", "--port", dest="port", type=int, default=8000,
                      help="webserver port")

    # --- aco params --- #
    autocompile = subparsers.add_parser('autocompile',
                                        help="automatic compilation and serving",
                                        parents=[default])
    autocompile.add_argument("-f", "--force", action="store_true", dest="force",
                             help="clear cache before compilation", default=False)
    autocompile.add_argument("-n", "--dry-run", dest="dryrun", action='store_true',
                             help="show what would have been compiled", default=False)
    autocompile.add_argument("--ignore", dest="ignore", action="store_true",
                             help="ignore critical errors", default=False)
    autocompile.add_argument("--search", dest="search", action="store_true",
                             help="build search index", default=False)
    autocompile.add_argument("-p", "--port", dest="port", type=int, default=8000,
                             help="webserver port")

    # register aliases by poking argparse internals
    # NOTE(review): _name_parser_map is a private argparse attribute --
    # works on CPython, but not a stable API
    for alias in ('co', 'gen', 'generate'):
        subparsers._name_parser_map[alias] = generate

    for alias in ('serve', 'srv'):
        subparsers._name_parser_map[alias] = view

    subparsers._name_parser_map['aco'] = autocompile

    # temporary log to catch issues during task initialization
    log.init('temporary', level=log.WARN, colors=False)

    # initialize other tasks
    tasks.initialize(subparsers, default)

    # parse args
    options = parser.parse_args()

    # initialize colored logger
    log.init('acrylamid', level=options.verbosity, colors=options.colors)

    env = core.Environment({'author': __author__, 'url': __url__,
                            'options': options, 'globals': Struct()})

    try:
        conf = core.load(options.conf)
    except IOError:
        log.critical('no conf.py found. Are you inside your blog?')
        sys.exit(1)
    except Exception as e:
        log.critical("%s in `conf.py`" % e.__class__.__name__)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    # -- run -- #
    if options.parser in ('gen', 'generate', 'co', 'compile'):
        log.setLevel(options.verbosity)
        try:
            commands.compile(conf, env)
        except AcrylamidException as e:
            log.exception(e.args[0])
            sys.exit(1)

    elif options.parser in ('srv', 'serve', 'view'):
        from acrylamid.lib.httpd import Webserver
        # quiet server unless verbose logging was requested
        ws = partial(Webserver, options.port, conf['output_dir'])
        ws = ws(log.info) if options.verbosity < 20 else ws()
        ws.start()

        log.info(' * Running on http://127.0.0.1:%i/' % options.port)

        try:
            while True:
                time.sleep(1)
        except (SystemExit, KeyboardInterrupt) as e:
            ws.kill_received = True
            sys.exit(0)

    elif options.parser in ('aco', 'autocompile'):
        from acrylamid.lib.httpd import Webserver
        # XXX compile on request _or_ use inotify/fsevent
        ws = Webserver(options.port, conf['output_dir'])
        ws.start()

        log.info(' * Running on http://127.0.0.1:%i/' % options.port)

        try:
            commands.autocompile(ws, conf, env)
        except (SystemExit, KeyboardInterrupt) as e:
            ws.kill_received = True
            log.error(e.args[0])
            traceback.print_exc(file=sys.stdout)
            sys.exit(0)

    elif options.parser in tasks.collected:
        # user-registered task subcommand
        try:
            tasks.collected[options.parser](conf, env, options)
        except AcrylamidException as e:
            log.exception('uncaught exception')
            sys.exit(1)

    else:
        log.critical('No such command!')
        sys.exit(2)

    sys.exit(0)
def compile(conf, env):
    """The compilation process."""

    if env.options.force:
        cache.clear(conf.get("cache_dir"))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(zip(["entrylist", "pages", "translations", "drafts"],
                  map(HashableList, readers.load(conf))))

    entrylist, pages = rv["entrylist"], rv["pages"]
    translations, drafts = rv["translations"], rv["drafts"]

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()
    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we for `no` and get the function name and arguments
        f = val[2:] if val.startswith("no") else val
        fname, fargs = f.split("+")[:1][0], f.split("+")[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith("no"):
                fx = filters.disable(fx)
        except ValueError:
            # fall back to the full alias (before '+') as filter name
            try:
                fx = aflist[val.split("+")[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException("no such filter: %s" % val)

        ns[fx].add(val)

    # include actual used filters to trigger modified state
    env.filters = HashableList(ns.keys())

    for entry in chain(entrylist, pages, drafts):
        for v in _views:
            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data["conf"]["filters"]

            for fn in found:
                # the initialized filter object registered under this alias
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection down to what this view's condition accepts
        for var in "entrylist", "pages", "translations", "drafts":
            data[var] = HashableList(filter(v.condition, locals()[var])) if v.condition else locals()[var]

        tt = time.time()

        for buf, path in v.generate(conf, env, data):
            try:
                helpers.mkfile(
                    buf, path, time.time() - tt, ns=v.name,
                    force=env.options.force, dryrun=env.options.dryrun
                )
            except UnicodeError:
                log.exception(path)
            finally:
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # wait for unfinished hooks
    hooks.shutdown()

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize("Configuration", hash(conf))
    helpers.memoize("Environment", hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info(
        "%i new, %i updated, %i skipped [%.2fs]", event.count("create"),
        event.count("update"), event.count("identical") + event.count("skip"),
        time.time() - ctime,
    )