def do_summary(conf, env, options):
    """Print a short blog overview: version, cache size, the most recent
    posts (drafts highlighted) and a published/drafted tally."""
    # show at most `options.max` recent items, defaulting to five
    count = options.max if options.max > 0 else 5

    entrylist, pages, translations, drafts = readers.load(conf)
    everything = entrylist + translations + drafts
    everything.sort(key=lambda item: item.date, reverse=True)

    print()
    print('Acrylamid', blue(env['version']) + ',', end=' ')
    print('cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb')
    print()

    for item in everything[:count]:
        # strip tzinfo so `ago` can work with a naive datetime
        print(' ', green(ago(item.date.replace(tzinfo=None)).ljust(13)), end=' ')
        if item.draft:
            print(white(item.title))
        else:
            print(normal(item.title))

    published = len([e for e in everything if not e.draft])
    drafted = len([e for e in everything if e.draft])

    print()
    print('%s published,' % blue(published), end=' ')
    print('%s drafted articles' % blue(drafted))

    info = join(conf.get('cache_dir', '.cache/'), 'info')
    if not isfile(info):
        # nothing ever compiled, so there is no timestamp to report
        return

    # the mtime of the cache info file marks the last compilation run
    when = localtime(getmtime(info))
    print('last compilation at %s' % blue(u(strftime(u'%d. %B %Y, %H:%M', when))))
def run(conf, env, options):
    """Subcommand: ping -- notify external resources via Pingback etc."""
    commands.initialize(conf, env)
    entrylist = [entry for entry in readers.load(conf)[0] if not entry.draft]

    if options.file:
        # BUG FIX: the previous `filter(lambda ...)[0]` is not subscriptable
        # on Python 3 (filter returns an iterator), raising TypeError instead
        # of the IndexError the except clause expected.  A list comprehension
        # behaves identically on both Python 2 and 3.
        matches = [e for e in entrylist if e.filename == options.file]
        if not matches:
            raise AcrylamidException("no such post!")
        entrylist = [matches[0]]

    if options.service == 'twitter':
        if twitter is None:
            raise AcrylamidException("'twitter' egg not found")
        # all posts, or the newest `--max` (default one)
        for entry in entrylist if options.all else entrylist[:options.max or 1]:
            tweet(entry, conf, options.dryrun)
        return

    # XXX we should search for actual hrefs not random grepping, but this
    # requires access to the cache at non-runtime which is unfortunately
    # not possible yet.
    patterns = [
        r'(?<=\n)\[.*?\]:\s?(https?://.+)$',       # referenced markdown
        r'\[[^\]]+\]\((https?://[^\)]+)\)',        # inline markdown
        r'(?<=\n)\.\.\s+[^:]+:\s+(https?://.+)$',  # referenced docutils
        r'`[^<]+ <(https?://[^>]+)>`_',            # inline docutils
    ]

    pool = Threadpool(options.jobs)
    ping = lambda src, dest: pingback(helpers.joinurl(conf['www_root'], src),
                                      dest, options.dryrun)

    for entry in entrylist if options.all else entrylist[:options.max or 1]:
        # flatten the per-pattern match lists into one href list
        for href in sum([re.findall(pat, entry.source, re.M) for pat in patterns], []):
            pool.add_task(ping, *[entry.permalink, href])

    try:
        pool.wait_completion()
    except KeyboardInterrupt:
        sys.exit(1)
def run(conf, env, options):
    """Subcommand: ping -- notify external resources via Pingback etc."""
    commands.initialize(conf, env)
    entrylist = [entry for entry in readers.load(conf)[0] if not entry.draft]

    if options.file:
        # BUG FIX: `filter(lambda ...)[0]` fails on Python 3 because filter
        # returns an iterator (TypeError, not the IndexError that was being
        # caught).  An equivalent list comprehension works on both versions.
        matches = [e for e in entrylist if e.filename == options.file]
        if not matches:
            raise AcrylamidException("no such post!")
        entrylist = [matches[0]]

    if options.service == 'twitter':
        if twitter is None:
            raise AcrylamidException("'twitter' egg not found")
        # all posts, or the newest `--max` (default one)
        for entry in entrylist if options.all else entrylist[:options.max or 1]:
            tweet(entry, conf, options.dryrun)
        return

    # XXX we should search for actual hrefs not random grepping, but this
    # requires access to the cache at non-runtime which is unfortunately
    # not possible yet.
    patterns = [
        r'(?<=\n)\[.*?\]:\s?(https?://.+)$',       # referenced markdown
        r'\[[^\]]+\]\((https?://[^\)]+)\)',        # inline markdown
        r'(?<=\n)\.\.\s+[^:]+:\s+(https?://.+)$',  # referenced docutils
        r'`[^<]+ <(https?://[^>]+)>`_',            # inline docutils
    ]

    pool = Threadpool(options.jobs)
    ping = lambda src, dest: pingback(helpers.joinurl(conf['www_root'], src),
                                      dest, options.dryrun)

    for entry in entrylist if options.all else entrylist[:options.max or 1]:
        # flatten the per-pattern match lists into one href list
        for href in sum([re.findall(pat, entry.source, re.M) for pat in patterns], []):
            pool.add_task(ping, *[entry.permalink, href])

    try:
        pool.wait_completion()
    except KeyboardInterrupt:
        sys.exit(1)
def do_tags(conf, env, options):
    """Print tag statistics (Python 2 revision: print statements, iteritems).

    With --coverage, list every tag used at most *coverage* times together
    with the filenames of the posts using it; otherwise print a
    column-formatted cloud of the --max (default 100) most frequent tags.
    """
    limit = options.max if options.max > 0 else 100
    entrylist = readers.load(conf)[0]

    if options.coverage:
        # rarely-used tags along with the posts they occur in; the trailing
        # comma keeps tag and filenames on the same output line (py2 print)
        for tag, entries in sorted(fetch(entrylist).iteritems()):
            if len(entries) <= options.coverage:
                print blue(tag).encode('utf-8'),
                print ', '.join(e.filename.encode('utf-8') for e in entries)
        return

    # "<count> <tag>" labels, most frequently used tags first
    tags = ['%i %s' % (len(value), key) for key, value in
            sorted(fetch(entrylist).iteritems(), key=lambda k: len(k[1]), reverse=True)]

    # batch the labels into roughly four groups and transpose into columns;
    # the second field of `stty size` is the terminal width in characters
    colprint(
        list(izip(*list(batch(tags[:limit], ceil(len(tags)/4.0))), fillvalue='')),
        os.popen('stty size', 'r').read().split()[1]
    )
def do_summary(conf, env, options):
    """Print a short blog overview (Python 2 revision): version, cache size,
    the most recent posts and a published/drafted tally."""
    # show at most `options.max` recent items, defaulting to five
    limit = options.max if options.max > 0 else 5
    entrylist, pages = readers.load(conf)

    print
    print 'acrylamid', blue(env['version']) + ',',
    print 'cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb'
    print

    for entry in entrylist[:limit]:
        # strip tzinfo so `ago` can work with a naive datetime; trailing
        # comma keeps the title on the same line (py2 print)
        print ' ', green(ago(entry.date.replace(tzinfo=None)).ljust(13)),
        print white(entry.title) if entry.draft else normal(entry.title)

    print
    print '%s published,' % blue(len([e for e in entrylist if not e.draft])),
    print '%s drafted articles' % blue(len([e for e in entrylist if e.draft]))

    # mtime of the cache info file marks the last compilation run.
    # NOTE(review): there is no isfile() guard here, so getmtime raises
    # OSError when the cache has never been written -- confirm intended.
    time = localtime(getmtime(join(conf.get('cache_dir', '.cache/'), 'info')))
    print 'last compilation at %s' % blue(strftime('%d. %B %Y, %H:%M', time))
def run(conf, env, options):
    """Subcommand: info -- a short overview of a blog."""
    # show at most `options.max` recent items, defaulting to five
    limit = options.max if options.max > 0 else 5
    commands.initialize(conf, env)
    entrylist, pages = readers.load(conf)

    print
    print "acrylamid", blue(env["version"]) + ",",
    print "cache size:", blue("%0.2f" % (cache.size / 1024.0 ** 2)) + " mb"
    print

    for entry in entrylist[:limit]:
        # strip tzinfo so `ago` can work with a naive datetime; trailing
        # comma keeps the title on the same line (py2 print)
        print " ", green(ago(entry.date.replace(tzinfo=None)).ljust(13)),
        print white(entry.title) if entry.draft else normal(entry.title)

    print
    print "%s published," % blue(len([e for e in entrylist if not e.draft])),
    print "%s drafted articles" % blue(len([e for e in entrylist if e.draft]))

    # mtime of the cache info file marks the last compilation run.
    # NOTE(review): no isfile() guard -- getmtime raises OSError when the
    # cache has never been written; confirm intended.
    time = localtime(getmtime(join(conf.get("cache_dir", ".cache/"), "info")))
    print "last compilation at %s" % blue(strftime("%d. %B %Y, %H:%M", time))
def run(conf, env, options):
    """Subcommand: info -- a short overview of a blog."""
    # show at most `options.max` recent items, defaulting to five
    limit = options.max if options.max > 0 else 5
    commands.initialize(conf, env)
    entrylist, pages = readers.load(conf)

    print
    print 'acrylamid', blue(env['version']) + ',',
    print 'cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb'
    print

    for entry in entrylist[:limit]:
        # strip tzinfo so `ago` can work with a naive datetime; trailing
        # comma keeps the title on the same line (py2 print).
        # NOTE(review): non-draft titles are printed plain here, without
        # the `normal(...)` wrapper used by sibling revisions -- confirm.
        print ' ', green(ago(entry.date.replace(tzinfo=None)).ljust(13)),
        print white(entry.title) if entry.draft else entry.title

    print
    print '%s published,' % blue(len([e for e in entrylist if not e.draft])),
    print '%s drafted articles' % blue(len([e for e in entrylist if e.draft]))

    # mtime of the cache info file marks the last compilation run.
    # NOTE(review): no isfile() guard -- getmtime raises OSError when the
    # cache has never been written; confirm intended.
    time = localtime(getmtime(join(conf.get('cache_dir', '.cache/'), 'info')))
    print 'last compilation at %s' % blue(strftime('%d. %B %Y, %H:%M', time))
def do_tags(conf, env, options):
    """Show tag statistics: either the posts behind rarely-used tags
    (--coverage) or a column-formatted cloud of the most frequent tags."""
    top = options.max if options.max > 0 else 100
    entrylist = readers.load(conf)[0]
    mapping = fetch(entrylist)

    if options.coverage:
        # rarely-used tags along with the posts they occur in
        for tag, posts in sorted(iteritems(mapping)):
            if len(posts) > options.coverage:
                continue
            print(blue(tag).encode('utf-8'), end=' ')
            print(', '.join(e.filename.encode('utf-8') for e in posts))
        return

    # "<count> <tag>" labels, most frequently used tags first
    ranked = sorted(iteritems(mapping), key=lambda k: len(k[1]), reverse=True)
    labels = ['%i %s' % (len(posts), tag) for tag, posts in ranked]

    # batch into roughly four groups, transpose into columns, and fit the
    # terminal width reported by `stty size` (second field = columns)
    rows = list(izip(*list(batch(labels[:top], ceil(len(labels) / 4.0))), fillvalue=''))
    width = os.popen('stty size', 'r').read().split()[1]
    colprint(rows, width)
def compile(conf, env, force=False, **options):
    """The compilation process.

    Current API:

    #. when we require context
    #. when we called an event

    New API:

    #. before we start with view Initialization
    #. after we initialized views
    #. before we require context
    #. after we required context
    #. before we template
    #. before we write a file
    #. when we called an event
    #. when we finish
    """
    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    request = initialize(conf, env)

    # load pages/entries and store them in env
    entrylist, pages = readers.load(conf)
    env.globals['entrylist'] = entrylist
    env.globals['pages'] = pages

    # XXX translations should be moved out of core
    env.globals['translations'] = translations = []

    if force:
        # acrylamid compile -f
        cache.clear()

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # get available filter list, something like with obj.get-function
    # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py
    found = sum((x.filters for x in entrylist+pages+_views), []) + request['conf']['filters']

    for val in found:
        # first we [check] for `no` and get the function name and arguments,
        # e.g. "nohyphenate" disables the "hyphenate" filter and
        # "summarize+42" passes "42" as argument
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back: maybe the leading "no" was part of the real name
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        # remember every spelling (alias) that maps to this filter instance
        ns[fx].add(val)

    for entry in entrylist + pages:
        for v in _views:

            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + request['conf']['filters']

            for fn in found:
                # look up the filter instance whose alias set contains fn.
                # NOTE(review): ns.iteritems() is Python 2 only.
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v.__class__.__name__)

    # lets offer a last break to populate tags or so
    # XXX this API component needs a review
    for v in _views:
        env = v.context(env, {'entrylist': entrylist, 'pages': pages,
                              'translations': translations})

    # now teh real thing!
    for v in _views:

        # XXX the entry should automatically determine its caller (using
        # some sys magic to recursively check wether the calling class is
        # derieved from `View`.)
        for entry in entrylist + pages + translations:
            entry.context = v.__class__.__name__

        request['pages'], request['translations'] = pages, translations
        # NOTE(review): on Python 3 filter() yields a lazy iterator here;
        # downstream consumers must only iterate it once -- confirm.
        request['entrylist'] = filter(v.condition, entrylist)
        tt = time.time()

        for html, path in v.generate(request):
            # tt measures per-file generation time for mkfile's reporting
            helpers.mkfile(html, path, time.time()-tt, **options)
            tt = time.time()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'),
             event.count('update'), event.count('identical') + event.count('skip'),
             time.time() - ctime)
def compile(conf, env):
    """The compilation process: load content, resolve filters per entry and
    view, render every view, sync assets and persist cache/reference state."""

    if env.options.force:
        # acrylamid compile -f: start from an empty cache
        cache.clear(conf.get("cache_dir"))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(zip(["entrylist", "pages", "translations", "drafts"],
                  map(HashableList, readers.load(conf))))
    entrylist, pages = rv["entrylist"], rv["pages"]
    translations, drafts = rv["translations"], rv["drafts"]

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we [check] for `no` and get the function name and arguments
        f = val[2:] if val.startswith("no") else val
        fname, fargs = f.split("+")[:1][0], f.split("+")[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith("no"):
                fx = filters.disable(fx)
        except ValueError:
            # fall back: maybe the leading "no" was part of the real name
            try:
                fx = aflist[val.split("+")[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException("no such filter: %s" % val)

        # remember every spelling (alias) that maps to this filter instance
        ns[fx].add(val)

    # include actual used filters to trigger modified state
    env.filters = HashableList(ns.keys())

    for entry in chain(entrylist, pages, drafts):
        for v in _views:

            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data["conf"]["filters"]

            for fn in found:
                # look up the filter instance whose alias set contains fn.
                # NOTE(review): ns.iteritems() is Python 2 only.
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection to what this view's condition accepts;
        # locals()[var] picks up the four lists bound above
        for var in "entrylist", "pages", "translations", "drafts":
            data[var] = HashableList(filter(v.condition, locals()[var])) if v.condition else locals()[var]

        tt = time.time()
        for buf, path in v.generate(conf, env, data):
            try:
                # tt measures per-file generation time for mkfile's reporting
                helpers.mkfile(
                    buf, path, time.time() - tt, ns=v.name,
                    force=env.options.force, dryrun=env.options.dryrun
                )
            except UnicodeError:
                log.exception(path)
            finally:
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # wait for unfinished hooks
    hooks.shutdown()

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize("Configuration", hash(conf))
    helpers.memoize("Environment", hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info(
        "%i new, %i updated, %i skipped [%.2fs]",
        event.count("create"),
        event.count("update"),
        event.count("identical") + event.count("skip"),
        time.time() - ctime,
    )
def compile(conf, env, force=False, **options):
    """The compilation process.

    Current API:

    #. when we require context
    #. when we called an event

    New API:

    #. before we start with view Initialization
    #. after we initialized views
    #. before we require context
    #. after we required context
    #. before we template
    #. before we write a file
    #. when we called an event
    #. when we finish
    """
    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    request = initialize(conf, env)

    # load pages/entries and store them in env
    entrylist, pages = readers.load(conf)
    env.globals['entrylist'] = entrylist
    env.globals['pages'] = pages

    # XXX translations should be moved out of core
    env.globals['translations'] = translations = []

    if force:
        # acrylamid compile -f
        cache.clear()

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # get available filter list, something like with obj.get-function
    # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py
    found = sum(
        (x.filters for x in entrylist + pages + _views), []) + request['conf']['filters']

    for val in found:
        # first we [check] for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back: maybe the leading "no" was part of the real name
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        # remember every spelling (alias) that maps to this filter instance
        ns[fx].add(val)

    for entry in entrylist + pages:
        for v in _views:

            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + request['conf']['filters']

            for fn in found:
                # look up the filter instance whose alias set contains fn.
                # NOTE(review): ns.iteritems() is Python 2 only.
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags or so
    # XXX this API component needs a review
    for v in _views:
        env = v.context(env, {
            'entrylist': entrylist,
            'pages': pages,
            'translations': translations
        })

    # now teh real thing!
    for v in _views:

        # XXX the entry should automatically determine its caller (using
        # some sys magic to recursively check wether the calling class is
        # derieved from `View`.)
        for entry in entrylist + pages + translations:
            entry.context = v

        request['pages'], request['translations'] = pages, translations
        # NOTE(review): on Python 3 filter() yields a lazy iterator here;
        # downstream consumers must only iterate it once -- confirm.
        request['entrylist'] = filter(v.condition, entrylist)
        tt = time.time()

        for html, path in v.generate(request):
            # tt measures per-file generation time for mkfile's reporting
            helpers.mkfile(html, path, time.time() - tt, **options)
            tt = time.time()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'),
             event.count('update'), event.count('identical') + event.count('skip'),
             time.time() - ctime)
def compile(conf, env):
    """The compilation process: load content, resolve filters per entry and
    view, render every view, sync assets and persist cache/reference state."""

    if env.options.force:
        # acrylamid compile -f: start from an empty cache
        cache.clear(conf.get('cache_dir'))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(zip(['entrylist', 'pages', 'translations', 'drafts'],
                  map(HashableList, readers.load(conf))))
    entrylist, pages = rv['entrylist'], rv['pages']
    translations, drafts = rv['translations'], rv['drafts']

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # get available filter list, something like with obj.get-function
    # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we [check] for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back: maybe the leading "no" was part of the real name
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        # remember every spelling (alias) that maps to this filter instance
        ns[fx].add(val)

    for entry in chain(entrylist, pages, drafts):
        for v in _views:

            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data['conf']['filters']

            for fn in found:
                # look up the filter instance whose alias set contains fn.
                # NOTE(review): ns.iteritems() is Python 2 only.
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection to what this view's condition accepts;
        # locals()[var] picks up the four lists bound above
        for var in 'entrylist', 'pages', 'translations', 'drafts':
            data[var] = HashableList(filter(v.condition, locals()[var])) \
                if v.condition else locals()[var]

        tt = time.time()
        for buf, path in v.generate(conf, env, data):
            try:
                # tt measures per-file generation time for mkfile's reporting
                helpers.mkfile(buf, path, time.time()-tt, **env.options.__dict__)
            finally:
                # always release the rendered buffer
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize('Configuration', hash(conf))
    helpers.memoize('Environment', hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'),
             event.count('update'), event.count('identical') + event.count('skip'),
             time.time() - ctime)
def compile(conf, env):
    """The compilation process: run pre hooks, load content, resolve filters
    per entry and view, render every view, sync assets, run post hooks and
    persist cache/reference state."""

    hooks.initialize(conf, env)
    hooks.run(conf, env, 'pre')

    if env.options.force:
        # acrylamid compile -f: start from an empty cache
        cache.clear(conf.get('cache_dir'))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(
        zip(['entrylist', 'pages', 'translations', 'drafts'],
            map(HashableList, readers.load(conf))))
    entrylist, pages = rv['entrylist'], rv['pages']
    translations, drafts = rv['translations'], rv['drafts']

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we [check] for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back: maybe the leading "no" was part of the real name
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        # remember every spelling (alias) that maps to this filter instance
        ns[fx].add(val)

    # include actual used filters to trigger modified state
    env.filters = HashableList(iterkeys(ns))

    for entry in chain(entrylist, pages, drafts):
        for v in _views:

            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data['conf']['filters']

            for fn in found:
                # look up the filter instance whose alias set contains fn
                fx, _ = next((k for k in iteritems(ns) if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection to what this view's condition accepts;
        # locals()[var] picks up the four lists bound above
        for var in 'entrylist', 'pages', 'translations', 'drafts':
            data[var] = HashableList(filter(v.condition, locals()[var])) \
                if v.condition else locals()[var]

        tt = time.time()
        for buf, path in v.generate(conf, env, data):
            try:
                # tt measures per-file generation time for mkfile's reporting
                helpers.mkfile(buf, path, time.time() - tt, ns=v.name,
                               force=env.options.force, dryrun=env.options.dryrun)
            except UnicodeError:
                log.exception(path)
            finally:
                # always release the rendered buffer
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # wait for unfinished hooks
    hooks.shutdown()

    # run post hooks (blocks)
    hooks.run(conf, env, 'post')

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize('Configuration', hash(conf))
    helpers.memoize('Environment', hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'),
             event.count('update'), event.count('identical') + event.count('skip'),
             time.time() - ctime)