def generate(self, request):
    """Render the articles overview page; yields a single (html, path) pair.

    Rendering is skipped entirely when the output file exists and neither
    the entry list nor the template has changed since the last run.
    """
    # drafts are never published; newest entries first
    entrylist = sorted((e for e in request['entrylist'] if not e.draft),
                       key=lambda k: k.date, reverse=True)

    tt = self.env.engine.fromfile(self.template)
    path = joinurl(self.conf['output_dir'], self.path, 'index.html')

    # hash over all entries to detect added/removed/modified articles
    hv = md5(*entrylist, attr=lambda o: o.md5)
    rv = memoize('articles-hash')

    if rv == hv:
        has_changed = False
    else:
        # save new value for next run
        memoize('articles-hash', hv)
        has_changed = True

    if exists(path) and not has_changed and not tt.has_changed:
        event.skip(path)
        # bare `return` instead of `raise StopIteration`: identical in a
        # Python 2 generator and still valid after PEP 479 (Python 3.7+),
        # where an explicit StopIteration becomes a RuntimeError
        return

    # group entries by (year, month) for the archive-style listing
    articles = {}
    for entry in entrylist:
        articles.setdefault((entry.year, entry.imonth), []).append(entry)

    route = self.path
    html = tt.render(conf=self.conf, articles=articles,
                     env=union(self.env, num_entries=len(entrylist), route=route))
    yield html, path
def generate(self, request):
    """Render the articles overview page; yields a single (html, path) pair.

    Rendering is skipped entirely when the output file exists and neither
    the entry list nor the template has changed since the last run.
    """
    # drafts are never published; newest entries first
    entrylist = sorted((e for e in request['entrylist'] if not e.draft),
                       key=lambda k: k.date, reverse=True)

    tt = self.env.tt.fromfile(self.template)
    path = joinurl(self.conf['output_dir'], self.path, 'index.html')

    # hash over all entries to detect added/removed/modified articles
    hv = md5(*entrylist, attr=lambda o: o.md5)
    rv = memoize('articles-hash')

    if rv == hv:
        has_changed = False
    else:
        # save new value for next run
        memoize('articles-hash', hv)
        has_changed = True

    if exists(path) and not has_changed and not tt.has_changed:
        event.skip(path)
        # bare `return` instead of `raise StopIteration`: identical in a
        # Python 2 generator and still valid after PEP 479 (Python 3.7+),
        # where an explicit StopIteration becomes a RuntimeError
        return

    # group entries by (year, month) for the archive-style listing
    articles = {}
    for entry in entrylist:
        articles.setdefault((entry.year, entry.month), []).append(entry)

    html = tt.render(conf=self.conf, articles=articles,
                     env=union(self.env, num_entries=len(entrylist)))
    yield html, path
def generate(self, request):
    """Render the feed (atom/rss) template; yields a single (html, path) pair.

    Skips rendering when the output exists and no entry, the permalink set,
    or the template has changed.
    """
    # drafts are excluded, feed is capped to the newest `num_entries` items
    entrylist = filter(lambda e: not e.draft, request['entrylist'])
    entrylist = list(entrylist)[0:self.num_entries]

    tt = self.env.engine.fromfile('%s.xml' % self.type)

    path = joinurl(self.conf['output_dir'], self.path)
    if not path.endswith(('.xml', '.html')):
        path = joinurl(path, 'index.html')

    # detect removed entries (hash over permalinks, not content)
    hv = md5(*entrylist, attr=lambda e: e.permalink)
    if memoize(path) != hv:
        memoize(path, hv)
        has_changed = True
    else:
        has_changed = False

    if (exists(path) and not filter(lambda e: e.has_changed, entrylist)
            and not has_changed and not tt.has_changed):
        event.skip(path)
        # bare `return` instead of `raise StopIteration`: identical in a
        # Python 2 generator and still valid after PEP 479 (Python 3.7+),
        # where an explicit StopIteration becomes a RuntimeError
        return

    # feed `updated` timestamp falls back to "now" for an empty entry list
    updated = entrylist[0].date if entrylist else datetime.utcnow()
    html = tt.render(conf=self.conf,
                     env=union(self.env, route=self.path, updated=updated,
                               entrylist=entrylist))
    yield html, path
def breaks(env, firstrun):
    """Return whether the new version may break current configuration and
    print all changes between the current and new version."""
    # last version seen by this installation; (0, 4) when never recorded
    version = memoize('version') or (0, 4)
    if version >= (env.version.major, env.version.minor):
        # no upgrade happened since the last run
        return False

    # remember the now-current version for the next run
    memoize('version', (env.version.major, env.version.minor, env.version.patch))
    if firstrun:
        # fresh setup: nothing existing that an upgrade could break
        return False

    broken = False
    # Python 2 print statement: a bare `print` emits a blank line
    print
    # walk every released major.minor between the recorded and current version
    for major in range(version[0], env.version.major or 1):
        for minor in range(version[1], env.version.minor):
            # rv: True when this release may break configuration,
            # hints: human-readable changelog text
            rv, hints = changesfor('%i.%i' % (major, minor + 1))
            broken = broken or rv
            # trailing comma suppresses the newline (Python 2 print statement)
            print (blue('Acrylamid') + ' %i.%s' % (major, minor+1) + u' – changes').encode('utf-8'),
            if broken:
                print (u'– ' + red('may break something.')).encode('utf-8')
            else:
                print
            print
            print colorize(hints).encode('utf-8')
            print
    return broken
def has_changed(self, entrylist):
    """Return True when the entry permalinks differ from the cached set,
    updating the cache as a side effect; False otherwise."""
    # a changed permalink set invalidates prev/next navigation links
    digest = md5(*entrylist, attr=lambda e: e.permalink)
    cached = memoize('entry-permalinks')
    if cached == digest:
        return False
    memoize('entry-permalinks', digest)
    return True
def generate(self, request):
    """Render every entry to its own page; yields (html, path) per entry."""
    tt = self.env.tt.fromfile(self.template)
    entrylist = request['entrylist']

    # map output path -> entry, detecting path collisions along the way
    pathes = dict()
    for entry in entrylist:
        if entry.permalink != expand(self.path, entry):
            # the entry carries a custom permalink that overrides the route
            p = joinurl(self.conf['output_dir'], entry.permalink)
        else:
            p = joinurl(self.conf['output_dir'], expand(self.path, entry))
        if p.endswith('/'):
            p = joinurl(p, 'index.html')
        if p in pathes:
            # two entries would render to the same file
            raise AcrylamidException("title collision %r in %r" % (entry.permalink, entry.filename))
        pathes[p] = entry

    # a changed permalink set forces re-rendering of all entries, because
    # prev/next navigation links may now point elsewhere
    has_changed = False
    hv = md5(*entrylist, attr=lambda e: e.permalink)
    if memoize('entry-permalinks') != hv:
        memoize('entry-permalinks', hv)
        has_changed = True

    # newest first; assumes entrylist shares this order (used for prev/next)
    pathes = sorted(pathes.iteritems(), key=lambda k: k[1].date, reverse=True)
    for i, (path, entry) in enumerate(pathes):
        # `next` is the newer neighbour, `prev` the older one
        next = None if i == 0 else link(entrylist[i-1].title, entrylist[i-1].permalink.rstrip('/'), entrylist[i-1])
        prev = None if i == len(pathes) - 1 else link(entrylist[i+1].title, entrylist[i+1].permalink.rstrip('/'), entrylist[i+1])
        if exists(path) and not any([has_changed, entry.has_changed, tt.has_changed]):
            event.skip(path)
            continue
        html = tt.render(conf=self.conf, entry=entry,
                         env=union(self.env, entrylist=[entry], type='entry',
                                   prev=prev, next=next))
        yield html, path
def generate(self, conf, env, data):
    """Render yearly/monthly/daily archive pages; yields (html, path) pairs."""
    tt = env.engine.fromfile(env, self.template)

    # choose the grouping key from the placeholders present in the route;
    # the most specific placeholder wins (day > month > year)
    keyfunc = lambda k: ()
    if '/:year' in self.path:
        keyfunc = lambda k: (k.year, )
    if '/:month' in self.path:
        keyfunc = lambda k: (k.year, k.imonth)
    if '/:day' in self.path:
        keyfunc = lambda k: (k.year, k.imonth, k.iday)

    for next, curr, prev in neighborhood(
        groupby(data['entrylist'], keyfunc)):
        # salt makes the memoize key unique per archive period
        salt, group = '-'.join(str(i) for i in curr[0]), list(curr[1])
        modified = memoize('archive-' + salt, hash(*group)) or any(
            e.modified for e in group)

        # neighbouring archive pages, rendered as navigation links
        if prev:
            prev = link(u'/'.join('%02i' % i for i in prev[0]),
                        expand(self.path, prev[1][0]))
        if next:
            next = link(u'/'.join('%02i' % i for i in next[0]),
                        expand(self.path, next[1][0]))

        route = expand(self.path, group[0])
        path = joinurl(conf['output_dir'], route)

        # an object storing year, zero-padded month and day as attributes (may be None)
        key = type(
            'Archive', (object, ), dict(
                zip(('year', 'month', 'day'),
                    map(lambda x: '%02i' % x if x else None, keyfunc(group[0])))))()

        if isfile(path) and not (modified or tt.modified or env.modified or conf.modified):
            event.skip('archive', path)
            continue

        html = tt.render(conf=conf,
                         env=union(env, entrylist=group, type='archive',
                                   prev=prev, curr=link(route), next=next,
                                   num_entries=len(group), route=route, archive=key))
        yield html, path
def breaks(env, firstrun):
    """Return whether the new version may break current configuration and
    print all changes between the current and new version."""
    # last version seen by this installation; (0, 4) when never recorded
    version = memoize("version") or (0, 4)
    if version >= (env.version.major, env.version.minor):
        # no upgrade happened since the last run
        return False

    # remember the now-current version for the next run
    memoize("version", (env.version.major, env.version.minor))
    if firstrun:
        # fresh setup: nothing existing that an upgrade could break
        return False

    broken = False
    # walk every released major.minor between the recorded and current version
    for major in range(version[0], env.version.major or 1):
        for minor in range(version[1], env.version.minor):
            # rv: True when this release may break configuration,
            # hints: human-readable changelog text
            rv, hints = changesfor("%i.%i" % (major, minor + 1))
            broken = broken or rv
            if not hints:
                continue
            print()
            # NOTE(review): printing an encoded str prints a bytes repr
            # (b'...') on Python 3 — presumably this runs under Python 2
            # with `print_function` imported; confirm before changing
            print((blue("Acrylamid") + " %i.%s" % (major, minor + 1) + u" – changes").encode("utf-8"), end="")
            if broken:
                print((u"– " + red("may break something.")).encode("utf-8"))
            else:
                print()
            print()
            print(colorize(hints).encode("utf-8"))
            print()
    return broken
def breaks(env, firstrun):
    """Return whether the new version may break current configuration and
    print all changes between the current and new version."""
    # last version this installation was run with; default to 0.4
    last_seen = memoize('version') or (0, 4)
    current = (env.version.major, env.version.minor)
    if last_seen >= current:
        # no upgrade since the previous run
        return False

    # record the current version for subsequent runs
    memoize('version', current)
    if firstrun:
        # a fresh setup has no existing configuration to break
        return False

    may_break = False
    # iterate over every release between the recorded and current version
    for major in range(last_seen[0], env.version.major or 1):
        for minor in range(last_seen[1], env.version.minor):
            rv, hints = changesfor('%i.%i' % (major, minor + 1))
            if rv:
                may_break = True
            if not hints:
                continue
            print()
            header = (blue('Acrylamid') + ' %i.%s' % (major, minor + 1) + u' – changes').encode('utf-8')
            print(header, end="")
            if may_break:
                print((u'– ' + red('may break something.')).encode('utf-8'))
            else:
                print()
            print()
            print(colorize(hints).encode('utf-8'))
            print()
    return may_break
def generate(self, conf, env, data):
    """Render yearly/monthly/daily archive pages; yields (html, path) pairs."""
    tt = env.engine.fromfile(env, self.template)

    # choose the grouping key from the placeholders present in the route;
    # the most specific placeholder wins (day > month > year)
    keyfunc = lambda k: ( )
    if '/:year' in self.path:
        keyfunc = lambda k: (k.year, )
    if '/:month' in self.path:
        keyfunc = lambda k: (k.year, k.imonth)
    if '/:day' in self.path:
        keyfunc = lambda k: (k.year, k.imonth, k.iday)

    for next, curr, prev in neighborhood(groupby(data['entrylist'], keyfunc)):
        # salt makes the memoize key unique per archive period
        salt, group = '-'.join(str(i) for i in curr[0]), list(curr[1])
        modified = memoize('archive-' + salt, hash(*group)) or any(e.modified for e in group)

        # neighbouring archive pages, rendered as navigation links
        if prev:
            prev = link(u'/'.join('%02i' % i for i in prev[0]),
                        expand(self.path, prev[1][0]))
        if next:
            next = link(u'/'.join('%02i' % i for i in next[0]),
                        expand(self.path, next[1][0]))

        route = expand(self.path, group[0])
        path = joinurl(conf['output_dir'], route)

        # an object storing year, zero-padded month and day as attributes (may be None)
        key = type('Archive', (object, ),
                   dict(zip(('year', 'month', 'day'),
                            map(lambda x: '%02i' % x if x else None, keyfunc(group[0])))))()

        if isfile(path) and not (modified or tt.modified or env.modified or conf.modified):
            event.skip('archive', path)
            continue

        html = tt.render(conf=conf,
                         env=union(env, entrylist=group, type='archive',
                                   prev=prev, curr=link(route), next=next,
                                   num_entries=len(group), route=route, archive=key))
        yield html, path
def compile(conf, env):
    """The compilation process."""
    if env.options.force:
        # --force: drop the cache so everything is rebuilt from scratch
        cache.clear(conf.get("cache_dir"))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(zip(["entrylist", "pages", "translations", "drafts"],
                  map(HashableList, readers.load(conf))))

    entrylist, pages = rv["entrylist"], rv["pages"]
    translations, drafts = rv["translations"], rv["drafts"]

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we for `no` and get the function name and arguments
        f = val[2:] if val.startswith("no") else val
        fname, fargs = f.split("+")[:1][0], f.split("+")[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith("no"):
                fx = filters.disable(fx)
        except ValueError:
            # fall back to the full spec as filter name before giving up
            try:
                fx = aflist[val.split("+")[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException("no such filter: %s" % val)

        ns[fx].add(val)

    # include actual used filters to trigger modified state
    env.filters = HashableList(ns.keys())

    for entry in chain(entrylist, pages, drafts):
        for v in _views:
            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data["conf"]["filters"]

            for fn in found:
                # look up the filter instance registered for this alias
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection to what this view's condition accepts
        for var in "entrylist", "pages", "translations", "drafts":
            data[var] = HashableList(filter(v.condition, locals()[var])) if v.condition else locals()[var]

        tt = time.time()
        for buf, path in v.generate(conf, env, data):
            try:
                helpers.mkfile(
                    buf, path, time.time() - tt, ns=v.name,
                    force=env.options.force, dryrun=env.options.dryrun
                )
            except UnicodeError:
                log.exception(path)
            finally:
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # wait for unfinished hooks
    hooks.shutdown()

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize("Configuration", hash(conf))
    helpers.memoize("Environment", hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info(
        "%i new, %i updated, %i skipped [%.2fs]",
        event.count("create"), event.count("update"),
        event.count("identical") + event.count("skip"),
        time.time() - ctime,
    )
def compile(conf, env):
    """The compilation process."""
    if env.options.force:
        # --force: drop the cache so everything is rebuilt from scratch
        cache.clear(conf.get('cache_dir'))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(zip(['entrylist', 'pages', 'translations', 'drafts'],
                  map(HashableList, readers.load(conf))))

    entrylist, pages = rv['entrylist'], rv['pages']
    translations, drafts = rv['translations'], rv['drafts']

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # get available filter list, something like with obj.get-function
    # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back to the full spec as filter name before giving up
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        ns[fx].add(val)

    for entry in chain(entrylist, pages, drafts):
        for v in _views:
            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data['conf']['filters']

            for fn in found:
                # look up the filter instance registered for this alias
                fx, _ = next((k for k in ns.iteritems() if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection to what this view's condition accepts
        for var in 'entrylist', 'pages', 'translations', 'drafts':
            data[var] = HashableList(filter(v.condition, locals()[var])) \
                if v.condition else locals()[var]

        tt = time.time()
        for buf, path in v.generate(conf, env, data):
            try:
                helpers.mkfile(buf, path, time.time()-tt, **env.options.__dict__)
            finally:
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize('Configuration', hash(conf))
    helpers.memoize('Environment', hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]',
             event.count('create'), event.count('update'),
             event.count('identical') + event.count('skip'),
             time.time() - ctime)
def compile(conf, env):
    """The compilation process."""
    # set up user hooks and run the blocking 'pre' stage before anything else
    hooks.initialize(conf, env)
    hooks.run(conf, env, 'pre')

    if env.options.force:
        # --force: drop the cache so everything is rebuilt from scratch
        cache.clear(conf.get('cache_dir'))

    # time measurement
    ctime = time.time()

    # populate env and corrects some conf things
    data = initialize(conf, env)

    # load pages/entries and store them in env
    rv = dict(
        zip(['entrylist', 'pages', 'translations', 'drafts'],
            map(HashableList, readers.load(conf))))

    entrylist, pages = rv['entrylist'], rv['pages']
    translations, drafts = rv['translations'], rv['drafts']

    # load references
    refs.load(entrylist, pages, translations, drafts)

    data.update(rv)
    env.globals.update(rv)

    # here we store all found filter and their aliases
    ns = defaultdict(set)

    # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py (skip translations, has no items)
    found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), [])

    for val in found:
        # first we for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](conf, env, val, *fargs)
            if val.startswith('no'):
                fx = filters.disable(fx)
        except ValueError:
            # fall back to the full spec as filter name before giving up
            try:
                fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        ns[fx].add(val)

    # include actual used filters to trigger modified state
    env.filters = HashableList(iterkeys(ns))

    for entry in chain(entrylist, pages, drafts):
        for v in _views:
            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + data['conf']['filters']

            for fn in found:
                # look up the filter instance registered for this alias
                fx, _ = next((k for k in iteritems(ns) if fn in k[1]))
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v)

    # lets offer a last break to populate tags and such
    for v in _views:
        env = v.context(conf, env, data)

    # now teh real thing!
    for v in _views:

        for entry in chain(entrylist, pages, translations, drafts):
            entry.context = v

        # narrow each collection to what this view's condition accepts
        for var in 'entrylist', 'pages', 'translations', 'drafts':
            data[var] = HashableList(filter(v.condition, locals()[var])) \
                if v.condition else locals()[var]

        tt = time.time()
        for buf, path in v.generate(conf, env, data):
            try:
                helpers.mkfile(buf, path, time.time() - tt, ns=v.name,
                               force=env.options.force, dryrun=env.options.dryrun)
            except UnicodeError:
                log.exception(path)
            finally:
                buf.close()
            tt = time.time()

    # copy modified/missing assets to output
    assets.compile(conf, env)

    # wait for unfinished hooks
    hooks.shutdown()

    # run post hooks (blocks)
    hooks.run(conf, env, 'post')

    # save conf/environment hash and new/changed/unchanged references
    helpers.memoize('Configuration', hash(conf))
    helpers.memoize('Environment', hash(env))
    refs.save()

    # remove abandoned cache files
    cache.shutdown()

    # print a short summary
    log.info('%i new, %i updated, %i skipped [%.2fs]',
             event.count('create'), event.count('update'),
             event.count('identical') + event.count('skip'),
             time.time() - ctime)