def write(self, src, dest, force=False, dryrun=False):
    dest = dest.replace(splitext(src)[-1], self.target)

    if not force and isfile(dest) and getmtime(dest) > getmtime(src):
        return event.skip(ns, dest)

    if isinstance(self.cmd, basestring):
        self.cmd = [self.cmd, ]

    tt = time.time()
    fd, path = mkstemp(dir=core.cache.cache_dir)

    # make destination group/world-readable as other files from Acrylamid
    os.chmod(path, os.stat(path).st_mode | stat.S_IRGRP | stat.S_IROTH)

    try:
        res = helpers.system(self.cmd + [src])
    except (OSError, AcrylamidException) as e:
        if isfile(dest):
            os.unlink(dest)
        log.exception('%s: %s' % (e.__class__.__name__, e.args[0]))
    else:
        with os.fdopen(fd, 'w') as fp:
            fp.write(res)
        with io.open(path, 'rb') as fp:
            mkfile(fp, dest, time.time() - tt, ns, force, dryrun)
    finally:
        os.unlink(path)

def write(self, src, dest, force=False, dryrun=False):
    if not force and isfile(dest) and getmtime(dest) > getmtime(src):
        return event.skip(dest)

    with io.open(src, 'rb') as fp:
        mkfile(fp, dest, force=force, dryrun=dryrun, mode="b")

def write(self, src, dest, force=False, dryrun=False):
    dest = dest.replace(self.ext, self.target)

    if not force and isfile(dest) and getmtime(dest) > getmtime(src):
        return event.skip(dest)

    if isinstance(self.cmd, basestring):
        self.cmd = [self.cmd, ]

    tt = time.time()
    fd, path = mkstemp(dir=core.cache.cache_dir)

    try:
        res = helpers.system(self.cmd + [src])
    except (OSError, AcrylamidException) as e:
        if isfile(dest):
            os.unlink(dest)
        log.warn('%s: %s' % (e.__class__.__name__, e.args[0]))
    else:
        with os.fdopen(fd, 'w') as fp:
            fp.write(res)
        with io.open(path, 'rb') as fp:
            mkfile(fp, dest, ctime=time.time() - tt, force=force, dryrun=dryrun)
    finally:
        os.unlink(path)

def generate(self, conf, env, data):

    pathes, entrylist = set(), data[self.type]
    unmodified = not env.modified and not conf.modified

    for i, entry in enumerate(entrylist):

        if entry.hasproperty('permalink'):
            path = joinurl(conf['output_dir'], entry.permalink)
        else:
            path = joinurl(conf['output_dir'], expand(self.path, entry))

        if isfile(path) and path in pathes:
            try:
                os.remove(path)
            finally:
                other = [e.filename for e in entrylist
                         if e is not entry and e.permalink == entry.permalink][0]
                log.error("title collision %s caused by %s and %s",
                          entry.permalink, entry.filename, other)
                raise SystemExit

        pathes.add(path)
        next, prev = self.next(entrylist, i), self.prev(entrylist, i)

        # per-entry template
        tt = env.engine.fromfile(env, entry.props.get('layout', self.template))

        if all([isfile(path), unmodified, not tt.modified, not entry.modified,
                not modified(*references(entry))]):
            event.skip(self.name, path)
        else:
            html = tt.render(conf=conf, entry=entry,
                             env=union(env, entrylist=[entry],
                                       type=self.__class__.__name__.lower(),
                                       prev=prev, next=next,
                                       route=expand(self.path, entry)))
            yield html, path

        # check if any resources need to be moved
        if entry.hasproperty('copy'):
            for res_src in entry.resources:
                res_dest = join(dirname(path), basename(res_src))
                # Note, presence of res_src check in FileReader.getresources
                if isfile(res_dest) and getmtime(res_dest) > getmtime(res_src):
                    event.skip(self.name, res_dest)
                    continue
                try:
                    fp = io.open(res_src, 'rb')
                    # use mkfile rather than yield so different ns can be specified (and filtered by sitemap)
                    mkfile(fp, res_dest, ns='resource',
                           force=env.options.force, dryrun=env.options.dryrun)
                except IOError as e:
                    log.warn("Failed to copy resource '%s' whilst processing '%s' (%s)" %
                             (res_src, entry.filename, e.strerror))

def write(self, src, dest, force=False, dryrun=False):
    dest = dest.replace(self.ext, self.target)

    if not force and isfile(dest) and getmtime(dest) > getmtime(src):
        return event.skip(dest)

    if isinstance(self.cmd, basestring):
        self.cmd = [self.cmd, ]

    tt = time.time()
    fd, path = mkstemp(dir=core.cache.cache_dir)

    try:
        res = helpers.system(self.cmd + [src])
    except OSError:
        if isfile(dest):
            os.unlink(dest)
        raise AcrylamidException('%s is not available!' % self.cmd[0])

    with os.fdopen(fd, 'w') as fp:
        fp.write(res)

    with io.open(path, 'rb') as fp:
        mkfile(fp, dest, ctime=time.time() - tt, force=force, dryrun=dryrun, mode="b")

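# Hypothetical sketch, not from the sources above: the command-based write()
# variants only rely on three attributes of the surrounding class -- `cmd`
# (the external program to run), `ext` (the source extension) and `target`
# (the extension of the produced file).  The class name and command below are
# invented for illustration only.
class SASSWriter(object):

    ext, target = '.sass', '.css'
    cmd = 'sass'  # a plain string is normalised to ['sass'] by write() before use
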
def compile(conf, env, force=False, **options): """The compilation process. Current API: #. when we require context #. when we called an event New API: #. before we start with view Initialization #. after we initialized views #. before we require context #. after we required context #. before we template #. before we write a file #. when we called an event #. when we finish """ # time measurement ctime = time.time() # populate env and corrects some conf things request = initialize(conf, env) # load pages/entries and store them in env entrylist, pages = readers.load(conf) env.globals['entrylist'] = entrylist env.globals['pages'] = pages # XXX translations should be moved out of core env.globals['translations'] = translations = [] if force: # acrylamid compile -f cache.clear() # here we store all found filter and their aliases ns = defaultdict(set) # get available filter list, something like with obj.get-function # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...] aflist = filters.get_filters() # ... and get all configured views _views = views.get_views() # filters found in all entries, views and conf.py found = sum((x.filters for x in entrylist+pages+_views), []) + request['conf']['filters'] for val in found: # first we for `no` and get the function name and arguments f = val[2:] if val.startswith('no') else val fname, fargs = f.split('+')[:1][0], f.split('+')[1:] try: # initialize the filter with its function name and arguments fx = aflist[fname](conf, env, val, *fargs) if val.startswith('no'): fx = filters.disable(fx) except ValueError: try: fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs) except ValueError: raise AcrylamidException('no such filter: %s' % val) ns[fx].add(val) for entry in entrylist + pages: for v in _views: # a list that sorts out conflicting and duplicated filters flst = filters.FilterList() # filters found in this specific entry plus views and conf.py found = entry.filters + v.filters + request['conf']['filters'] for fn in found: fx, _ = next((k for k in ns.iteritems() if fn in k[1])) if fx not in flst: flst.append(fx) # sort them ascending because we will pop within filters.add entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v.__class__.__name__) # lets offer a last break to populate tags or so # XXX this API component needs a review for v in _views: env = v.context(env, {'entrylist': entrylist, 'pages': pages, 'translations': translations}) # now teh real thing! for v in _views: # XXX the entry should automatically determine its caller (using # some sys magic to recursively check wether the calling class is # derieved from `View`.) for entry in entrylist + pages + translations: entry.context = v.__class__.__name__ request['pages'], request['translations'] = pages, translations request['entrylist'] = filter(v.condition, entrylist) tt = time.time() for html, path in v.generate(request): helpers.mkfile(html, path, time.time()-tt, **options) tt = time.time() # remove abandoned cache files cache.shutdown() # print a short summary log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'), event.count('update'), event.count('identical') + event.count('skip'), time.time() - ctime)
def compile(conf, env): """The compilation process.""" if env.options.force: cache.clear(conf.get("cache_dir")) # time measurement ctime = time.time() # populate env and corrects some conf things data = initialize(conf, env) # load pages/entries and store them in env rv = dict(zip(["entrylist", "pages", "translations", "drafts"], map(HashableList, readers.load(conf)))) entrylist, pages = rv["entrylist"], rv["pages"] translations, drafts = rv["translations"], rv["drafts"] # load references refs.load(entrylist, pages, translations, drafts) data.update(rv) env.globals.update(rv) # here we store all found filter and their aliases ns = defaultdict(set) # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...] aflist = filters.get_filters() # ... and get all configured views _views = views.get_views() # filters found in all entries, views and conf.py (skip translations, has no items) found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), []) for val in found: # first we for `no` and get the function name and arguments f = val[2:] if val.startswith("no") else val fname, fargs = f.split("+")[:1][0], f.split("+")[1:] try: # initialize the filter with its function name and arguments fx = aflist[fname](conf, env, val, *fargs) if val.startswith("no"): fx = filters.disable(fx) except ValueError: try: fx = aflist[val.split("+")[:1][0]](conf, env, val, *fargs) except ValueError: raise AcrylamidException("no such filter: %s" % val) ns[fx].add(val) # include actual used filters to trigger modified state env.filters = HashableList(ns.keys()) for entry in chain(entrylist, pages, drafts): for v in _views: # a list that sorts out conflicting and duplicated filters flst = filters.FilterList() # filters found in this specific entry plus views and conf.py found = entry.filters + v.filters + data["conf"]["filters"] for fn in found: fx, _ = next((k for k in ns.iteritems() if fn in k[1])) if fx not in flst: flst.append(fx) # sort them ascending because we will pop within filters.add entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v) # lets offer a last break to populate tags and such for v in _views: env = v.context(conf, env, data) # now teh real thing! for v in _views: for entry in chain(entrylist, pages, translations, drafts): entry.context = v for var in "entrylist", "pages", "translations", "drafts": data[var] = HashableList(filter(v.condition, locals()[var])) if v.condition else locals()[var] tt = time.time() for buf, path in v.generate(conf, env, data): try: helpers.mkfile( buf, path, time.time() - tt, ns=v.name, force=env.options.force, dryrun=env.options.dryrun ) except UnicodeError: log.exception(path) finally: buf.close() tt = time.time() # copy modified/missing assets to output assets.compile(conf, env) # wait for unfinished hooks hooks.shutdown() # save conf/environment hash and new/changed/unchanged references helpers.memoize("Configuration", hash(conf)) helpers.memoize("Environment", hash(env)) refs.save() # remove abandoned cache files cache.shutdown() # print a short summary log.info( "%i new, %i updated, %i skipped [%.2fs]", event.count("create"), event.count("update"), event.count("identical") + event.count("skip"), time.time() - ctime, )
def write(self, src, dest, force=False, dryrun=False):
    if not force and not self.modified(src, dest):
        return event.skip(ns, dest)

    mkfile(self.generate(src, dest), dest, ns=ns, force=force, dryrun=dryrun)

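# Hypothetical sketch, not from the sources above: the write() just shown is
# a template method that defers to self.modified() and self.generate().  The
# subclass below only illustrates that contract; its name and behaviour are
# assumptions made for clarity.
class PlainCopy(object):

    def modified(self, src, dest):
        # rebuild when the destination is missing or older than the source
        return not isfile(dest) or getmtime(dest) < getmtime(src)

    def generate(self, src, dest):
        # return an open file object for mkfile() to consume
        return io.open(src, 'rb')
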
def compile(conf, env, force=False, **options): """The compilation process. Current API: #. when we require context #. when we called an event New API: #. before we start with view Initialization #. after we initialized views #. before we require context #. after we required context #. before we template #. before we write a file #. when we called an event #. when we finish """ # time measurement ctime = time.time() # populate env and corrects some conf things request = initialize(conf, env) # load pages/entries and store them in env entrylist, pages = readers.load(conf) env.globals['entrylist'] = entrylist env.globals['pages'] = pages # XXX translations should be moved out of core env.globals['translations'] = translations = [] if force: # acrylamid compile -f cache.clear() # here we store all found filter and their aliases ns = defaultdict(set) # get available filter list, something like with obj.get-function # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...] aflist = filters.get_filters() # ... and get all configured views _views = views.get_views() # filters found in all entries, views and conf.py found = sum( (x.filters for x in entrylist + pages + _views), []) + request['conf']['filters'] for val in found: # first we for `no` and get the function name and arguments f = val[2:] if val.startswith('no') else val fname, fargs = f.split('+')[:1][0], f.split('+')[1:] try: # initialize the filter with its function name and arguments fx = aflist[fname](conf, env, val, *fargs) if val.startswith('no'): fx = filters.disable(fx) except ValueError: try: fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs) except ValueError: raise AcrylamidException('no such filter: %s' % val) ns[fx].add(val) for entry in entrylist + pages: for v in _views: # a list that sorts out conflicting and duplicated filters flst = filters.FilterList() # filters found in this specific entry plus views and conf.py found = entry.filters + v.filters + request['conf']['filters'] for fn in found: fx, _ = next((k for k in ns.iteritems() if fn in k[1])) if fx not in flst: flst.append(fx) # sort them ascending because we will pop within filters.add entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v) # lets offer a last break to populate tags or so # XXX this API component needs a review for v in _views: env = v.context(env, { 'entrylist': entrylist, 'pages': pages, 'translations': translations }) # now teh real thing! for v in _views: # XXX the entry should automatically determine its caller (using # some sys magic to recursively check wether the calling class is # derieved from `View`.) for entry in entrylist + pages + translations: entry.context = v request['pages'], request['translations'] = pages, translations request['entrylist'] = filter(v.condition, entrylist) tt = time.time() for html, path in v.generate(request): helpers.mkfile(html, path, time.time() - tt, **options) tt = time.time() # remove abandoned cache files cache.shutdown() # print a short summary log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'), event.count('update'), event.count('identical') + event.count('skip'), time.time() - ctime)
def compile(conf, env): """The compilation process.""" if env.options.force: cache.clear(conf.get('cache_dir')) # time measurement ctime = time.time() # populate env and corrects some conf things data = initialize(conf, env) # load pages/entries and store them in env rv = dict(zip(['entrylist', 'pages', 'translations', 'drafts'], map(HashableList, readers.load(conf)))) entrylist, pages = rv['entrylist'], rv['pages'] translations, drafts = rv['translations'], rv['drafts'] # load references refs.load(entrylist, pages, translations, drafts) data.update(rv) env.globals.update(rv) # here we store all found filter and their aliases ns = defaultdict(set) # get available filter list, something like with obj.get-function # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...] aflist = filters.get_filters() # ... and get all configured views _views = views.get_views() # filters found in all entries, views and conf.py (skip translations, has no items) found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), []) for val in found: # first we for `no` and get the function name and arguments f = val[2:] if val.startswith('no') else val fname, fargs = f.split('+')[:1][0], f.split('+')[1:] try: # initialize the filter with its function name and arguments fx = aflist[fname](conf, env, val, *fargs) if val.startswith('no'): fx = filters.disable(fx) except ValueError: try: fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs) except ValueError: raise AcrylamidException('no such filter: %s' % val) ns[fx].add(val) for entry in chain(entrylist, pages, drafts): for v in _views: # a list that sorts out conflicting and duplicated filters flst = filters.FilterList() # filters found in this specific entry plus views and conf.py found = entry.filters + v.filters + data['conf']['filters'] for fn in found: fx, _ = next((k for k in ns.iteritems() if fn in k[1])) if fx not in flst: flst.append(fx) # sort them ascending because we will pop within filters.add entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v) # lets offer a last break to populate tags and such for v in _views: env = v.context(conf, env, data) # now teh real thing! for v in _views: for entry in chain(entrylist, pages, translations, drafts): entry.context = v for var in 'entrylist', 'pages', 'translations', 'drafts': data[var] = HashableList(filter(v.condition, locals()[var])) \ if v.condition else locals()[var] tt = time.time() for buf, path in v.generate(conf, env, data): try: helpers.mkfile(buf, path, time.time()-tt, **env.options.__dict__) finally: buf.close() tt = time.time() # copy modified/missing assets to output assets.compile(conf, env) # save conf/environment hash and new/changed/unchanged references helpers.memoize('Configuration', hash(conf)) helpers.memoize('Environment', hash(env)) refs.save() # remove abandoned cache files cache.shutdown() # print a short summary log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'), event.count('update'), event.count('identical') + event.count('skip'), time.time() - ctime)
def compile(conf, env, force=False, **options):

    # time measurement
    ctime = time.time()

    # populate env and correct some conf things
    request = initialize(conf, env)

    if force:
        # acrylamid compile -f
        cache.clear()

    # list of FileEntry objects reverse sorted by date.
    entrylist = sorted([FileEntry(e, conf) for e in
                        utils.filelist(conf['content_dir'], conf.get('entries_ignore', []))],
                       key=lambda k: k.date, reverse=True)

    # here we store all possible filter configurations
    ns = set()

    # get available filter list, something like with obj.get-function
    # list = [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...]
    aflist = filters.get_filters()

    # ... and get all configured views
    _views = views.get_views()

    # filters found in all entries, views and conf.py
    found = sum((x.filters for x in entrylist + _views), []) + request['conf']['filters']

    for val in found:
        # first we check for `no` and get the function name and arguments
        f = val[2:] if val.startswith('no') else val
        fname, fargs = f.split('+')[:1][0], f.split('+')[1:]

        try:
            # initialize the filter with its function name and arguments
            fx = aflist[fname](val, *fargs)
            if val.startswith('no'):
                fx.transform = lambda x, y, *z: x
                fx.__hash__ = lambda: 0
        except ValueError:
            try:
                fx = aflist[val.split('+')[:1][0]](val, *fargs)
            except ValueError:
                raise AcrylamidException('no such filter: %s' % val)

        ns.add(fx)

    for entry in entrylist:
        for v in _views:

            # a list that sorts out conflicting and duplicated filters
            flst = filters.FilterList()

            # filters found in this specific entry plus views and conf.py
            found = entry.filters + v.filters + request['conf']['filters']

            for fn in found:
                fx = filter(lambda k: fn == k.name, ns)[0]
                if fx not in flst:
                    flst.append(fx)

            # sort them ascending because we will pop within filters.add
            entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)),
                              context=v.__class__.__name__)

    # let's offer a last break to populate tags or so
    # XXX this API component needs a review
    for v in _views:
        env = v.context(env, {'entrylist': filter(v.condition, entrylist)})

    # now the real thing!
    for v in _views:

        # XXX the entry should automatically determine its caller (using
        # some sys magic to recursively check whether the calling class is
        # derived from `View`.)
        for entry in entrylist:
            entry.context = v.__class__.__name__

        request['entrylist'] = filter(v.condition, entrylist)
        tt = time.time()

        for html, path in v.generate(request):
            helpers.mkfile(html, path, time.time() - tt, **options)
            tt = time.time()

    # remove abandoned cache files
    cache.shutdown()

    log.info('Blog compiled in %.2fs' % (time.time() - ctime))

def compile(conf, env): """The compilation process.""" hooks.initialize(conf, env) hooks.run(conf, env, 'pre') if env.options.force: cache.clear(conf.get('cache_dir')) # time measurement ctime = time.time() # populate env and corrects some conf things data = initialize(conf, env) # load pages/entries and store them in env rv = dict( zip(['entrylist', 'pages', 'translations', 'drafts'], map(HashableList, readers.load(conf)))) entrylist, pages = rv['entrylist'], rv['pages'] translations, drafts = rv['translations'], rv['drafts'] # load references refs.load(entrylist, pages, translations, drafts) data.update(rv) env.globals.update(rv) # here we store all found filter and their aliases ns = defaultdict(set) # [<class head_offset.Headoffset at 0x1014882c0>, <class html.HTML at 0x101488328>,...] aflist = filters.get_filters() # ... and get all configured views _views = views.get_views() # filters found in all entries, views and conf.py (skip translations, has no items) found = sum((x.filters for x in chain(entrylist, pages, drafts, _views, [conf])), []) for val in found: # first we for `no` and get the function name and arguments f = val[2:] if val.startswith('no') else val fname, fargs = f.split('+')[:1][0], f.split('+')[1:] try: # initialize the filter with its function name and arguments fx = aflist[fname](conf, env, val, *fargs) if val.startswith('no'): fx = filters.disable(fx) except ValueError: try: fx = aflist[val.split('+')[:1][0]](conf, env, val, *fargs) except ValueError: raise AcrylamidException('no such filter: %s' % val) ns[fx].add(val) # include actual used filters to trigger modified state env.filters = HashableList(iterkeys(ns)) for entry in chain(entrylist, pages, drafts): for v in _views: # a list that sorts out conflicting and duplicated filters flst = filters.FilterList() # filters found in this specific entry plus views and conf.py found = entry.filters + v.filters + data['conf']['filters'] for fn in found: fx, _ = next((k for k in iteritems(ns) if fn in k[1])) if fx not in flst: flst.append(fx) # sort them ascending because we will pop within filters.add entry.filters.add(sorted(flst, key=lambda k: (-k.priority, k.name)), context=v) # lets offer a last break to populate tags and such for v in _views: env = v.context(conf, env, data) # now teh real thing! for v in _views: for entry in chain(entrylist, pages, translations, drafts): entry.context = v for var in 'entrylist', 'pages', 'translations', 'drafts': data[var] = HashableList(filter(v.condition, locals()[var])) \ if v.condition else locals()[var] tt = time.time() for buf, path in v.generate(conf, env, data): try: helpers.mkfile(buf, path, time.time() - tt, ns=v.name, force=env.options.force, dryrun=env.options.dryrun) except UnicodeError: log.exception(path) finally: buf.close() tt = time.time() # copy modified/missing assets to output assets.compile(conf, env) # wait for unfinished hooks hooks.shutdown() # run post hooks (blocks) hooks.run(conf, env, 'post') # save conf/environment hash and new/changed/unchanged references helpers.memoize('Configuration', hash(conf)) helpers.memoize('Environment', hash(env)) refs.save() # remove abandoned cache files cache.shutdown() # print a short summary log.info('%i new, %i updated, %i skipped [%.2fs]', event.count('create'), event.count('update'), event.count('identical') + event.count('skip'), time.time() - ctime)
def generate(self, conf, env, data):

    pathes, entrylist = set(), data[self.type]
    unmodified = not env.modified and not conf.modified

    for i, entry in enumerate(entrylist):

        if entry.hasproperty('permalink'):
            path = joinurl(conf['output_dir'], entry.permalink)
        else:
            path = joinurl(conf['output_dir'], expand(self.path, entry))

        if isfile(path) and path in pathes:
            try:
                os.remove(path)
            finally:
                other = [e.filename for e in entrylist
                         if e is not entry and e.permalink == entry.permalink][0]
                log.error("title collision %s caused by %s and %s",
                          entry.permalink, entry.filename, other)
                raise SystemExit

        pathes.add(path)
        next, prev = self.next(entrylist, i), self.prev(entrylist, i)

        # per-entry template
        tt = env.engine.fromfile(env, entry.props.get('layout', self.template))

        if all([isfile(path), unmodified, not tt.modified, not entry.modified,
                not modified(*references(entry))]):
            event.skip(self.name, path)
        else:
            html = tt.render(conf=conf, entry=entry,
                             env=union(env, entrylist=[entry],
                                       type=self.__class__.__name__.lower(),
                                       prev=prev, next=next,
                                       route=expand(self.path, entry)))
            yield html, path

        # check if any resources need to be moved
        if entry.hasproperty('copy'):
            for res_src in entry.resources:
                res_dest = join(dirname(path), basename(res_src))
                # Note, presence of res_src check in FileReader.getresources
                if isfile(res_dest) and getmtime(res_dest) > getmtime(res_src):
                    event.skip(self.name, res_dest)
                    continue
                try:
                    fp = io.open(res_src, 'rb')
                    # use mkfile rather than yield so different ns can be specified (and filtered by sitemap)
                    mkfile(fp, res_dest, ns='resource',
                           force=env.options.force, dryrun=env.options.dryrun)
                except IOError as e:
                    log.warn("Failed to copy resource '%s' whilst processing '%s' (%s)" %
                             (res_src, entry.filename, e.strerror))