def _build(self, env, output_path, force, no_filters, parent_filters=None):
    """Internal recursive build method.

    ``parent_filters`` are the input filters handed down by the parent
    bundle; they are merged with this bundle's own filters and applied
    to each source hunk before our own output filters run.
    """
    # Fix: use None as the default instead of a mutable list literal;
    # a shared [] default would leak state between calls.
    if parent_filters is None:
        parent_filters = []
    # TODO: We could support a nested bundle downgrading its debug
    # setting from "filters" to "merge only", i.e. enabling
    # ``no_filters``. We cannot support downgrading to
    # "full debug/no merge" (debug=True), of course.
    #
    # Right now we simply use the debug setting of the root bundle
    # we build, and it overrides all the nested bundles. If we
    # allow nested bundles to overwrite the debug value of parent
    # bundles, as described above, then we should also deal with
    # a child bundle enabling debug=True during a merge, i.e.
    # raising an error rather than ignoring it as we do now.
    resolved_contents = self.resolve_contents(env)
    if not resolved_contents:
        raise BuildError('empty bundle cannot be built')

    # Ensure that the filters are ready
    for filter in self.filters:
        filter.set_environment(env)

    # Apply input filters to all the contents. Note that we use
    # both this bundle's filters as well as those given to us by
    # the parent. We ONLY do this for the input filters, because
    # they need to be applied before we apply our own output
    # filters.
    # TODO: Note that merge_filters() removes duplicates. Is this
    # really the right thing to do, or does it just confuse things
    # due to there now being different kinds of behavior...
    combined_filters = merge_filters(self.filters, parent_filters)
    cache = get_cache(env)
    hunks = []
    for c in resolved_contents:
        if isinstance(c, Bundle):
            # Nested bundle: recurse, passing our combined filters
            # down so they are applied as input filters there.
            hunk = c._build(env, output_path, force, no_filters,
                            combined_filters)
            hunks.append(hunk)
        else:
            if is_url(c):
                hunk = UrlHunk(c)
            else:
                hunk = FileHunk(env.abspath(c))
            if no_filters:
                hunks.append(hunk)
            else:
                hunks.append(apply_filters(
                    hunk, combined_filters, 'input', cache,
                    output_path=output_path))

    # Return all source hunks as one, with output filters applied
    final = merge(hunks)
    if no_filters:
        return final
    else:
        return apply_filters(final, self.filters, 'output', cache)
def _build(self, env, output_path, force, no_filters, parent_filters=None):
    """Internal recursive build method.

    ``parent_filters`` is the accumulated list of input filters from
    all ancestors in the bundle hierarchy; together with our own
    filters it is applied to every source hunk before this bundle's
    output filters run.
    """
    # Fix: mutable default argument replaced by a None sentinel so
    # the list is not shared across invocations.
    if parent_filters is None:
        parent_filters = []
    # TODO: We could support a nested bundle downgrading its debug
    # setting from "filters" to "merge only", i.e. enabling
    # ``no_filters``. We cannot support downgrading to
    # "full debug/no merge" (debug=True), of course.
    #
    # Right now we simply use the debug setting of the root bundle
    # we build, and it overrides all the nested bundles. If we
    # allow nested bundles to overwrite the debug value of parent
    # bundles, as described above, then we should also deal with
    # a child bundle enabling debug=True during a merge, i.e.
    # raising an error rather than ignoring it as we do now.
    resolved_contents = self.resolve_contents(env)
    if not resolved_contents:
        raise BuildError('empty bundle cannot be built')

    # Ensure that the filters are ready
    for filter in self.filters:
        filter.set_environment(env)

    # Apply input filters to all the contents, using both this
    # bundle's filters and those handed down by the parent. Only
    # input filters are combined this way; output filters below are
    # ours alone.
    # TODO: Note that merge_filters() removes duplicates. Is this
    # really the right thing to do, or does it just confuse things
    # due to there now being different kinds of behavior...
    combined_filters = merge_filters(self.filters, parent_filters)
    cache = get_cache(env)
    hunks = []
    for c in resolved_contents:
        if isinstance(c, Bundle):
            # Recurse into child bundles with the combined filters.
            hunk = c._build(env, output_path, force, no_filters,
                            combined_filters)
            hunks.append(hunk)
        else:
            if is_url(c):
                hunk = UrlHunk(c)
            else:
                hunk = FileHunk(env.abspath(c))
            if no_filters:
                hunks.append(hunk)
            else:
                hunks.append(apply_filters(
                    hunk, combined_filters, 'input', cache,
                    output_path=output_path))

    # Return all source hunks as one, with output filters applied
    final = merge(hunks)
    if no_filters:
        return final
    else:
        return apply_filters(final, self.filters, 'output', cache)
def _merge_and_apply(self, env, output_path, force, parent_debug=None,
                     parent_filters=None, extra_filters=None,
                     disable_cache=None):
    """Internal recursive build method.

    ``parent_debug`` is the debug setting used by the parent bundle.
    This is not necessarily ``bundle.debug``, but rather what the
    calling method in the recursion tree is actually using.

    ``parent_filters`` are what the parent passes along, for us to
    be applied as input filters. Like ``parent_debug``, it is a
    collection of the filters of all parents in the hierarchy.

    ``extra_filters`` may exist if the parent is a container bundle
    passing filters along to its children; these are applied as
    input and output filters (since there is no parent who could do
    the latter), and they are not passed further down the hierarchy
    (but instead they become part of ``parent_filters``).

    ``disable_cache`` is necessary because in some cases, when an
    external bundle dependency has changed, we must not rely on the
    cache, since the cache key is not taking into account changes
    in those dependencies (for now).
    """
    # Fix: mutable default arguments ([]), replaced by None sentinels
    # so the lists are not shared across calls.
    if parent_filters is None:
        parent_filters = []
    if extra_filters is None:
        extra_filters = []

    # Determine the debug option to work with, which tells us what
    # building the bundle entails. The reduce picks the first
    # non-None value out of self, parent, and environment.
    debug = reduce(lambda x, y: x if x is not None else y,
                   [self.debug, parent_debug, env.debug])
    if debug == 'merge':
        no_filters = True
    elif debug is True:
        # This should be caught by urls().
        if any([self.debug, parent_debug]):
            raise BuildError("a bundle with debug=True cannot be built")
        else:
            raise BuildError("cannot build while in debug mode")
    elif debug is False:
        no_filters = False
    else:
        raise BundleError('Invalid debug value: %s' % debug)

    # Prepare contents
    resolved_contents = self.resolve_contents(env, force=True)
    if not resolved_contents:
        raise BuildError('empty bundle cannot be built')

    # Prepare filters
    filters = merge_filters(self.filters, extra_filters)
    for filter in filters:
        filter.set_environment(env)

    # Unless we have been told by our caller to use or not use the
    # cache for this, try to decide for ourselves. The issue here
    # is that when a bundle has dependencies, like a sass file with
    # includes otherwise not listed in the bundle sources, a change
    # in such an external include would not influence the cache key,
    # thus the use of the cache would cause such a change to be
    # ignored. For now, we simply do not use the cache for any
    # bundle with dependencies. Another option would be to read the
    # contents of all files declared via "depends", and use them as
    # a cache key modifier; for now the performance impact is a
    # concern.
    #
    # Note: This decision only affects the current bundle instance.
    # Even if dependencies cause us to ignore the cache for this
    # bundle instance, child bundles may still use it!
    if disable_cache is None:
        actually_skip_cache_here = bool(self.resolve_depends(env))
    else:
        actually_skip_cache_here = disable_cache

    # Apply input filters to all the contents. Note that we use
    # both this bundle's filters as well as those given to us by
    # the parent. We ONLY do this for the input filters, because
    # they need to be applied before we apply our own output
    # filters.
    combined_filters = merge_filters(filters, parent_filters)
    hunks = []
    for _, c in resolved_contents:
        if isinstance(c, Bundle):
            hunk = c._merge_and_apply(
                env, output_path, force, debug, combined_filters,
                disable_cache=disable_cache)
            hunks.append(hunk)
        else:
            if is_url(c):
                hunk = UrlHunk(c)
            else:
                hunk = FileHunk(c)
            if no_filters:
                hunks.append(hunk)
            else:
                hunks.append(apply_filters(
                    hunk, combined_filters, 'input', env.cache,
                    actually_skip_cache_here,
                    output_path=output_path))

    # Return all source hunks as one, with output filters applied.
    # Fix: 'except IOError, e' is Python-2-only syntax; 'as e' works
    # on Python 2.6+ and Python 3.
    try:
        final = merge(hunks)
    except IOError as e:
        raise BuildError(e)

    if no_filters:
        return final
    else:
        # TODO: So far, all the situations where bundle dependencies
        # are used/useful, are based on input filters having those
        # dependencies. Is it even required to consider them here
        # with respect to the cache?
        return apply_filters(final, filters, 'output', env.cache,
                             actually_skip_cache_here)
output_path=output_path)) # Return all source hunks as one, with output filters applied try: final = merge(hunks) except IOError, e: raise BuildError(e) if no_filters: return final else: # TODO: So far, all the situations where bundle dependencies # are used/useful, are based on input filters having those # dependencies. Is it even required to consider them here # with respect to the cache? return apply_filters(final, filters, 'output', env.cache, actually_skip_cache_here) def _build(self, env, extra_filters=[], force=None, output=None): """Internal bundle build function. This actually tries to build this very bundle instance, as opposed to the public-facing ``build()``, which first deals with the possibility that we are a container bundle, i.e. having no files of our own. First checks whether an update for this bundle is required, via the configured ``updater`` (which is almost always the timestamp-based one). Unless ``force`` is given, in which case the bundle will always be built, without considering timestamps.
def _merge_and_apply(self, env, output_path, force, parent_debug=None,
                     parent_filters=None, extra_filters=None,
                     disable_cache=False):
    """Internal recursive build method.

    ``parent_debug`` is the debug setting used by the parent bundle.
    This is not necessarily ``bundle.debug``, but rather what the
    calling method in the recursion tree is actually using.

    ``parent_filters`` are what the parent passes along, for us to
    be applied as input filters. Like ``parent_debug``, it is a
    collection of the filters of all parents in the hierarchy.

    ``extra_filters`` may exist if the parent is a container bundle
    passing filters along to its children; these are applied as
    input and output filters (since there is no parent who could do
    the latter), and they are not passed further down the hierarchy
    (but instead they become part of ``parent_filters``).

    ``disable_cache`` is necessary because in some cases, when an
    external bundle dependency has changed, we must not rely on the
    cache.
    """
    # Fix: mutable default arguments ([]) replaced by None sentinels
    # so state is not shared between invocations.
    if parent_filters is None:
        parent_filters = []
    if extra_filters is None:
        extra_filters = []

    # Determine the debug option to work with, which tells us what
    # building the bundle entails. The reduce picks the first
    # non-None value out of self, parent, and environment.
    debug = reduce(lambda x, y: x if x is not None else y,
                   [self.debug, parent_debug, env.debug])
    if debug == 'merge':
        no_filters = True
    elif debug is True:
        # This should be caught by urls().
        if any([self.debug, parent_debug]):
            raise BuildError("a bundle with debug=True cannot be built")
        else:
            raise BuildError("cannot build while in debug mode")
    elif debug is False:
        no_filters = False
    else:
        raise BundleError('Invalid debug value: %s' % debug)

    # Prepare contents
    resolved_contents = self.resolve_contents(env, force=True)
    if not resolved_contents:
        raise BuildError('empty bundle cannot be built')

    # Prepare filters
    filters = merge_filters(self.filters, extra_filters)
    for filter in filters:
        filter.set_environment(env)

    # Apply input filters to all the contents. Note that we use
    # both this bundle's filters as well as those given to us by
    # the parent. We ONLY do this for the input filters, because
    # they need to be applied before we apply our own output
    # filters.
    combined_filters = merge_filters(filters, parent_filters)
    hunks = []
    for _, c in resolved_contents:
        if isinstance(c, Bundle):
            hunk = c._merge_and_apply(
                env, output_path, force, debug, combined_filters,
                disable_cache=disable_cache)
            hunks.append(hunk)
        else:
            if is_url(c):
                hunk = UrlHunk(c)
            else:
                hunk = FileHunk(c)
            if no_filters:
                hunks.append(hunk)
            else:
                hunks.append(apply_filters(
                    hunk, combined_filters, 'input', env.cache,
                    disable_cache, output_path=output_path))

    # Return all source hunks as one, with output filters applied.
    # Fix: 'except IOError, e' is Python-2-only syntax; 'as e' works
    # on Python 2.6+ and Python 3.
    try:
        final = merge(hunks)
    except IOError as e:
        raise BuildError(e)

    if no_filters:
        return final
    else:
        return apply_filters(final, filters, 'output', env.cache,
                             disable_cache)
else: hunks.append(apply_filters( hunk, combined_filters, 'input', env.cache, disable_cache, output_path=output_path)) # Return all source hunks as one, with output filters applied try: final = merge(hunks) except IOError, e: raise BuildError(e) if no_filters: return final else: return apply_filters(final, filters, 'output', env.cache, disable_cache) def _build(self, env, extra_filters=[], force=False): """Internal bundle build function. Check if an update for this bundle is required, and if so, build it. A ``FileHunk`` will be returned. TODO: Support locking. When called from inside a template tag, this should lock, so that multiple requests don't all start to build. When called from the command line, there is no need to lock. """
def _merge_and_apply(self, env, output_path, force, parent_debug=None,
                     parent_filters=None, extra_filters=None,
                     disable_cache=False):
    """Internal recursive build method.

    ``parent_debug`` is the debug setting used by the parent bundle.
    This is not necessarily ``bundle.debug``, but rather what the
    calling method in the recursion tree is actually using.

    ``parent_filters`` are what the parent passes along, for us to
    be applied as input filters. Like ``parent_debug``, it is a
    collection of the filters of all parents in the hierarchy.

    ``extra_filters`` may exist if the parent is a container bundle
    passing filters along to its children; these are applied as
    input and output filters (since there is no parent who could do
    the latter), and they are not passed further down the hierarchy
    (but instead they become part of ``parent_filters``).

    ``disable_cache`` is necessary because in some cases, when an
    external bundle dependency has changed, we must not rely on the
    cache.
    """
    # Fix: mutable default arguments ([]) replaced by None sentinels
    # so the lists are not shared across calls.
    if parent_filters is None:
        parent_filters = []
    if extra_filters is None:
        extra_filters = []

    # Determine the debug option to work with, which tells us what
    # building the bundle entails. The reduce picks the first
    # non-None value out of self, parent, and environment.
    debug = reduce(lambda x, y: x if x is not None else y,
                   [self.debug, parent_debug, env.debug])
    if debug == 'merge':
        no_filters = True
    elif debug is True:
        # This should be caught by urls().
        if any([self.debug, parent_debug]):
            raise BuildError("a bundle with debug=True cannot be built")
        else:
            raise BuildError("cannot build while in debug mode")
    elif debug is False:
        no_filters = False
    else:
        raise BundleError('Invalid debug value: %s' % debug)

    # Prepare contents
    resolved_contents = self.resolve_contents(env, force=True)
    if not resolved_contents:
        raise BuildError('empty bundle cannot be built')

    # Prepare filters
    filters = merge_filters(self.filters, extra_filters)
    for filter in filters:
        filter.set_environment(env)

    # Apply input filters to all the contents. Note that we use
    # both this bundle's filters as well as those given to us by
    # the parent. We ONLY do this for the input filters, because
    # they need to be applied before we apply our own output
    # filters.
    combined_filters = merge_filters(filters, parent_filters)
    hunks = []
    for _, c in resolved_contents:
        if isinstance(c, Bundle):
            hunk = c._merge_and_apply(env, output_path, force, debug,
                                      combined_filters,
                                      disable_cache=disable_cache)
            hunks.append(hunk)
        else:
            if is_url(c):
                hunk = UrlHunk(c)
            else:
                hunk = FileHunk(c)
            if no_filters:
                hunks.append(hunk)
            else:
                hunks.append(apply_filters(
                    hunk, combined_filters, 'input', env.cache,
                    disable_cache, output_path=output_path))

    # Return all source hunks as one, with output filters applied.
    # Fix: 'except IOError, e' is Python-2-only syntax; 'as e' works
    # on Python 2.6+ and Python 3.
    try:
        final = merge(hunks)
    except IOError as e:
        raise BuildError(e)

    if no_filters:
        return final
    else:
        return apply_filters(final, filters, 'output', env.cache,
                             disable_cache)
combined_filters, 'input', env.cache, disable_cache, output_path=output_path)) # Return all source hunks as one, with output filters applied try: final = merge(hunks) except IOError, e: raise BuildError(e) if no_filters: return final else: return apply_filters(final, filters, 'output', env.cache, disable_cache) def _build(self, env, extra_filters=[], force=None): """Internal bundle build function. Check if an update for this bundle is required, and if so, build it. If ``force`` is given, the bundle will always be built, without checking for an update. If no ``updater`` is configured, then ``force`` defaults to ``True``. A ``FileHunk`` will be returned, or in a certain case, with no updater defined and force=False, the return value may be ``False``.