Example #1
    def get_version(self, env=None, refresh=False):
        """Return the current version of the Bundle.

        If the version is not cached in memory, it will first look in the
        manifest, then ask the versioner.

        ``refresh`` causes a value in memory to be ignored, and the version
        to be looked up anew.
        """
        env = self._get_env(env)
        if not self.version or refresh:
            version = None
            # First, try a manifest. This should be the fastest way.
            if env.manifest:
                version = env.manifest.query(self, env)
            # Often the versioner is able to help.
            if not version:
                from version import VersionIndeterminableError
                if env.versions:
                    try:
                        version = env.versions.determine_version(self, env)
                        assert version
                    except VersionIndeterminableError, e:
                        reason = e
                else:
                    reason = '"versions" option not set'
            if not version:
                raise BundleError(
                    ('Cannot find version of %s. There is no manifest '
                     'which knows the version, and it cannot be '
                     'determined dynamically, because: %s') % (self, reason))
            self.version = version
        return self.version
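
For context, a minimal usage sketch of the lookup order above: the manifest is consulted first, then the versioner, and the result is cached on the bundle. It assumes a webassets-style ``Environment``/``Bundle`` setup; the file names and the ``'hash'`` versioner setting are illustrative, not taken from the example.

    # Hedged sketch, assuming a webassets-style Environment/Bundle API.
    from webassets import Environment, Bundle

    env = Environment('./static', '/static')
    env.versions = 'hash'                   # enable a versioner as the fallback
    js = Bundle('a.js', 'b.js', output='gen/packed.%(version)s.js')
    env.register('all_js', js)

    v1 = js.get_version(env)                # manifest first, then the versioner
    v2 = js.get_version(env)                # answered from the in-memory cache
    v3 = js.get_version(env, refresh=True)  # ignore the cache, look up anew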
Example #2
    def _urls(self, env, extra_filters, *args, **kwargs):
        # Resolve debug: see whether we have to merge the contents
        debug = self.debug if self.debug is not None else env.debug
        if debug == 'merge':
            supposed_to_merge = True
        elif debug is True:
            supposed_to_merge = False
        elif debug is False:
            supposed_to_merge = True
        else:
            raise BundleError('Invalid debug value: %s' % debug)

        if supposed_to_merge and (self.filters or self.output):
            # We need to build this bundle, unless a) the configuration
            # tells us not to ("supposed_to_merge"), or b) this bundle
            # isn't actually configured to be built, that is, has no
            # filters and no output target.
            hunk = self._build(env,
                               extra_filters=extra_filters,
                               force=False,
                               *args,
                               **kwargs)
            return [self._make_url(env, self.output)]
        else:
            # We either have no files (nothing to build), or we are
            # in debug mode: Instead of building the bundle, we
            # source all contents instead.
            urls = []
            for c, _ in self.resolve_contents(env):
                if isinstance(c, Bundle):
                    urls.extend(c.urls(env, *args, **kwargs))
                else:
                    urls.append(self._make_url(env, c, expire=False))
            return urls
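
The mapping from the three debug levels to ``supposed_to_merge`` is easy to misread, so here is the same truth table restated as a standalone helper (a sketch for illustration, not part of the library):

    def should_merge(debug):
        """Map a debug level to the merge decision used in _urls().

        False   -> merge and apply filters (normal production build)
        'merge' -> merge the sources, but skip the filters
        True    -> do not merge; serve every source file individually
        """
        if debug in (False, 'merge'):
            return True
        if debug is True:
            return False
        raise ValueError('Invalid debug value: %s' % debug)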
Example #3
    def _urls(self, env, extra_filters, *args, **kwargs):
        """Return a list of urls for this bundle, and all subbundles,
        and, when it becomes necessary, start a build process.
        """

        # Look at the debug value to see if this bundle should return the
        # source urls (in debug mode), or a single url of the bundle in built
        # form. Once a bundle needs to be built, all of its child bundles
        # are built as well of course, so at this point we leave the urls()
        # recursion and start a build() recursion.
        debug = _effective_debug_level(env, self, extra_filters)
        if debug == 'merge':
            supposed_to_merge = True
        elif debug is True:
            supposed_to_merge = False
        elif debug is False:
            supposed_to_merge = True
        else:
            raise BundleError('Invalid debug value: %s' % debug)

        # We will output a single url for this bundle unless a) the
        # configuration tells us to output the source urls
        # ("supposed_to_merge"), or b) this bundle isn't actually configured to
        # be built, that is, has no filters and no output target.
        if supposed_to_merge and (self.filters or self.output):
            # With ``auto_build``, build the bundle to make sure the output is
            # up to date; otherwise, we simply assume the file already exists,
            # without spending any IO ops to check.
            if env.auto_build:
                self._build(env,
                            extra_filters=extra_filters,
                            force=False,
                            *args,
                            **kwargs)
            return [self._make_output_url(env)]
        else:
            # We either have no files (nothing to build), or we are
            # in debug mode: Instead of building the bundle, we
            # source all contents instead.
            urls = []
            for org, cnt in self.resolve_contents(env):
                if isinstance(cnt, Bundle):
                    urls.extend(org.urls(env, *args, **kwargs))
                elif is_url(cnt):
                    urls.append(cnt)
                else:
                    try:
                        url = env.resolver.resolve_source_to_url(cnt, org)
                    except ValueError:
                        # A ValueError means we cannot generate a url for a
                        # path outside the media directory. If that happens,
                        # we copy the file into the media directory.
                        external = pull_external(env, cnt)
                        url = env.resolver.resolve_source_to_url(external, org)

                    urls.append(url)
            return urls
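
A hedged sketch of how the two branches surface to a caller, reusing the ``env`` and ``js`` objects from the earlier sketch; the resulting urls are invented, and ``urls()`` is assumed to delegate to ``_urls()`` as in the example.

    # In debug mode the bundle lists its sources; otherwise it builds (when
    # auto_build is on) and returns a single output url.
    env.debug = True
    print(js.urls(env))    # e.g. ['/static/a.js', '/static/b.js']

    env.debug = False
    print(js.urls(env))    # e.g. ['/static/gen/packed.js?1a2b3c']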
Example #4
def cmp_debug_levels(level1, level2):
    """cmp() for debug levels, returns -1, 0 or +1 indicating which debug
    level is higher than the other one."""
    level_ints = { False: 0, 'merge': 1, True: 2 }
    try:
        return cmp(level_ints[level1], level_ints[level2])
    except KeyError, e:
        # Not sure if a dependency on BundleError is proper here. Validating
        # debug values should probably be done on assign. But because this
        # needs to happen in two places (Environment and Bundle) we do it here.
        raise BundleError('Invalid debug value: %s' % e)
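
``cmp()`` and the ``except KeyError, e`` syntax above are Python-2-only; a rough Python 3 equivalent of the same comparison might look like this (a sketch, not the library's code):

    LEVEL_INTS = {False: 0, 'merge': 1, True: 2}

    def cmp_debug_levels_py3(level1, level2):
        """Return -1, 0 or +1, like cmp(), for two debug levels."""
        try:
            a, b = LEVEL_INTS[level1], LEVEL_INTS[level2]
        except KeyError as e:
            raise ValueError('Invalid debug value: %s' % e)
        return (a > b) - (a < b)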
Example #5
    def resolve_contents(self, env=None, force=False):
        """Return an actual list of source files.

        What the user specifies as the bundle contents cannot be
        processed directly. There may be glob patterns of course. We
        may need to search the load path. It's common for third party
        extensions to provide support for referencing assets spread
        across multiple directories.

        This passes everything through :class:`Environment.resolver`,
        through which this process can be customized.

        At this point, we also validate source paths to complain about
        missing files early.

        The return value is a list of 2-tuples ``(original_item,
        abspath)``. In the case of urls and nested bundles both tuple
        values are the same.

        Set ``force`` to ignore any cache, and always re-resolve
        glob patterns.
        """
        env = self._get_env(env)

        # TODO: We cache the values, which in theory is problematic, since
        # due to changes in the env object, the result of the globbing may
        # change. Not to mention that a different env object may be passed
        # in. We should find a fix for this.
        if getattr(self, '_resolved_contents', None) is None or force:
            resolved = []
            for item in self.contents:
                try:
                    result = env.resolver.resolve_source(item)
                except IOError, e:
                    raise BundleError(e)
                if not isinstance(result, list):
                    result = [result]

                # Exclude the output file.
                # TODO: This will not work for nested bundle contents. If it
                # doesn't work properly anyway, should we do it in the first
                # place? If there are multiple versions, it will fail as well.
                # TODO: There is also the question whether we can/should
                # exclude glob duplicates.
                if self.output:
                    try:
                        result.remove(self.resolve_output(env))
                    except (ValueError, BundleError):
                        pass

                resolved.extend(map(lambda r: (item, r), result))

            self._resolved_contents = resolved
        return self._resolved_contents
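
To make the documented return shape concrete, a hedged sketch of iterating the result; the paths are invented, ``js`` and ``env`` come from the earlier sketch, and ``is_url`` is the helper already used in the examples.

    for original_item, abspath in js.resolve_contents(env):
        if isinstance(original_item, Bundle) or is_url(original_item):
            # Urls and nested bundles: both tuple values are the same.
            continue
        print(original_item, '->', abspath)
        # e.g.  js/*.js -> /srv/app/static/js/app.js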
Example #6
    def resolve_contents(self, env=None, force=False):
        """Convert bundle contents into something that can be easily
        processed.

        - Glob patterns are resolved
        - Validate all the source paths to complain about
          missing files early.
        - Third party extensions get to hook into this to
          provide a basic virtualized filesystem.

        The return value is a list of 2-tuples (relpath, abspath).
        The first element is the path that is assumed to be relative
        to the ``Environment.directory`` value. We need it to construct
        urls to the source files.
        The second element is the absolute path to the actual location
        of the file. Depending on the magic a third party extension
        does, this may be somewhere completely different.

        URLs and nested Bundles are returned as a 2-tuple where
        both items are the same.

        Set ``force`` to ignore any cache, and always re-resolve
        glob patterns.
        """
        env = self._get_env(env)

        # TODO: We cache the values, which in theory is problematic, since
        # due to changes in the env object, the result of the globbing may
        # change. Not to mention that a different env object may be passed
        # in. We should find a fix for this.
        if getattr(self, '_resolved_contents', None) is None or force:
            l = []
            for item in self.contents:
                if isinstance(item, Bundle):
                    l.append((item, item))
                else:
                    if is_url(item):
                        # Is a URL
                        l.append((item, item))
                    elif isinstance(item, basestring) and has_magic(item):
                        # Is globbed pattern
                        path = env.abspath(item)
                        for f in glob.glob(path):
                            l.append((f[len(path) - len(item):], f))
                    else:
                        # Is just a normal path; Send it through
                        # _normalize_source_path().
                        try:
                            l.append((item, env._normalize_source_path(item)))
                        except IOError, e:
                            raise BundleError(e)
            self._resolved_contents = l
        return self._resolved_contents
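
The slice ``f[len(path) - len(item):]`` in the glob branch is the subtle part: it recovers the item-relative path from the absolute glob match. A worked illustration with invented paths:

    item = 'js/*.js'                          # what the user put in contents
    path = '/srv/app/static/js/*.js'          # env.abspath(item)
    f = '/srv/app/static/js/app.js'           # one glob.glob(path) match

    # len(path) - len(item) is the length of the directory prefix that
    # abspath() prepended, so slicing it off yields an item-relative path.
    relpath = f[len(path) - len(item):]
    print(relpath)                            # 'js/app.js'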
Example #7
 def resolve_depends(self, env):
     # TODO: Caching is as problematic here as it is in resolve_contents().
     if not self.depends:
         return []
     if getattr(self, '_resolved_depends', None) is None:
         resolved = []
         for item in self.depends:
             try:
                 result = env.resolver.resolve_source(item)
             except IOError, e:
                 raise BundleError(e)
             if not isinstance(result, list):
                 result = [result]
             resolved.extend(result)
         self._resolved_depends = resolved
     return self._resolved_depends
Example #8
 def resolve_depends(self, env):
     # TODO: Caching is as problematic here as it is in resolve_contents().
     if not self.depends:
         return []
     if getattr(self, '_resolved_depends', None) is None:
         l = []
         for item in self.depends:
             if has_magic(item):
                 dir = env.abspath(item)
                 for f in glob.glob(dir):
                     l.append(f)
             else:
                 try:
                     l.append(env._normalize_source_path(item))
                 except IOError, e:
                     raise BundleError(e)
         self._resolved_depends = l
     return self._resolved_depends
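
For context, a hedged sketch of declaring ``depends`` so that the globbing branch above is exercised; the file names and the ``sass`` filter are illustrative only.

    # Files listed in ``depends`` are not bundle contents, but a change to
    # any of them should still trigger a rebuild (e.g. Sass partials that
    # are pulled in via @import rather than listed directly).
    css = Bundle('css/main.scss', filters='sass', output='gen/main.css',
                 depends=('css/includes/*.scss',))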
Example #9
    def _urls(self, env, extra_filters, *args, **kwargs):
        """Return a list of urls for this bundle, and all subbundles,
        and, when it becomes necessary, start a build process.
        """

        # Look at the debug value to see whether this bundle should return the
        # source urls (in debug mode), or a single url of the bundle in built
        # form. Once a bundle needs to be built, all of its child bundles
        # are built as well of course, so at this point we leave the urls()
        # recursion and start a build() recursion.
        debug = _effective_debug_level(env, self, extra_filters)
        if debug == 'merge':
            supposed_to_merge = True
        elif debug is True:
            supposed_to_merge = False
        elif debug is False:
            supposed_to_merge = True
        else:
            raise BundleError('Invalid debug value: %s' % debug)

        if supposed_to_merge and (self.filters or self.output):
            # We need to build this bundle, unless a) the configuration
            # tells us not to ("supposed_to_merge"), or b) this bundle
            # isn't actually configured to be built, that is, has no
            # filters and no output target.
            hunk = self._build(env,
                               extra_filters=extra_filters,
                               force=False,
                               *args,
                               **kwargs)
            return [self._make_url(env)]
        else:
            # We either have no files (nothing to build), or we are
            # in debug mode: Instead of building the bundle, we
            # source all contents instead.
            urls = []
            for c, _ in self.resolve_contents(env):
                if isinstance(c, Bundle):
                    urls.extend(c.urls(env, *args, **kwargs))
                elif is_url(c):
                    urls.append(c)
                else:
                    urls.append(env.absurl(c))
            return urls
Example #10
 def _get_env(self, env):
     # Note how bool(env) can be False, due to __len__.
     env = env if env is not None else self.env
     if env is None:
         raise BundleError('Bundle is not connected to an environment')
     return env
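
The ``__len__`` note deserves a concrete illustration of why the comparison is against ``None`` rather than a plain truth test; ``FakeEnv`` below is an invented stand-in.

    class FakeEnv(object):
        """Stand-in for an environment whose __len__ counts registered bundles."""
        def __len__(self):
            return 0              # nothing registered yet

    env = FakeEnv()
    print(bool(env))              # False, although env is a perfectly good object
    print(env is not None)        # True, the check _get_env() relies on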
Example #11
    def _merge_and_apply(self,
                         env,
                         output_path,
                         force,
                         parent_debug=None,
                         parent_filters=[],
                         extra_filters=[],
                         disable_cache=False):
        """Internal recursive build method.

        ``parent_debug`` is the debug setting used by the parent bundle.
        This is not necessarily ``bundle.debug``, but rather what the
        calling method in the recursion tree is actually using.

        ``parent_filters`` are what the parent passes along, for
        us to be applied as input filters. Like ``parent_debug``, it is
        a collection of the filters of all parents in the hierarchy.

        ``extra_filters`` may exist if the parent is a container bundle
        passing filters along to its children; these are applied as input
        and output filters (since there is no parent who could do the
        latter), and they are not passed further down the hierarchy
        (but instead they become part of ``parent_filters``).

        ``disable_cache`` is necessary because in some cases, when an
        external bundle dependency has changed, we must not rely on the
        cache.
        """
        # Determine the debug option to work with, which will tell us
        # what building the bundle entails. The reduce chooses the first
        # non-None value.
        debug = reduce(lambda x, y: x if x is not None else y,
                       [self.debug, parent_debug, env.debug])
        if debug == 'merge':
            no_filters = True
        elif debug is True:
            # This should be caught by urls().
            if any([self.debug, parent_debug]):
                raise BuildError("a bundle with debug=True cannot be built")
            else:
                raise BuildError("cannot build while in debug mode")
        elif debug is False:
            no_filters = False
        else:
            raise BundleError('Invalid debug value: %s' % debug)

        # Prepare contents
        resolved_contents = self.resolve_contents(env, force=True)
        if not resolved_contents:
            raise BuildError('empty bundle cannot be built')

        # Prepare filters
        filters = merge_filters(self.filters, extra_filters)
        for filter in filters:
            filter.set_environment(env)

        # Apply input filters to all the contents. Note that we use
        # both this bundle's filters as well as those given to us by
        # the parent. We ONLY do this for the input filters, because
        # we need them to be applied before we apply our own output
        # filters.
        combined_filters = merge_filters(filters, parent_filters)
        hunks = []
        for _, c in resolved_contents:
            if isinstance(c, Bundle):
                hunk = c._merge_and_apply(env,
                                          output_path,
                                          force,
                                          debug,
                                          combined_filters,
                                          disable_cache=disable_cache)
                hunks.append(hunk)
            else:
                if is_url(c):
                    hunk = UrlHunk(c)
                else:
                    hunk = FileHunk(c)
                if no_filters:
                    hunks.append(hunk)
                else:
                    hunks.append(
                        apply_filters(hunk,
                                      combined_filters,
                                      'input',
                                      env.cache,
                                      disable_cache,
                                      output_path=output_path))

        # Return all source hunks as one, with output filters applied
        try:
            final = merge(hunks)
        except IOError, e:
            raise BuildError(e)
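
The ``reduce`` call at the top of ``_merge_and_apply`` simply picks the first non-``None`` debug value in precedence order (bundle, then parent, then environment); a standalone restatement for illustration:

    from functools import reduce   # a builtin on Python 2, imported on Python 3

    def first_non_none(*values):
        """Return the first value that is not None."""
        return reduce(lambda x, y: x if x is not None else y, values)

    print(first_non_none(None, 'merge', False))   # 'merge'
    print(first_non_none(None, None, True))       # True
    print(first_non_none(False, True, True))      # False, the bundle setting wins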