Example 1
    def __init__(self, filename, files_root, url_root, media, bundle_type, precompile_in_debug, extra=None):
        # basic settings
        self.bundle_type = bundle_type
        self.file_path = os.path.join(files_root, filename)
        self.file_url = os.path.join(url_root, filename)
        self.precompile_in_debug = precompile_in_debug
        if self.precompile_in_debug:
            self.precompile_url = os.path.join(url_root, '%s.%s' % (os.path.splitext(filename)[0], self.bundle_type))
            self.precompile_path = '%s.%s' % (os.path.splitext(self.file_path)[0], self.bundle_type)
        self.media = media

        # file_type (or take from extension)
        if extra and 'type' in extra:
            self.file_type = extra['type']
        else:
            self.file_type = os.path.splitext(filename)[1][1:]

        # Lint setting and default
        if bundle_type in bundles_settings.BUNDLES_LINTING:
            self.lint = bundles_settings.BUNDLES_LINTING[bundle_type].get('default', False)
            if extra and 'lint' in extra:
                self.lint = extra['lint']
        else:
            self.lint = False

        # Preprocessors or get defaults
        if extra and 'processors' in extra:
            self.processors = processor_library.get_processors(extra['processors'])
        else:
            self.processors = processor_library.get_default_preprocessors_for(self.file_type)
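
The constructor above mostly derives paths, URLs and a file type from the filename. A minimal, self-contained Python sketch of that derivation; the sample filename and root paths are made up for illustration:

    import os

    def derive_paths(filename, files_root, url_root, bundle_type):
        # Mirrors the path/URL/type derivation in the constructor above (illustrative only).
        file_path = os.path.join(files_root, filename)
        file_url = os.path.join(url_root, filename)
        file_type = os.path.splitext(filename)[1][1:]  # extension without the leading dot
        precompile_url = os.path.join(url_root, '%s.%s' % (os.path.splitext(filename)[0], bundle_type))
        return file_path, file_url, file_type, precompile_url

    print(derive_paths('css/site.scss', '/srv/static', '/static', 'css'))
    # ('/srv/static/css/site.scss', '/static/css/site.scss', 'scss', '/static/css/site.css')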
Example 2
    def __init__(self, filename, files_root, url_root, media, bundle_type, extra=None):
        # basic settings
        self.file_path = os.path.join(files_root, filename)
        self.file_url = os.path.join(url_root, filename)
        self.media = media

        # file_type (or take from extension)
        if extra and 'type' in extra:
            self.file_type = extra['type']
        else:
            self.file_type = os.path.splitext(filename)[1][1:]

        # Lint setting and default
        if bundle_type in bundles_settings.BUNDLES_LINTING:
            self.lint = bundles_settings.BUNDLES_LINTING[bundle_type].get('default', False)
            if extra and 'lint' in extra:
                self.lint = extra['lint']
        else:
            self.lint = False

        # Preprocessors or get defaults
        if extra and 'processors' in extra:
            self.processors = processor_library.get_processors(extra['processors'])
        else:
            self.processors = processor_library.get_default_preprocessors_for(self.file_type)
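
The lint flag in both constructors follows a small precedence chain: an explicit per-file 'lint' entry in extra wins, then the bundle type's 'default' from BUNDLES_LINTING, and finally False. A runnable sketch of the same precedence, with a made-up settings dict standing in for bundles_settings.BUNDLES_LINTING:

    # Stand-in for bundles_settings.BUNDLES_LINTING; the contents are illustrative.
    BUNDLES_LINTING = {
        'js': {'default': True},
    }

    def resolve_lint(bundle_type, extra=None):
        # Same precedence as in the constructors above.
        if bundle_type in BUNDLES_LINTING:
            lint = BUNDLES_LINTING[bundle_type].get('default', False)
            if extra and 'lint' in extra:
                lint = extra['lint']
            return lint
        return False

    print(resolve_lint('js'))                   # True  (bundle-type default)
    print(resolve_lint('js', {'lint': False}))  # False (per-file override)
    print(resolve_lint('css'))                  # False (no linting configured for the type)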
Example 3
    def handle(self, *args, **options):
        self.stdout.write("Bundling...\n")
        dev_mode = bool(options.get('dev'))

        _bundle_versions = {}
        set_bundle_versions(_bundle_versions)

        if options.get('parallel'):
            self.stdout.write("Writing bundles in parallel\n")
            pool = Pool()
            results = pool.map(do_make_bundle, [
                (bundle, '_' if dev_mode else None)
                for bundle in get_bundles()
            ])
            pool.close()
            pool.join()

            for bundle_name, hash_version in results:
                _bundle_versions[bundle_name] = hash_version
        else:
            for bundle in get_bundles():
                self.stdout.write("Writing bundle: %s\n" % bundle.name)

                _, hash_version = do_make_bundle((bundle, '_' if dev_mode else None))
                # Build bundle versions as we're going along in case they're used in templated bundles
                _bundle_versions[bundle.name] = hash_version

                self.stdout.write("\t%s\n" % bundle.get_version())

        version_info = '\n'.join(['    "%s": "%s",' % version for version in _bundle_versions.iteritems()])

        with open(bundles_settings.BUNDLES_VERSION_FILE, 'wb') as bundles_versions:
            bundles_versions.write("""\
#!/usr/bin/env python

BUNDLES_VERSIONS = {
%s
}
""" % version_info)

        for single_file_input, single_file_output in bundles_settings.BUNDLES_SINGLE_FILES:
            self.stdout.write("Writing: %s\n" % single_file_output)
            file_type = os.path.splitext(single_file_input)[1][1:]
            processors = processor_library.get_default_preprocessors_for(file_type) + processor_library.get_default_postprocessors_for(file_type)

            with open(single_file_output, 'wb') as output_file:
                for chunk in processor_pipeline(processors, FileChunkGenerator(open(single_file_input, 'rb'))):
                    output_file.write(chunk)

        self.stdout.write("Done.\n")
Example 4

    def handle(self, *args, **options):
        self.stdout.write("Bundling...\n")
        dev_mode = bool(options.get('dev'))

        _bundle_versions = {}
        set_bundle_versions(_bundle_versions)

        for bundle in get_bundles():
            self.stdout.write("Writing bundle: %s\n" % bundle.name)

            if bundle.uglify_command:
                hash_version = make_uglify_bundle(bundle, fixed_version='_' if dev_mode else None)
            else:
                hash_version = make_bundle(bundle, fixed_version='_' if dev_mode else None)

            # Build bundle versions as we're going along in case they're used in templated bundles
            _bundle_versions[bundle.name] = hash_version

            if bundle.create_debug:
                if bundle.uglify_command:
                    _bundle_versions['debug:' + bundle.name] = make_uglify_bundle(bundle, debug=True)
                else:
                    _bundle_versions['debug:' + bundle.name] = make_bundle(bundle, debug=True)

            self.stdout.write("\t%s\n" % bundle.get_version())

        version_info = '\n'.join(['    "%s": "%s",' % version for version in _bundle_versions.iteritems()])

        with open(bundles_settings.BUNDLES_VERSION_FILE, 'wb') as bundles_versions:
            bundles_versions.write("""\
#!/usr/bin/env python

BUNDLES_VERSIONS = {
%s
}
""" % version_info)

        for single_file_input, single_file_output in bundles_settings.BUNDLES_SINGLE_FILES:
            self.stdout.write("Writing: %s\n" % single_file_output)
            file_type = os.path.splitext(single_file_input)[1][1:]
            processors = processor_library.get_default_preprocessors_for(file_type) + processor_library.get_default_postprocessors_for(file_type)

            with open(single_file_output, 'wb') as output_file:
                for chunk in processor_pipeline(processors, FileChunkGenerator(open(single_file_input, 'rb'))):
                    output_file.write(chunk)

        self.stdout.write("Done.\n")