def lint_file(self, full_path, bundle):
    try:
        iter_input = processor_pipeline(bundle[full_path].processors, FileChunkGenerator(open(full_path, 'rb')))
        command = bundles_settings.BUNDLES_LINTING[bundle.bundle_type]['command']
        input_file = None
        stdin = None
        if '{infile}' in command:
            if hasattr(iter_input, 'file_path'):
                filename = iter_input.file_path
            else:
                input_file = NamedTemporaryFile()
                for chunk in iter_input:
                    input_file.write(chunk)
                input_file.flush()
                filename = input_file.name
            command = command.format(infile=filename)
        else:
            stdin = iter_input

        # Consume the iterator into a zero length deque
        collections.deque(run_process(command, stdin=stdin, to_close=input_file), maxlen=0)

        self.log_watch_result(full_path, True)
    except CalledProcessError as e:
        self.log_watch_result(full_path, False, error_message=e.output)
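# lint_file() resolves the lint command from settings by bundle type. Commands
# containing an '{infile}' placeholder get a (possibly preprocessed) file path
# substituted in; anything else is fed the processed input on stdin. A minimal
# sketch of the expected settings shape, assuming jshint and csslint as
# illustrative linters (the actual commands are project-specific):
BUNDLES_LINTING = {
    'js': {'command': 'jshint {infile}'},
    'css': {'command': 'csslint {infile}'},
}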
def get_file(path):
    global file_cache
    if not file_cache:
        for bundle in get_bundles():
            for bundle_file in bundle.files:
                file_cache[os.path.realpath(bundle_file.file_path)] = {
                    'bundle_file': bundle_file,
                    'cache': None,
                }
    if path in file_cache:
        if not file_cache[path]['cache']:
            mimetype, encoding = mimetypes.guess_type(path)
            mimetype = mimetype or 'application/octet-stream'
            # TODO: less files need to change the way they are rendered in the template
            print "Generating", path
            file_cache[path]['cache'] = {
                'contents': reduce(concat, (chunk for chunk in processor_pipeline(
                    file_cache[path]['bundle_file'].processors,
                    FileChunkGenerator(open(file_cache[path]['bundle_file'].file_path, 'rb'))))),
                'mimetype': mimetype,
            }
        return file_cache[path]['cache']
    return None
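# get_file() lazily indexes every bundle file by real path and caches the fully
# processed contents on first access. A minimal sketch of a debug-mode Django
# view serving from that cache; the view name and URL wiring are assumptions,
# not part of the code above:
from django.http import HttpResponse, Http404

def serve_bundle_file(request, path):
    cached = get_file(os.path.realpath(path))
    if cached is None:
        raise Http404('not part of any bundle')
    return HttpResponse(cached['contents'], content_type=cached['mimetype'])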
def make_bundle(bundle, debug=False, fixed_version=None):
    """
    Does all of the processing required to create a bundle and write it to disk, returning its hash version
    """
    tmp_output_file_name = '%s.%s.%s' % (os.path.join(bundle.bundle_file_root, bundle.bundle_filename), 'temp', bundle.bundle_type)

    iter_input = iter_bundle_files(bundle, debug=debug)
    output_pipeline = processor_pipeline(bundle.processors, iter_input, debug=debug)

    m = md5()
    with open(tmp_output_file_name, 'wb') as output_file:
        for chunk in output_pipeline:
            m.update(chunk)
            output_file.write(chunk)

    hash_version = fixed_version or m.hexdigest()

    if debug:
        output_file_name = '%s.debug.%s.%s' % (os.path.join(bundle.bundle_file_root, bundle.bundle_filename), hash_version, bundle.bundle_type)
    else:
        output_file_name = '%s.%s.%s' % (os.path.join(bundle.bundle_file_root, bundle.bundle_filename), hash_version, bundle.bundle_type)
    os.rename(tmp_output_file_name, output_file_name)

    return hash_version
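# make_bundle() hashes and writes in a single pass over the processor output,
# then renames the temp file into place, so a partially written bundle is never
# left under its final name. Sketch of the resulting file names, assuming a
# bundle named 'site' of type 'js' (hash shortened for readability):
#
#   make_bundle(bundle)                     -> site.48a6ce3f.js
#   make_bundle(bundle, debug=True)         -> site.debug.48a6ce3f.js
#   make_bundle(bundle, fixed_version='_')  -> site._.js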
def precompile(self, full_path, bundle):
    if bundle.precompile_in_debug and bundle[full_path].processors:
        try:
            with open(bundle[full_path].precompile_path, 'wb') as output_file:
                for chunk in processor_pipeline(bundle[full_path].processors, FileChunkGenerator(open(full_path, 'rb'))):
                    output_file.write(chunk)
            self.log_precompile_result(full_path, True)
        except CalledProcessError:
            self.log_precompile_result(full_path, False)
def make_uglify_bundle(bundle, debug=False, fixed_version=None):
    m = md5()
    infile_list = []
    source_map_processed_input_files = []
    try:
        for bundle_file in bundle.files:
            if bundle_file.processors:
                # for now preprocessed files are written to temp files and therefore won't be available in the source map
                output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')), debug=debug)
                tmp_input_file = NamedTemporaryFile()
                source_map_processed_input_files.append(tmp_input_file)
                for chunk in output_pipeline:
                    m.update(chunk)
                    tmp_input_file.write(chunk)
                tmp_input_file.seek(0)
                infile_list.append(tmp_input_file.name)
            else:
                for chunk in FileChunkGenerator(open(bundle_file.file_path, 'rb')):
                    m.update(chunk)
                infile_list.append(bundle_file.file_path)

        hash_version = fixed_version or m.hexdigest()

        debug_part = '.debug' if debug else ''
        output_file_name = '%s%s.%s.%s' % (os.path.join(bundle.bundle_file_root, bundle.bundle_filename), debug_part, hash_version, bundle.bundle_type)

        if bundle.source_map_file_root and bundle.source_map_url_root:
            source_map_file_name = '%s%s.%s.%s.map' % (os.path.join(bundle.source_map_file_root, bundle.bundle_filename), debug_part, hash_version, bundle.bundle_type)
            source_map_url = '%s.%s.%s.map' % (os.path.join(bundle.source_map_url_root, bundle.bundle_filename), hash_version, bundle.bundle_type)
        else:
            source_map_file_name = output_file_name + '.map'
            source_map_url = bundle.get_url(version=hash_version) + '.map'

        source_map_options = [
            '--source-map %s' % source_map_file_name,
            '--source-map-root %s' % bundle.source_map_files_url_root,
            '--source-map-url %s' % source_map_url,
            '-p %s' % os.path.realpath(bundle.files_root).count('/'),
            '-o %s' % output_file_name,
        ]

        # Consume the iterator into a zero length deque
        collections.deque(run_process(bundle.uglify_command.format(infile_list=' '.join(infile_list), source_map_options=' '.join(source_map_options))), maxlen=0)
    finally:
        for tmp_input_file in source_map_processed_input_files:
            tmp_input_file.close()

    return hash_version
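# The uglify command itself comes from the bundle definition; make_uglify_bundle()
# only fills in the two placeholders. A minimal sketch of a compatible template,
# assuming the UglifyJS 2.x CLI (to which the --source-map/--source-map-url/-p
# flags built above belong):
uglify_command = 'uglifyjs {infile_list} {source_map_options}'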
def handle(self, *args, **options):
    self.stdout.write("Bundling...\n")

    dev_mode = bool(options.get('dev'))

    _bundle_versions = {}
    set_bundle_versions(_bundle_versions)

    if options.get('parallel'):
        self.stdout.write("Writing bundles in parallel\n")
        pool = Pool()
        results = pool.map(do_make_bundle, [(bundle, '_' if dev_mode else None) for bundle in get_bundles()])
        pool.close()
        pool.join()
        for bundle_name, hash_version in results:
            _bundle_versions[bundle_name] = hash_version
    else:
        for bundle in get_bundles():
            self.stdout.write("Writing bundle: %s\n" % bundle.name)
            _, hash_version = do_make_bundle((bundle, '_' if dev_mode else None))
            # Build bundle versions as we're going along in case they're used in templated bundles
            _bundle_versions[bundle.name] = hash_version
            self.stdout.write("\t%s\n" % bundle.get_version())

    version_info = '\n'.join(['    "%s": "%s",' % version for version in _bundle_versions.iteritems()])

    with open(bundles_settings.BUNDLES_VERSION_FILE, 'wb') as bundles_versions:
        bundles_versions.write("""\
#!/usr/bin/env python
BUNDLES_VERSIONS = {
%s
}
""" % version_info)

    for single_file_input, single_file_output in bundles_settings.BUNDLES_SINGLE_FILES:
        self.stdout.write("Writing: %s\n" % single_file_output)
        file_type = os.path.splitext(single_file_input)[1][1:]
        processors = processor_library.get_default_preprocessors_for(file_type) + processor_library.get_default_postprocessors_for(file_type)
        with open(single_file_output, 'wb') as output_file:
            for chunk in processor_pipeline(processors, FileChunkGenerator(open(single_file_input, 'rb'))):
                output_file.write(chunk)

    self.stdout.write("Done.\n")
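# multiprocessing.Pool.map passes each worker a single argument, which is why
# do_make_bundle() takes a (bundle, fixed_version) tuple. A minimal sketch
# consistent with how it is called above; the real implementation may differ:
def do_make_bundle(args):
    bundle, fixed_version = args
    return bundle.name, make_bundle(bundle, fixed_version=fixed_version)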
def handle(self, *args, **options):
    self.stdout.write("Bundling...\n")

    dev_mode = bool(options.get('dev'))

    _bundle_versions = {}
    set_bundle_versions(_bundle_versions)

    for bundle in get_bundles():
        self.stdout.write("Writing bundle: %s\n" % bundle.name)
        if bundle.uglify_command:
            hash_version = make_uglify_bundle(bundle, fixed_version='_' if dev_mode else None)
        else:
            hash_version = make_bundle(bundle, fixed_version='_' if dev_mode else None)
        # Build bundle versions as we're going along in case they're used in templated bundles
        _bundle_versions[bundle.name] = hash_version
        if bundle.create_debug:
            if bundle.uglify_command:
                _bundle_versions['debug:' + bundle.name] = make_uglify_bundle(bundle, debug=True)
            else:
                _bundle_versions['debug:' + bundle.name] = make_bundle(bundle, debug=True)
        self.stdout.write("\t%s\n" % bundle.get_version())

    version_info = '\n'.join(['    "%s": "%s",' % version for version in _bundle_versions.iteritems()])

    with open(bundles_settings.BUNDLES_VERSION_FILE, 'wb') as bundles_versions:
        bundles_versions.write("""\
#!/usr/bin/env python
BUNDLES_VERSIONS = {
%s
}
""" % version_info)

    for single_file_input, single_file_output in bundles_settings.BUNDLES_SINGLE_FILES:
        self.stdout.write("Writing: %s\n" % single_file_output)
        file_type = os.path.splitext(single_file_input)[1][1:]
        processors = processor_library.get_default_preprocessors_for(file_type) + processor_library.get_default_postprocessors_for(file_type)
        with open(single_file_output, 'wb') as output_file:
            for chunk in processor_pipeline(processors, FileChunkGenerator(open(single_file_input, 'rb'))):
                output_file.write(chunk)

    self.stdout.write("Done.\n")
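# Both handle() variants finish by processing BUNDLES_SINGLE_FILES: standalone
# files run through the default processors for their extension but not bundled.
# Sketch of the expected setting, as (input, output) pairs with hypothetical
# paths:
BUNDLES_SINGLE_FILES = (
    ('assets/error_page.less', 'static/error_page.css'),
)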
def handle(self, target_directory, *args, **options):
    try:
        os.mkdir(target_directory)
    except OSError:
        pass

    for bundle in get_bundles():
        if options.get('bundle_type') and bundle.bundle_type != options.get('bundle_type'):
            continue

        manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
        with open(manifest_filename, 'w') as manifest:
            for bundle_file in bundle.files:
                if bundle_file.processors:
                    # The file has a preprocessor. This means in its current state it may not be a valid file
                    # and thus not suitable for inclusion in the manifest. Do any appropriate preprocessing and
                    # write out an appropriate version
                    output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')))
                    output_file_name = os.path.realpath(os.path.join(target_directory, '%s-%s.%s' % (str(uuid.uuid4())[-8:], os.path.split(bundle_file.file_path)[1], bundle.bundle_type)))
                    with open(output_file_name, 'wb') as output_file:
                        for chunk in output_pipeline:
                            output_file.write(chunk)
                    manifest.write(output_file_name + "\n")
                else:
                    manifest.write(bundle_file.file_path + "\n")
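# Each manifest is plain text, one absolute path per line. Preprocessed sources
# are materialised into target_directory under a '<uuid suffix>-<basename>.<bundle_type>'
# name so same-named files from different directories cannot collide.
# Hypothetical output:
#
#   /srv/app/static/js/vendor.js
#   /tmp/manifests/1c9a4f22-widgets.coffee.js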
def do_lint_file(args):
    bundle_type, file_path, processors = args
    success, error_message = lint_file(bundle_type, file_path, iter_input=processor_pipeline(processors, FileChunkGenerator(open(file_path, 'rb'))))
    return success, error_message, file_path
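# do_lint_file() unpacks a single tuple argument for the same Pool.map reason
# as do_make_bundle(). A sketch of an assumed parallel call site:
results = Pool().map(do_lint_file, [
    (bundle.bundle_type, bundle_file.file_path, bundle_file.processors)
    for bundle in get_bundles()
    for bundle_file in bundle.files if bundle_file.lint])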
def handle(self, *args, **options):
    self.show_successes = not bool(options.get('failures_only'))
    watch = bool(options.get('watch'))
    file_pattern = options.get('pattern')

    if watch:
        try:
            import time
            from watchdog.observers import Observer
            from watchdog.events import FileSystemEventHandler
            import curses
        except ImportError:
            raise CommandError('watchdog is required for this (pip install watchdog)')

        self.errored_files = {}
        self.log_lines = []
        watching = {}

        def check_and_lint_file(src):
            # TODO: don't repeatedly lint the same file
            for watchdir in watching:
                if watchdir in src:
                    for bundle in watching[watchdir]:
                        if src in bundle:
                            result, error_message = self.lint_file(bundle.bundle_type, src, iter_input=processor_pipeline(bundle[src].processors, FileChunkGenerator(open(src, 'rb'))))
                            self.log_watch_result(src, result, error_message=error_message)
                            break

        class FileEventHandler(FileSystemEventHandler):
            def on_created(self, event):
                if not event.is_directory:
                    check_and_lint_file(event.src_path)

            def on_modified(self, event):
                if not event.is_directory:
                    check_and_lint_file(event.src_path)

        # TODO: watchdog dirsnapshot line 97 patched (otherwise it doesn't work with PyCharm)
        # #if stat_info.st_ino == ref_stat_info.st_ino and stat_info.st_mtime != ref_stat_info.st_mtime:
        # if stat_info.st_mtime != ref_stat_info.st_mtime:

        event_handler = FileEventHandler()
        observer = Observer()

        curses.setupterm()
        self.drawscreen()

        for bundle in get_bundles():
            if bundle.files_root not in watching:
                watching[bundle.files_root] = set()
                observer.schedule(event_handler, path=bundle.files_root, recursive=True)
            watching[bundle.files_root].add(bundle)

        observer.start()
        try:
            while True:
                time.sleep(10)
        except KeyboardInterrupt:
            observer.stop()
        observer.join()
        return

    files_linted = set()
    failures = 0

    def file_checked(success, error_message, file_path):
        files_linted.add(file_path)
        if success:
            if self.show_successes:
                self.stdout.write(self.style.HTTP_SUCCESS('OK\t\t%s\n' % file_path))
            return 0
        else:
            self.stdout.write(self.style.HTTP_SERVER_ERROR('FAIL\t\t%s\n' % file_path))
            self.stdout.write(self.style.HTTP_SERVER_ERROR(error_message))
            return 1

    for bundle in get_bundles():
        for bundle_file in bundle.files:
            if file_pattern and file_pattern not in bundle_file.file_path:
                continue
            if bundle_file.file_path in files_linted:
                continue
            # Check the file exists, even for non-linted files
            if not os.path.exists(bundle_file.file_path):
                self.stdout.write(self.style.HTTP_SERVER_ERROR('FAIL\t\t%s\n' % bundle_file.file_path))
                self.stdout.write(self.style.HTTP_SERVER_ERROR('File does not exist (referenced from %s)\n' % bundle.name))
                failures += 1
                continue
            if not bundle_file.lint:
                continue
            success, error_message = self.lint_file(bundle.bundle_type, bundle_file.file_path, iter_input=processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb'))))
            failures += file_checked(success, error_message, bundle_file.file_path)

    for single_file_path, _ in bundles_settings.BUNDLES_SINGLE_FILES:
        success, error_message = self.lint_file(os.path.splitext(single_file_path)[1][1:], single_file_path)
        failures += file_checked(success, error_message, single_file_path)

    if failures:
        raise CommandError('%s FILE%s FAILED' % (failures, 'S' if failures > 1 else ''))
    else:
        self.stdout.write(self.style.HTTP_REDIRECT('\nALL FILES PASSED\n'))
def iter_bundle_files(bundle, debug=False):
    for bundle_file in bundle.files:
        for chunk in processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')), debug=debug):
            yield chunk
        yield '\n'
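# The bare '\n' yielded between files guarantees a separator even when a source
# file lacks a trailing newline, so two concatenated files cannot run together
# (a single-line comment at the end of one JS file would otherwise swallow the
# first line of the next).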