def apply(cls, message, paths_to_content, write_func, batch_write_func, delete_func, threaded=True, batch_writes=False):
    """Applies a file diff to a destination by writing and deleting files.

    Args:
        message: Diff message with `adds`, `edits` and `deletes` file lists.
        paths_to_content: Mapping of file path -> content to write.
        write_func: Callable(path, content) writing a single file.
        batch_write_func: Callable(dict) writing many files at once.
        delete_func: Callable deleting a path (or a list of paths when
            batch_writes is enabled).
        threaded: Whether to fan individual operations out over a thread pool.
        batch_writes: Whether to use the batch write/delete functions instead
            of per-file operations.

    Raises:
        common_utils.UnavailableError: When thread pools are unavailable in
            this environment.
    """
    if pool is None:
        text = 'Deployment is unavailable in this environment.'
        raise common_utils.UnavailableError(text)
    # Fix: only create the pool when it will be used. The previous code
    # created it unconditionally and leaked it (never closed/joined) when
    # threaded=False.
    thread_pool = pool.ThreadPool(cls.POOL_SIZE) if threaded else None
    diff = message
    num_files = len(diff.adds) + len(diff.edits) + len(diff.deletes)
    text = 'Deploying: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(num_files))]
    progress = progressbar_non.create_progressbar(
        "Deploying...", widgets=widgets, max_value=num_files)

    def run_with_progress(func, *args):
        # Run one operation, then advance the progress bar a single tick.
        func(*args)
        progress.update(progress.value + 1)

    if batch_writes:
        # Batch mode: collect all writes and deletes, then hand them off in
        # bulk. No progress bar is driven in this mode.
        writes_paths_to_contents = {}
        for file_message in diff.adds:
            writes_paths_to_contents[file_message.path] = \
                paths_to_content[file_message.path]
        for file_message in diff.edits:
            writes_paths_to_contents[file_message.path] = \
                paths_to_content[file_message.path]
        deletes_paths = [
            file_message.path for file_message in diff.deletes]
        if writes_paths_to_contents:
            batch_write_func(writes_paths_to_contents)
        if deletes_paths:
            delete_func(deletes_paths)
    else:
        progress.start()
        # Adds and edits are both plain writes of the new content.
        for file_message in diff.adds:
            content = paths_to_content[file_message.path]
            if threaded:
                args = (write_func, file_message.path, content)
                thread_pool.apply_async(run_with_progress, args=args)
            else:
                run_with_progress(write_func, file_message.path, content)
        for file_message in diff.edits:
            content = paths_to_content[file_message.path]
            if threaded:
                args = (write_func, file_message.path, content)
                thread_pool.apply_async(run_with_progress, args=args)
            else:
                run_with_progress(write_func, file_message.path, content)
        for file_message in diff.deletes:
            if threaded:
                args = (delete_func, file_message.path)
                thread_pool.apply_async(run_with_progress, args=args)
            else:
                run_with_progress(delete_func, file_message.path)
    if threaded:
        # Block until all queued operations have completed.
        thread_pool.close()
        thread_pool.join()
    if not batch_writes:
        progress.finish()
def rendered_docs(pod, routes, use_threading=True, source_dir=None): """Generate the rendered documents for the given routes.""" with pod.profile.timer('renderer.Renderer.render_docs'): routes_len = len(routes) text = 'Building: %(value)d/{} (in %(time_elapsed).9s)' widgets = [progressbar.FormatLabel(text.format(routes_len))] progress = progressbar_non.create_progressbar("Building pod...", widgets=widgets, max_value=routes_len) progress.start() def tick(): """Tick the progress bar value.""" progress.update(progress.value + 1) batches = render_batch.RenderBatches(pod.render_pool, pod.profile, tick=tick) for controller in Renderer.controller_generator(pod, routes): batches.add(controller) if source_dir: # When using an input directory, load the files instead of render. rendered_docs, render_errors = batches.load( use_threading=use_threading, source_dir=source_dir) else: # Default to rendering the documents. rendered_docs, render_errors = batches.render( use_threading=use_threading) progress.finish() if render_errors: for error in render_errors: print error.message print error.err.message traceback.print_tb(error.err_tb) print '' text = 'There were {} errors during rendering.' raise RenderErrors(text.format(len(render_errors)), render_errors) return rendered_docs
def rendered_docs(pod, routes, use_threading=True, source_dir=None): """Generate the rendered documents for the given routes.""" with pod.profile.timer('renderer.Renderer.render_docs'): routes_len = len(routes) text = 'Building: %(value)d/{} (in %(time_elapsed).9s)' widgets = [progressbar.FormatLabel(text.format(routes_len))] progress = progressbar_non.create_progressbar( "Building pod...", widgets=widgets, max_value=routes_len) progress.start() def tick(): """Tick the progress bar value.""" progress.update(progress.value + 1) batches = render_batch.RenderBatches( pod.render_pool, pod.profile, tick=tick) for controller in Renderer.controller_generator(pod, routes): batches.add(controller) if source_dir: # When using an input directory, load the files instead of render. rendered_docs, render_errors = batches.load( use_threading=use_threading, source_dir=source_dir) else: # Default to rendering the documents. rendered_docs, render_errors = batches.render( use_threading=use_threading) progress.finish() if render_errors: for error in render_errors: print error.message print error.err.message traceback.print_tb(error.err_tb) print '' text = 'There were {} errors during rendering.' raise RenderErrors(text.format( len(render_errors)), render_errors) return rendered_docs
def export_ui(self):
    """Builds the grow ui tools, returning a mapping of paths to content.

    Yields:
        rendered_document.RenderedDocument: One document per UI asset, first
        the base ui css/js bundles, then every file shipped by each
        configured tool.
    """
    paths = []
    source_prefix = 'node_modules/'
    destination_root = '_grow/ui/'
    tools_dir = 'tools/'
    tool_prefix = 'grow-tool-'
    # Add the base ui files.
    source_root = os.path.join(utils.get_grow_dir(), 'ui', 'dist')
    for path in ['css/ui.min.css', 'js/ui.min.js']:
        source_path = os.path.join(source_root, path)
        output_path = os.sep + os.path.join(destination_root, path)
        yield rendered_document.RenderedDocument(
            output_path, self.storage.read(source_path))
    # Add the files from each of the tools.
    for tool in self.ui.get('tools', []):
        tool_path = '{}{}{}'.format(source_prefix, tool_prefix, tool['kind'])
        for root, dirs, files in self.walk(tool_path):
            # Fix: prune hidden directories with a slice assignment. The
            # previous `for d in dirs: dirs.remove(d)` mutated the list
            # while iterating it, skipping adjacent dot-directories.
            dirs[:] = [
                directory for directory in dirs
                if not directory.startswith('.')]
            pod_dir = root.replace(self.root, '')
            for file_name in files:
                paths.append(os.path.join(pod_dir, file_name))
    text = 'Building UI Tools: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(len(paths)))]
    progress = progressbar_non.create_progressbar(
        "Building UI Tools...", widgets=widgets, max_value=len(paths))
    progress.start()
    for path in paths:
        output_path = path.replace(
            source_prefix, '{}{}'.format(destination_root, tools_dir))
        yield rendered_document.RenderedDocument(
            output_path, self.read_file(path))
        progress.update(progress.value + 1)
    progress.finish()
def export_ui(self):
    """Builds the grow ui tools, returning a mapping of paths to content.

    Yields:
        rendered_document.RenderedDocument: One document per UI asset — the
        base ui css/js bundles followed by every file from each configured
        tool package.
    """
    paths = []
    source_prefix = 'node_modules/'
    destination_root = '_grow/ui/'
    tools_dir = 'tools/'
    tool_prefix = 'grow-tool-'
    # Add the base ui files.
    source_root = os.path.join(utils.get_grow_dir(), 'ui', 'dist')
    for path in ['css/ui.min.css', 'js/ui.min.js']:
        source_path = os.path.join(source_root, path)
        output_path = os.sep + os.path.join(destination_root, path)
        yield rendered_document.RenderedDocument(
            output_path, self.storage.read(source_path))
    # Add the files from each of the tools.
    for tool in self.ui.get('tools', []):
        tool_path = '{}{}{}'.format(
            source_prefix, tool_prefix, tool['kind'])
        for root, dirs, files in self.walk(tool_path):
            # Fix: removing entries from `dirs` while iterating it skipped
            # adjacent hidden directories; prune in-place via slice
            # assignment so the walk still honors the pruned list.
            dirs[:] = [
                directory for directory in dirs
                if not directory.startswith('.')]
            pod_dir = root.replace(self.root, '')
            for file_name in files:
                paths.append(os.path.join(pod_dir, file_name))
    text = 'Building UI Tools: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(len(paths)))]
    progress = progressbar_non.create_progressbar(
        "Building UI Tools...", widgets=widgets, max_value=len(paths))
    progress.start()
    for path in paths:
        output_path = path.replace(
            source_prefix, '{}{}'.format(destination_root, tools_dir))
        yield rendered_document.RenderedDocument(
            output_path, self.read_file(path))
        progress.update(progress.value + 1)
    progress.finish()
def render_paths(self, paths, routes, suffix=None, append_slashes=False):
    """Renders the given paths and yields each path and content.

    Args:
        paths: Serving paths to render.
        routes: Route table used to match each path to a controller.
        suffix: Optional file name (e.g. 'index.html') appended to rendered
            routes that end in '/'.
        append_slashes: Whether to normalize extension-less rendered paths
            to end with '/' before applying the suffix.

    Yields:
        rendered_document.RenderedDocument for each path.
    """
    text = 'Building: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(len(paths)))]
    bar = progressbar_non.create_progressbar(
        "Building pod...", widgets=widgets, max_value=len(paths))
    bar.start()
    for path in paths:
        output_path = path
        controller, params = routes.match(path, env=self.env.to_wsgi_env())
        # Append a suffix onto rendered routes only. This supports dumping
        # paths that would serve at URLs that terminate in "/" or without
        # an extension to an HTML file suitable for writing to a
        # filesystem. Static routes and other routes that may export to
        # paths without extensions should remain unmodified.
        if suffix and controller.KIND == messages.Kind.RENDERED:
            if (append_slashes and not output_path.endswith('/')
                    and not os.path.splitext(output_path)[-1]):
                output_path = output_path.rstrip('/') + '/'
            if append_slashes and output_path.endswith('/') and suffix:
                output_path += suffix
        try:
            key = 'Pod.render_paths.render'
            if isinstance(controller, grow_static.StaticController):
                key = 'Pod.render_paths.render.static'
            with self.profile.timer(
                    key, label=output_path, meta={'path': output_path}):
                yield rendered_document.RenderedDocument(
                    output_path, controller.render(params, inject=False),
                    tmp_dir=self.tmp_dir)
        # Fix: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are not intercepted just to log; real render
        # failures are still logged and re-raised.
        except Exception:
            self.logger.error('Error building: {}'.format(controller))
            raise
        bar.update(bar.value + 1)
    bar.finish()
def apply(cls, message, paths_to_rendered_doc, write_func, batch_write_func, delete_func, threaded=True, batch_writes=False):
    """Applies a file diff, collecting per-file errors into a summary.

    Args:
        message: Diff message with `adds`, `edits` and `deletes` file lists.
        paths_to_rendered_doc: Mapping of file path -> rendered document.
        write_func: Callable(rendered_doc) writing a single document.
        batch_write_func: Callable(dict) writing many documents at once.
        delete_func: Callable deleting a path (or list of paths when
            batch_writes is enabled).
        threaded: Whether to run operations over a thread pool.
        batch_writes: Whether to use the batch write/delete functions.

    Raises:
        common_utils.UnavailableError: When thread pools are unavailable.
        DeploymentErrors: When one or more individual operations failed.
    """
    if pool is None:
        text = 'Deployment is unavailable in this environment.'
        raise common_utils.UnavailableError(text)
    apply_errors = []
    thread_pool = None
    if threaded:
        thread_pool = pool.ThreadPool(cls.POOL_SIZE)
    diff = message
    num_files = len(diff.adds) + len(diff.edits) + len(diff.deletes)
    text = 'Deploying: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(num_files))]
    progress = progressbar_non.create_progressbar(
        "Deploying...", widgets=widgets, max_value=num_files)

    def run_func(kwargs):
        """Run an arbitrary function, wrapping any failure for collection."""
        try:
            kwargs['func'](*kwargs['args'])
            return True
        # pylint: disable=broad-except
        except Exception as err:
            _, _, err_tb = sys.exc_info()
            return DeploymentError(
                "Error deploying {}".format(kwargs['args']), err, err_tb)

    if batch_writes:
        # Batch mode: collect all writes and deletes and hand off in bulk.
        writes_paths_to_rendered_doc = {}
        for file_message in diff.adds:
            writes_paths_to_rendered_doc[file_message.path] = \
                paths_to_rendered_doc[file_message.path]
        for file_message in diff.edits:
            writes_paths_to_rendered_doc[file_message.path] = \
                paths_to_rendered_doc[file_message.path]
        deletes_paths = [
            file_message.path for file_message in diff.deletes]
        if writes_paths_to_rendered_doc:
            batch_write_func(writes_paths_to_rendered_doc)
        if deletes_paths:
            delete_func(deletes_paths)
    else:
        progress.start()
        # Build the full work list up front; adds and edits are both writes.
        threaded_args = []
        for file_message in diff.adds:
            rendered_doc = paths_to_rendered_doc[file_message.path]
            threaded_args.append({
                'func': write_func,
                'args': (rendered_doc, ),
            })
        for file_message in diff.edits:
            rendered_doc = paths_to_rendered_doc[file_message.path]
            threaded_args.append({
                'func': write_func,
                'args': (rendered_doc, ),
            })
        for file_message in diff.deletes:
            threaded_args.append({
                'func': delete_func,
                'args': (file_message.path, ),
            })
        if threaded:
            results = thread_pool.imap_unordered(run_func, threaded_args)
            for result in results:
                if isinstance(result, Exception):
                    apply_errors.append(result)
                progress.update(progress.value + 1)
        else:
            # Fix: route the serial path through run_func as well. The
            # previous code caught only DeploymentError, but write/delete
            # functions raise arbitrary exceptions, so one failure aborted
            # the whole deploy with no error summary (and no progress tick).
            for kwargs in threaded_args:
                result = run_func(kwargs)
                if isinstance(result, Exception):
                    apply_errors.append(result)
                progress.update(progress.value + 1)
    if threaded:
        thread_pool.close()
        thread_pool.join()
    if not batch_writes:
        progress.finish()
    if apply_errors:
        for error in apply_errors:
            print(error.message)
            print(error.err)
            traceback.print_tb(error.err_tb)
            print('')
        text = 'There were {} errors during deployment.'
        raise DeploymentErrors(text.format(len(apply_errors)), apply_errors)
def upload(self, locales=None, force=True, verbose=False, save_stats=True, prune=False):
    """Uploads translation catalogs for the given locales to the service.

    Args:
        locales: Locales to upload; defaults to every catalog in the pod.
        force: When False, prompt interactively before uploading.
        verbose: Whether to pretty-print the resulting stats.
        save_stats: Whether to persist the translator stats after upload.
        prune: Passed through to the underlying upload calls.

    Returns:
        List of stat objects sorted by language, or None when aborted or
        when there are no locales to upload.
    """
    source_lang = self.pod.podspec.default_locale
    locales = locales or self.pod.catalogs.list_locales()
    locales = self._cleanup_locales(locales)
    stats = []
    num_files = len(locales)
    if not locales:
        self.pod.logger.info('No locales to upload.')
        return
    if not force:
        # Warn the user when existing translator data will be overwritten.
        if (self.has_immutable_translation_resources
                and self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH)):
            text = 'Found existing translator data in: {}'
            self.pod.logger.info(text.format(
                Translator.TRANSLATOR_STATS_PATH))
            text = 'This will be updated with new data after the upload is complete.'
            self.pod.logger.info(text)
        text = 'Proceed to upload {} translation catalogs?'
        text = text.format(num_files)
        if not utils.interactive_confirm(text):
            self.pod.logger.info('Aborted.')
            return
    if self.has_multiple_langs_in_one_resource:
        # Single shared resource: upload all catalogs in one call, no
        # per-locale threading or progress bar.
        catalogs_to_upload = []
        for locale in locales:
            catalog_to_upload = self.pod.catalogs.get(locale)
            if catalog_to_upload:
                catalogs_to_upload.append(catalog_to_upload)
        stats = self._upload_catalogs(catalogs_to_upload, source_lang,
                                      prune=prune)
    else:
        text = 'Uploading translations: %(value)d/{} (in %(time_elapsed).9s)'
        widgets = [progressbar.FormatLabel(text.format(num_files))]
        bar = progressbar_non.create_progressbar(
            "Uploading translations...", widgets=widgets,
            max_value=num_files)
        bar.start()
        threads = []

        def _do_upload(locale):
            # Worker: upload one locale's catalog and record its stat.
            # Appending to `stats` from threads relies on list.append being
            # atomic under the GIL.
            catalog = self.pod.catalogs.get(locale)
            stat = self._upload_catalog(catalog, source_lang, prune=prune)
            stats.append(stat)

        for i, locale in enumerate(locales):
            thread = utils.ProgressBarThread(
                bar, True, target=_do_upload, args=(locale,))
            threads.append(thread)
            thread.start()
            # Perform the first operation synchronously to avoid oauth2 refresh
            # locking issues.
            if i == 0:
                thread.join()
        # Wait for the remaining (concurrent) uploads; index 0 already joined.
        for i, thread in enumerate(threads):
            if i > 0:
                thread.join()
        bar.finish()
    stats = sorted(stats, key=lambda stat: stat.lang)
    if verbose:
        self.pretty_print_stats(stats)
    if save_stats:
        self.save_stats(stats)
    return stats
def download(self, locales, save_stats=True, inject=False, include_obsolete=False):
    """Downloads translations and imports (or injects) them into the pod.

    Args:
        locales: Locales whose translations should be downloaded.
        save_stats: Whether to persist updated translator stats afterwards.
        inject: When True, inject translations in-memory (no progress bar)
            instead of importing them into the catalogs.
        include_obsolete: Whether obsolete messages are imported too.

    Returns:
        List of new stat objects, or None when there is nothing to download.
    """
    # TODO: Rename to `download_and_import`.
    if not self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH):
        text = 'File {} not found. Nothing to download.'
        self.pod.logger.info(text.format(Translator.TRANSLATOR_STATS_PATH))
        return
    stats_to_download = self._get_stats_to_download(locales)
    if not stats_to_download:
        return
    num_files = len(stats_to_download)
    text = 'Downloading translations: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(num_files))]
    # Injection mode shows no progress bar, so `bar` exists only otherwise.
    if not inject:
        bar = progressbar_non.create_progressbar(
            "Downloading translations...", widgets=widgets,
            max_value=num_files)
        bar.start()
    threads = []
    langs_to_translations = {}
    new_stats = []

    def _do_download(lang, stat):
        # Worker: fetch one locale's translations; a missing remote
        # resource is logged and skipped rather than failing the run.
        try:
            new_stat, content = self._download_content(stat)
        except translator_errors.NotFoundError:
            text = 'No translations to download for: {}'
            self.pod.logger.info(text.format(lang))
            return
        new_stat.uploaded = stat.uploaded  # Preserve uploaded field.
        langs_to_translations[lang] = content
        new_stats.append(new_stat)

    for i, (lang, stat) in enumerate(stats_to_download.iteritems()):
        if inject:
            thread = threading.Thread(
                target=_do_download, args=(lang, stat))
        else:
            thread = utils.ProgressBarThread(
                bar, True, target=_do_download, args=(lang, stat))
        threads.append(thread)
        thread.start()
        # Perform the first operation synchronously to avoid oauth2 refresh
        # locking issues.
        if i == 0:
            thread.join()
    # Wait for the remaining downloads; the first thread already joined.
    for i, thread in enumerate(threads):
        if i > 0:
            thread.join()
    if not inject:
        bar.finish()
    has_changed_content = False
    for lang, translations in langs_to_translations.iteritems():
        if inject:
            if self.pod.catalogs.inject_translations(locale=lang,
                                                     content=translations):
                has_changed_content = True
        elif self.pod.catalogs.import_translations(
                locale=lang, content=translations,
                include_obsolete=include_obsolete):
            has_changed_content = True
    # Only rewrite the stats file when something actually changed.
    if save_stats and has_changed_content:
        self.save_stats(new_stats)
    return new_stats
def upload(self, locales=None, force=True, verbose=False, save_stats=True, prune=False):
    """Uploads translation catalogs to the translator service.

    The first catalog is uploaded synchronously (oauth2 token refresh is
    not safe to race); the rest run concurrently on ProgressBarThreads.
    """
    source_lang = self.pod.podspec.default_locale
    if not locales:
        locales = self.pod.catalogs.list_locales()
    locales = self._cleanup_locales(locales)
    stats = []
    num_files = len(locales)
    if not locales:
        self.pod.logger.info('No locales to upload.')
        return
    if not force:
        if (self.has_immutable_translation_resources
                and self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH)):
            self.pod.logger.info(
                'Found existing translator data in: {}'.format(
                    Translator.TRANSLATOR_STATS_PATH))
            self.pod.logger.info(
                'This will be updated with new data after the upload is complete.')
        prompt = 'Proceed to upload {} translation catalogs?'.format(num_files)
        if not utils.interactive_confirm(prompt):
            self.pod.logger.info('Aborted.')
            return
    if self.has_multiple_langs_in_one_resource:
        # Every language lives in one resource; upload all catalogs at once.
        pending_catalogs = []
        for candidate_locale in locales:
            candidate = self.pod.catalogs.get(candidate_locale)
            if candidate:
                pending_catalogs.append(candidate)
        stats = self._upload_catalogs(pending_catalogs, source_lang,
                                      prune=prune)
    else:
        label = 'Uploading translations: %(value)d/{} (in %(time_elapsed).9s)'
        bar_widgets = [progressbar.FormatLabel(label.format(num_files))]
        progress_bar = progressbar_non.create_progressbar(
            "Uploading translations...", widgets=bar_widgets,
            max_value=num_files)
        progress_bar.start()
        workers = []

        def _upload_one(locale):
            """Upload a single locale's catalog and record its stat."""
            catalog = self.pod.catalogs.get(locale)
            stats.append(
                self._upload_catalog(catalog, source_lang, prune=prune))

        for index, locale in enumerate(locales):
            worker = utils.ProgressBarThread(
                progress_bar, True, target=_upload_one, args=(locale, ))
            workers.append(worker)
            worker.start()
            # Perform the first operation synchronously to avoid oauth2
            # refresh locking issues.
            if index == 0:
                worker.join()
        for worker in workers[1:]:
            worker.join()
        progress_bar.finish()
    stats = sorted(stats, key=lambda stat: stat.lang)
    if verbose:
        self.pretty_print_stats(stats)
    if save_stats:
        self.save_stats(stats)
    return stats
def download(self, locales, save_stats=True, inject=False, include_obsolete=False):
    """Downloads translations and imports (or injects) them into the pod.

    Args:
        locales: Locales whose translations should be downloaded.
        save_stats: Whether to persist updated translator stats afterwards.
        inject: When True, inject translations in-memory (no progress bar)
            instead of importing them into the catalogs.
        include_obsolete: Whether obsolete messages are imported too.

    Returns:
        List of new stat objects, or None when there is nothing to download.
    """
    # TODO: Rename to `download_and_import`.
    if not self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH):
        text = 'File {} not found. Nothing to download.'
        self.pod.logger.info(text.format(Translator.TRANSLATOR_STATS_PATH))
        return
    stats_to_download = self._get_stats_to_download(locales)
    if not stats_to_download:
        return
    num_files = len(stats_to_download)
    text = 'Downloading translations: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(num_files))]
    # Injection mode is in-memory only, so the progress bar is skipped.
    if not inject:
        bar = progressbar_non.create_progressbar(
            "Downloading translations...", widgets=widgets,
            max_value=num_files)
        bar.start()
    threads = []
    langs_to_translations = {}
    new_stats = []

    def _do_download(lang, stat):
        # Worker: fetch one locale's translations; missing remote resources
        # are logged and skipped instead of failing the whole download.
        try:
            new_stat, content = self._download_content(stat)
        except translator_errors.NotFoundError:
            text = 'No translations to download for: {}'
            self.pod.logger.info(text.format(lang))
            return
        new_stat.uploaded = stat.uploaded  # Preserve uploaded field.
        langs_to_translations[lang] = content
        new_stats.append(new_stat)

    for i, (lang, stat) in enumerate(stats_to_download.iteritems()):
        if inject:
            thread = threading.Thread(target=_do_download,
                                      args=(lang, stat))
        else:
            thread = utils.ProgressBarThread(bar, True,
                                             target=_do_download,
                                             args=(lang, stat))
        threads.append(thread)
        thread.start()
        # Perform the first operation synchronously to avoid oauth2 refresh
        # locking issues.
        if i == 0:
            thread.join()
    # Wait for the remaining downloads; the first thread already joined.
    for i, thread in enumerate(threads):
        if i > 0:
            thread.join()
    if not inject:
        bar.finish()
    has_changed_content = False
    for lang, translations in langs_to_translations.iteritems():
        if inject:
            if self.pod.catalogs.inject_translations(locale=lang,
                                                     content=translations):
                has_changed_content = True
        elif self.pod.catalogs.import_translations(
                locale=lang, content=translations,
                include_obsolete=include_obsolete):
            has_changed_content = True
    # Only rewrite the stats file when something actually changed.
    if save_stats and has_changed_content:
        self.save_stats(new_stats)
    return new_stats
def download(self, locales, save_stats=True, include_obsolete=False):
    """Override base download to remove the threading and simplify.

    Downloads translations per spreadsheet in batches, then imports them
    into the pod's catalogs.

    Args:
        locales: Locales whose translations should be downloaded.
        save_stats: Whether to persist updated translator stats afterwards.
        include_obsolete: Whether obsolete messages are imported too.

    Returns:
        List of new stat objects, or None when there is nothing to download.
    """
    if not self.pod.file_exists(base.Translator.TRANSLATOR_STATS_PATH):
        text = 'File {} not found. Nothing to download.'
        self.pod.logger.info(
            text.format(base.Translator.TRANSLATOR_STATS_PATH))
        return
    stats_to_download = self._get_stats_to_download(locales)
    if not stats_to_download:
        return
    num_files = len(stats_to_download)
    text = 'Downloading translations: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(num_files))]
    bar = progressbar_non.create_progressbar(
        "Downloading translations...", widgets=widgets, max_value=num_files)
    bar.start()
    # Group the locales by their backing spreadsheet so each sheet is only
    # fetched once.
    spreadsheet_id_to_locales = {}
    for locale in stats_to_download:
        stat = stats_to_download[locale]
        if stat.ident not in spreadsheet_id_to_locales:
            spreadsheet_id_to_locales[stat.ident] = set()
        spreadsheet_id_to_locales[stat.ident].add(locale)
    langs_to_translations = {}
    new_stats = []
    # Batch download for each sheet id. (Uses a local name instead of
    # shadowing the `locales` parameter as the previous version did.)
    for spreadsheet_id in spreadsheet_id_to_locales:
        sheet_locales = spreadsheet_id_to_locales[spreadsheet_id]
        locale_to_values = self._download_sheets(
            spreadsheet_id, sheet_locales)
        for lang, stat in stats_to_download.items():
            if lang not in locale_to_values:
                continue
            new_stat, content = self._download_content(
                stat, locale_to_values[lang])
            bar.update(bar.value + 1)
            new_stat.uploaded = stat.uploaded  # Preserve uploaded field.
            langs_to_translations[lang] = content
            new_stats.append(new_stat)
    bar.finish()
    text = 'Importing translations: %(value)d/{} (in %(time_elapsed).9s)'
    widgets = [progressbar.FormatLabel(text.format(num_files))]
    bar = progressbar_non.create_progressbar(
        "Importing translations...", widgets=widgets, max_value=num_files)
    bar.start()
    has_changed_content = False
    unchanged_locales = []
    changed_locales = {}
    for lang, translations in langs_to_translations.items():
        # Fix: the previous code assigned the per-locale change flag
        # directly to `has_changed_content`, so it was clobbered on every
        # iteration — if the LAST locale had no changes, stats were never
        # saved even when earlier locales changed. Accumulate instead.
        locale_changed, imported_translations, total_translations = \
            self.pod.catalogs.import_translations(
                locale=lang, content=translations,
                include_obsolete=include_obsolete)
        bar.update(bar.value + 1)
        if imported_translations == 0:
            unchanged_locales.append(lang)
        else:
            changed_locales[lang] = {
                'imported': imported_translations,
                'total': total_translations,
            }
        if locale_changed:
            has_changed_content = True
    bar.finish()
    if save_stats and has_changed_content:
        self.save_stats(new_stats)
    self._log_catalog_changes(unchanged_locales, changed_locales)
    return new_stats
def rendered_docs(pod, routes): """Generate the rendered documents for the given routes.""" cont_generator = Renderer.controller_generator(pod, routes) # Turn off the pooling until it becomes faster than not pooling. # pylint: disable=redefined-outer-name, invalid-name ThreadPool = None with pod.profile.timer('renderer.Renderer.render_docs'): # Preload the render_pool before attempting to use. _ = pod.render_pool def render_func(args): """Render the content.""" controller = args['controller'] try: return controller.render(jinja_env=args['jinja_env']) # pylint: disable=broad-except except Exception as err: _, _, err_tb = sys.exc_info() return RenderError( "Error rendering {}".format(controller.serving_path), err, err_tb) routes_len = len(routes) text = 'Building: %(value)d/{} (in %(time_elapsed).9s)' widgets = [progressbar.FormatLabel(text.format(routes_len))] progress = progressbar_non.create_progressbar( "Building pod...", widgets=widgets, max_value=routes_len) progress.start() rendered_docs = [] if not ThreadPool: for controller in cont_generator: jinja_env = pod.render_pool.get_jinja_env( controller.doc.locale) if controller.use_jinja else None rendered_docs.append(render_func({ 'controller': controller, 'jinja_env': jinja_env, })) progress.update(progress.value + 1) progress.finish() return rendered_docs pod.render_pool.pool_size = Renderer.POOL_SIZE # pylint: disable=not-callable thread_pool = ThreadPool(Renderer.POOL_SIZE) threaded_args = [] for controller in cont_generator: jinja_env = pod.render_pool.get_jinja_env( controller.doc.locale) if controller.use_jinja else None threaded_args.append({ 'controller': controller, 'jinja_env': jinja_env, }) results = thread_pool.imap_unordered(render_func, threaded_args) render_errors = [] for result in results: if isinstance(result, Exception): render_errors.append(result) else: pod.profile.add_timer(result.render_timer) progress.update(progress.value + 1) thread_pool.close() thread_pool.join() progress.finish() if 
render_errors: for error in render_errors: print error.message print error.err.message traceback.print_tb(error.err_tb) print '' text = 'There were {} errors during rendering.' raise RenderErrors(text.format( len(render_errors)), render_errors) return rendered_docs