def listen(server, port, output, excqueue=None):
    # set logging level to at least "INFO" (so we can see the server requests)
    if logger.level < logging.INFO:
        logger.setLevel(logging.INFO)

    RootedHTTPServer.allow_reuse_address = True
    try:
        httpd = RootedHTTPServer(
            output, (server, port), ComplexHTTPRequestHandler)
    except OSError as e:
        logging.error("Could not listen on port %s, server %s.",
                      port, server)
        if excqueue is not None:
            excqueue.put(traceback.format_exception_only(type(e), e)[-1])
        return

    try:
        console.print(
            "Serving site at: http://{}:{} - Tap CTRL-C to stop".format(
                server, port))
        httpd.serve_forever()
    except Exception as e:
        if excqueue is not None:
            excqueue.put(traceback.format_exception_only(type(e), e)[-1])
        return
    except KeyboardInterrupt:
        httpd.socket.close()
        if excqueue is not None:
            return
        raise
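
# Hedged usage sketch (not part of the original module): serving a site that
# has already been generated into ``output``. The bind address, port, and
# output directory below are illustrative values only; ``listen`` blocks
# until CTRL-C is pressed.
if __name__ == '__main__':
    listen('127.0.0.1', 8000, 'output')
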
def autoreload(args, excqueue=None):
    console.print(' --- AutoReload Mode: Monitoring `content`, `theme` and'
                  ' `settings` for changes. ---')
    pelican, settings = get_instance(args)
    watcher = FileSystemWatcher(args.settings, Readers, settings)
    sleep = False
    while True:
        try:
            # Don't sleep first time, but sleep afterwards to reduce cpu load
            if sleep:
                time.sleep(0.5)
            else:
                sleep = True

            modified = watcher.check()

            if modified['settings']:
                pelican, settings = get_instance(args)
                watcher.update_watchers(settings)

            if any(modified.values()):
                console.print('\n-> Modified: {}. re-generating...'.format(
                    ', '.join(k for k, v in modified.items() if v)))
                pelican.run()

        except KeyboardInterrupt:
            if excqueue is not None:
                excqueue.put(None)
                return
            raise

        except Exception as e:
            if (args.verbosity == logging.DEBUG):
                if excqueue is not None:
                    excqueue.put(
                        traceback.format_exception_only(type(e), e)[-1])
                else:
                    raise
            logger.warning(
                'Caught exception:\n"%s".', e,
                exc_info=settings.get('DEBUG', False))
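
# Hedged sketch (an assumption, not the module's actual CLI wiring): running
# ``autoreload`` and ``listen`` side by side with a shared exception queue,
# roughly what combining auto-reload and serve modes does. ``args`` is assumed
# to be the parsed argparse namespace; BIND, PORT, and OUTPUT_PATH are
# settings names assumed to be present in the loaded configuration.
import queue
import threading

exc_queue = queue.Queue()
_, settings = get_instance(args)

threading.Thread(target=autoreload, args=(args, exc_queue), daemon=True).start()
threading.Thread(
    target=listen,
    args=(settings.get('BIND'), settings.get('PORT'),
          settings.get('OUTPUT_PATH'), exc_queue),
    daemon=True,
).start()

# ``autoreload`` puts None on KeyboardInterrupt; anything else on the queue
# is a formatted exception string from one of the two workers.
exc = exc_queue.get()
if exc is not None:
    logger.critical(exc)
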
def __call__(self, parser, namespace, values, option_string):
    init_logging(name=__name__)
    try:
        instance, settings = get_instance(namespace)
    except Exception as e:
        logger.critical("%s: %s", e.__class__.__name__, e)
        console.print_exception()
        sys.exit(getattr(e, 'exitcode', 1))

    if values:
        # One or more arguments provided, so only print those settings
        for setting in values:
            if setting in settings:
                # Insert a newline between name and value for dict/tuple/list values
                if isinstance(settings[setting], (dict, tuple, list)):
                    setting_format = '\n{}:\n{}'
                else:
                    setting_format = '\n{}: {}'
                console.print(
                    setting_format.format(
                        setting,
                        pprint.pformat(settings[setting])))
            else:
                console.print(
                    '\n{} is not a recognized setting.'.format(setting))
                break
    else:
        # No argument was given to --print-settings, so print all settings
        console.print(settings)

    parser.exit()
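
# Hedged sketch: wiring the action above into a command-line parser, assuming
# the enclosing ``argparse.Action`` subclass is named ``PrintSettings`` (only
# its ``__call__`` is shown here). Passing ``--print-settings`` with zero or
# more setting names on the command line then triggers the method above.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--print-settings', dest='settings_to_print', nargs='*',
                    action=PrintSettings, metavar='SETTING_NAME',
                    help='Print current configuration settings and exit.')
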
def run(self):
    """Run the generators and return"""
    start_time = time.time()

    context = self.settings.copy()
    # Share these among all the generators and content objects
    # They map source paths to Content objects or None
    context['generated_content'] = {}
    context['static_links'] = set()
    context['static_content'] = {}
    context['localsiteurl'] = self.settings['SITEURL']

    generators = [
        cls(
            context=context,
            settings=self.settings,
            path=self.path,
            theme=self.theme,
            output_path=self.output_path,
        ) for cls in self._get_generator_classes()
    ]

    # Delete the output directory if (1) the appropriate setting is True
    # and (2) that directory is not the parent of the source directory
    if (self.delete_outputdir and
            os.path.commonpath([os.path.realpath(self.output_path)]) !=
            os.path.commonpath([os.path.realpath(self.output_path),
                                os.path.realpath(self.path)])):
        clean_output_dir(self.output_path, self.output_retention)

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    for p in generators:
        if hasattr(p, 'refresh_metadata_intersite_links'):
            p.refresh_metadata_intersite_links()

    signals.all_generators_finalized.send(generators)

    writer = self._get_writer()

    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)

    signals.finalized.send(self)

    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))

    pluralized_articles = maybe_pluralize(
        (len(articles_generator.articles) +
         len(articles_generator.translations)),
        'article', 'articles')
    pluralized_drafts = maybe_pluralize(
        (len(articles_generator.drafts) +
         len(articles_generator.drafts_translations)),
        'draft', 'drafts')
    pluralized_hidden_articles = maybe_pluralize(
        (len(articles_generator.hidden_articles) +
         len(articles_generator.hidden_translations)),
        'hidden article', 'hidden articles')
    pluralized_pages = maybe_pluralize(
        (len(pages_generator.pages) +
         len(pages_generator.translations)),
        'page', 'pages')
    pluralized_hidden_pages = maybe_pluralize(
        (len(pages_generator.hidden_pages) +
         len(pages_generator.hidden_translations)),
        'hidden page', 'hidden pages')
    pluralized_draft_pages = maybe_pluralize(
        (len(pages_generator.draft_pages) +
         len(pages_generator.draft_translations)),
        'draft page', 'draft pages')

    console.print(
        'Done: Processed {}, {}, {}, {}, {} and {} in {:.2f} seconds.'.format(
            pluralized_articles, pluralized_drafts,
            pluralized_hidden_articles, pluralized_pages,
            pluralized_hidden_pages, pluralized_draft_pages,
            time.time() - start_time))
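
# Hedged usage sketch: driving the ``run`` method above from a standalone
# script, assuming it belongs to the ``Pelican`` class and that a settings
# module exists at the (illustrative) path ``pelicanconf.py``.
from pelican import Pelican
from pelican.settings import read_settings

settings = read_settings('pelicanconf.py')
Pelican(settings).run()
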