def initialize(self):
    # Create the app local to this worker.
    app = PieCrust(self.ctx.root_dir, debug=self.ctx.debug)
    app._useSubCacheDir(self.ctx.sub_cache_dir)
    app.config.set('baker/is_baking', True)
    app.config.set('baker/worker_id', self.wid)
    app.env.base_asset_url_format = '%uri%'
    app.env.fs_cache_only_for_main_page = True
    app.env.registerTimer("BakeWorker_%d_Total" % self.wid)
    app.env.registerTimer("BakeWorkerInit")
    app.env.registerTimer("JobReceive")
    apply_variant_and_values(app, self.ctx.config_variant,
                             self.ctx.config_values)
    self.ctx.app = app

    # Load previous record
    if self.ctx.previous_record_path:
        self.ctx.previous_record = BakeRecord.load(
            self.ctx.previous_record_path)
        self.ctx.previous_record_index = {}
        for e in self.ctx.previous_record.entries:
            key = _get_transition_key(e.path, e.taxonomy_info)
            self.ctx.previous_record_index[key] = e

    # Create the job handlers.
    job_handlers = {
        JOB_LOAD: LoadJobHandler(self.ctx),
        JOB_RENDER_FIRST: RenderFirstSubJobHandler(self.ctx),
        JOB_BAKE: BakeJobHandler(self.ctx)}
    for jh in job_handlers.values():
        app.env.registerTimer(type(jh).__name__)
    self.job_handlers = job_handlers

    app.env.stepTimerSince("BakeWorkerInit", self.work_start_time)
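# A hedged sketch (not from the source) of how the handlers registered in
# initialize() could be dispatched. The job dict layout (a 'type' key), the
# handleJob() method, and env.timerScope() being a context manager are all
# assumptions about this codebase.
def process(self, job):
    handler = self.job_handlers[job['type']]
    with self.ctx.app.env.timerScope(type(handler).__name__):
        return handler.handleJob(job)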
def get_app_for_server(root_dir, debug=False, sub_cache_dir=None):
    app = PieCrust(root_dir=root_dir, debug=debug)
    if sub_cache_dir:
        app._useSubCacheDir(sub_cache_dir)
    app.config.set('site/root', '/')
    app.config.set('server/is_serving', True)
    return app
def initialize(self):
    # Create the app local to this worker.
    app = PieCrust(self.ctx.root_dir, debug=self.ctx.debug)
    app._useSubCacheDir(self.ctx.sub_cache_dir)
    app.config.set('baker/is_baking', True)
    app.env.base_asset_url_format = '%uri%'
    app.env.fs_cache_only_for_main_page = True
    app.env.registerTimer("BakeWorker_%d_Total" % self.wid)
    app.env.registerTimer("BakeWorkerInit")
    app.env.registerTimer("JobReceive")
    apply_variant_and_values(app, self.ctx.config_variant,
                             self.ctx.config_values)
    self.ctx.app = app

    # Load previous record
    if self.ctx.previous_record_path:
        self.ctx.previous_record = BakeRecord.load(
            self.ctx.previous_record_path)
        self.ctx.previous_record_index = {}
        for e in self.ctx.previous_record.entries:
            key = _get_transition_key(e.path, e.taxonomy_info)
            self.ctx.previous_record_index[key] = e

    # Create the job handlers.
    job_handlers = {
        JOB_LOAD: LoadJobHandler(self.ctx),
        JOB_RENDER_FIRST: RenderFirstSubJobHandler(self.ctx),
        JOB_BAKE: BakeJobHandler(self.ctx)}
    for jh in job_handlers.values():
        app.env.registerTimer(type(jh).__name__)
    self.job_handlers = job_handlers

    app.env.stepTimerSince("BakeWorkerInit", self.work_start_time)
def get_app_for_server(root_dir, debug=False, sub_cache_dir=None,
                       root_url='/'):
    app = PieCrust(root_dir=root_dir, debug=debug)
    if sub_cache_dir:
        app._useSubCacheDir(sub_cache_dir)
    app.config.set('site/root', root_url)
    app.config.set('server/is_serving', True)
    return app
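# A minimal usage sketch for get_app_for_server; the site path and root URL
# below are hypothetical. It only checks the serving-related settings that
# the function applies.
if __name__ == '__main__':
    app = get_app_for_server('/path/to/mysite', debug=True,
                             root_url='/blog/')
    assert app.config.get('site/root') == '/blog/'
    assert app.config.get('server/is_serving') is True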
def _try_run_request(self, environ, start_response):
    request = Request(environ)

    # We don't support anything other than GET requests, since we're
    # previewing something that will be static later.
    if self.static_preview and request.method != 'GET':
        logger.error("Only GET requests are allowed, got %s" %
                     request.method)
        raise MethodNotAllowed()

    # Handle special requests right away.
    response = self._try_special_request(environ, request)
    if response is not None:
        return response(environ, start_response)

    # Also handle requests to a pipeline-built asset right away.
    response = self._try_serve_asset(environ, request)
    if response is not None:
        return response(environ, start_response)

    # Create the app for this request.
    app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    if self.sub_cache_dir:
        app._useSubCacheDir(self.sub_cache_dir)
    app.config.set('site/root', '/')
    app.config.set('server/is_serving', True)
    if (app.config.get('site/enable_debug_info') and
            self.enable_debug_info and
            '!debug' in request.args):
        app.config.set('site/show_debug_info', True)

    # We'll serve page assets directly from where they are.
    app.env.base_asset_url_format = '/_asset/%path%'

    # Let's see if it can be a page asset.
    response = self._try_serve_page_asset(app, environ, request)
    if response is not None:
        return response(environ, start_response)

    # Nope. Let's see if it's an actual page.
    try:
        response = self._try_serve_page(app, environ, request)
        return response(environ, start_response)
    except (RouteNotFoundError, SourceNotFoundError) as ex:
        raise NotFound(str(ex)) from ex
    except HTTPException:
        raise
    except Exception as ex:
        if app.debug:
            logger.exception(ex)
            raise
        msg = str(ex)
        logger.error(msg)
        raise InternalServerError(msg) from ex
def _try_run_request(self, environ, start_response):
    request = Request(environ)

    # We don't support anything other than GET requests, since we're
    # previewing something that will be static later.
    if self.static_preview and request.method != 'GET':
        logger.error("Only GET requests are allowed, got %s" %
                     request.method)
        raise MethodNotAllowed()

    # Handle special requests right away.
    response = self._try_special_request(environ, request)
    if response is not None:
        return response(environ, start_response)

    # Also handle requests to a pipeline-built asset right away.
    response = self._try_serve_asset(environ, request)
    if response is not None:
        return response(environ, start_response)

    # Create the app for this request.
    app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    app._useSubCacheDir(self.sub_cache_dir)
    app.config.set('site/root', '/')
    app.config.set('server/is_serving', True)
    if (app.config.get('site/enable_debug_info') and
            self.enable_debug_info and
            '!debug' in request.args):
        app.config.set('site/show_debug_info', True)

    # We'll serve page assets directly from where they are.
    app.env.base_asset_url_format = '/_asset/%path%'

    # Let's see if it can be a page asset.
    response = self._try_serve_page_asset(app, environ, request)
    if response is not None:
        return response(environ, start_response)

    # Nope. Let's see if it's an actual page.
    try:
        response = self._try_serve_page(app, environ, request)
        return response(environ, start_response)
    except (RouteNotFoundError, SourceNotFoundError) as ex:
        raise NotFound(str(ex)) from ex
    except HTTPException:
        raise
    except Exception as ex:
        if app.debug:
            logger.exception(ex)
            raise
        msg = str(ex)
        logger.error(msg)
        raise InternalServerError(msg) from ex
def _get_location_response(source_name, content_item):
    from piecrust.app import PieCrust
    pcapp = PieCrust(g.site.root_dir)
    source = pcapp.getSource(source_name)

    page = Page(source, content_item)
    uri = page.getUri()

    logger.debug("Redirecting to: %s" % uri)
    r = Response()
    r.status_code = 201
    r.headers.add('Location', uri)
    return r
def test_default_source_resolve_ref(ref_path, expected_path,
                                    expected_metadata):
    fs = mock_fs()
    fs.withConfig({
        'site': {
            'sources': {
                'test': {}},
            'routes': [
                {'url': '/%path%', 'source': 'test'}]
        }
    })
    expected_path = fs.path(expected_path).replace('/', os.sep)
    with mock_fs_scope(fs):
        app = PieCrust(fs.path('kitchen'), cache=False)
        s = app.getSource('test')
        actual_path, actual_metadata = s.resolveRef(ref_path)
        assert actual_path == expected_path
        assert actual_metadata == expected_metadata
def test_default_source_factories(fs, expected_paths, expected_slugs):
    fs.withConfig({
        'site': {
            'sources': {
                'test': {}},
            'routes': [
                {'url': '/%path%', 'source': 'test'}]
        }
    })
    fs.withDir('kitchen/test')
    with mock_fs_scope(fs):
        app = PieCrust(fs.path('kitchen'), cache=False)
        s = app.getSource('test')
        facs = list(s.buildPageFactories())
        paths = [f.rel_path for f in facs]
        assert paths == expected_paths
        slugs = [f.metadata['slug'] for f in facs]
        assert slugs == expected_slugs
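# A hedged sketch of the pytest parametrization that could drive tests like
# the ones above; the actual cases in the test module may differ, and the
# chainable withPage() helper on mock_fs is an assumption. The test body is
# elided since it would mirror test_default_source_factories.
@pytest.mark.parametrize('fs, expected_paths, expected_slugs', [
    (mock_fs(), [], []),
    (mock_fs().withPage('test/foo.html'), ['foo.html'], ['foo'])])
def test_default_source_factories_cases(fs, expected_paths, expected_slugs):
    pass  # same body as test_default_source_factories above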
def _initPipeline(self):
    # Create the app and pipeline.
    self.app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    if self.sub_cache_dir:
        self.app._useSubCacheDir(self.sub_cache_dir)
    self.pipeline = ProcessorPipeline(self.app, self.out_dir)

    # Get the list of assets directories.
    self._roots = list(self.pipeline.mounts.keys())

    # The 'assets' folder may not be in the mounts list if it doesn't
    # exist yet, but we want to monitor for when the user creates it.
    default_root = os.path.join(self.app.root_dir, 'assets')
    self._monitor_assets_root = (default_root not in self._roots)

    # Build the list of initial asset files.
    self._paths = set()
    for root in self._roots:
        for dirpath, dirnames, filenames in os.walk(root):
            self._paths |= set([os.path.join(dirpath, f)
                                for f in filenames])
def getWsgiApp(self):
    # Bake all the assets so we know what we have, and so we can serve
    # them to the client. We need a temp app for this.
    app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    app._useSubCacheDir(self.sub_cache_dir)
    self._out_dir = os.path.join(app.sub_cache_dir, 'server')
    self._page_record = ServeRecord()

    if not self.run_sse_check or self.run_sse_check():
        # When using a server with code reloading, some implementations
        # use process forking and we end up going here twice. We only want
        # to start the pipeline loop in the inner process most of the
        # time so we let the implementation tell us if this is OK.
        from piecrust.processing.base import ProcessorPipeline
        from piecrust.serving.procloop import ProcessingLoop
        pipeline = ProcessorPipeline(app, self._out_dir)
        self._proc_loop = ProcessingLoop(pipeline)
        self._proc_loop.start()

    # Run the WSGI app.
    wsgi_wrapper = WsgiServerWrapper(self)
    return wsgi_wrapper
def getWsgiApp(self):
    # Bake all the assets so we know what we have, and so we can serve
    # them to the client. We need a temp app for this.
    app = PieCrust(root_dir=self.root_dir, debug=self.debug)
    if self.sub_cache_dir:
        app._useSubCacheDir(self.sub_cache_dir)
    self._out_dir = os.path.join(app.sub_cache_dir, 'server')

    if not self.run_sse_check or self.run_sse_check():
        # When using a server with code reloading, some implementations
        # use process forking and we end up going here twice. We only want
        # to start the pipeline loop in the inner process most of the
        # time so we let the implementation tell us if this is OK.
        from piecrust.processing.pipeline import ProcessorPipeline
        from piecrust.serving.procloop import ProcessingLoop
        pipeline = ProcessorPipeline(app, self._out_dir)
        self._proc_loop = ProcessingLoop(pipeline)
        self._proc_loop.start()

    # Run the WSGI app.
    wsgi_wrapper = WsgiServerWrapper(self)
    return wsgi_wrapper
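# A hedged usage sketch: handing the WSGI wrapper to werkzeug's development
# server. Only getWsgiApp() comes from the code above; the Server constructor
# arguments and the site path are assumptions.
from werkzeug.serving import run_simple

server = Server('/path/to/mysite', debug=True)  # hypothetical args
run_simple('localhost', 8080, server.getWsgiApp(), use_reloader=False)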
def initialize(self):
    # Create the app local to this worker.
    app = PieCrust(self.ctx.root_dir, debug=self.ctx.debug)
    app.env.registerTimer("PipelineWorker_%d_Total" % self.wid)
    app.env.registerTimer("PipelineWorkerInit")
    app.env.registerTimer("JobReceive")
    app.env.registerTimer('BuildProcessingTree')
    app.env.registerTimer('RunProcessingTree')
    self.app = app

    processors = app.plugin_loader.getProcessors()
    if self.ctx.enabled_processors:
        logger.debug("Filtering processors to: %s" %
                     self.ctx.enabled_processors)
        processors = get_filtered_processors(
            processors, self.ctx.enabled_processors)
    if self.ctx.additional_processors:
        logger.debug("Adding %s additional processors." %
                     len(self.ctx.additional_processors))
        for proc in self.ctx.additional_processors:
            app.env.registerTimer(proc.__class__.__name__)
            proc.initialize(app)
            processors.append(proc)
    self.processors = processors

    # Invoke pre-processors.
    pipeline_ctx = PipelineContext(self.wid, self.app, self.ctx.out_dir,
                                   self.ctx.tmp_dir, self.ctx.force)
    for proc in processors:
        proc.onPipelineStart(pipeline_ctx)

    # Sort our processors again in case the pre-process step involved
    # patching the processors with some new ones.
    processors.sort(key=lambda p: p.priority)

    app.env.stepTimerSince("PipelineWorkerInit", self.work_start_time)
def getApp(self, *, cache=True, theme_site=False):
    root_dir = self.path('/kitchen')
    return PieCrust(root_dir, cache=cache, debug=True,
                    theme_site=theme_site)
class ProcessingLoop(threading.Thread):
    def __init__(self, root_dir, out_dir, sub_cache_dir=None, debug=False):
        super(ProcessingLoop, self).__init__(
            name='pipeline-reloader', daemon=True)
        self.root_dir = root_dir
        self.out_dir = out_dir
        self.sub_cache_dir = sub_cache_dir
        self.debug = debug
        self.last_status_id = 0
        self.interval = 1
        self.app = None
        self._roots = []
        self._monitor_assets_root = False
        self._paths = set()
        self._config_path = os.path.join(root_dir, 'config.yml')
        self._record = None
        self._last_bake = 0
        self._last_config_mtime = 0
        self._obs = []
        self._obs_lock = threading.Lock()

    def addObserver(self, obs):
        with self._obs_lock:
            self._obs.append(obs)

    def removeObserver(self, obs):
        with self._obs_lock:
            self._obs.remove(obs)

    def run(self):
        self._initPipeline()

        self._last_bake = time.time()
        self._last_config_mtime = os.path.getmtime(self._config_path)
        self._record = self.pipeline.run()

        while True:
            cur_config_time = os.path.getmtime(self._config_path)
            if self._last_config_mtime < cur_config_time:
                logger.info("Site configuration changed, reloading pipeline.")
                self._last_config_mtime = cur_config_time
                self._initPipeline()
                for root in self._roots:
                    self._runPipeline(root)
                continue

            if self._monitor_assets_root:
                assets_dir = os.path.join(self.app.root_dir, 'assets')
                if os.path.isdir(assets_dir):
                    logger.info("Assets directory was created, reloading "
                                "pipeline.")
                    self._initPipeline()
                    self._runPipeline(assets_dir)
                    continue

            for root in self._roots:
                # For each mount root we try to find the first new or
                # modified file. If any, we just run the pipeline on
                # that mount.
                found_new_or_modified = False
                for dirpath, dirnames, filenames in os.walk(root):
                    for filename in filenames:
                        path = os.path.join(dirpath, filename)
                        if path not in self._paths:
                            logger.debug("Found new asset: %s" % path)
                            self._paths.add(path)
                            found_new_or_modified = True
                            break
                        if os.path.getmtime(path) > self._last_bake:
                            logger.debug("Found modified asset: %s" % path)
                            found_new_or_modified = True
                            break
                    if found_new_or_modified:
                        break
                if found_new_or_modified:
                    self._runPipeline(root)

            time.sleep(self.interval)

    def _initPipeline(self):
        # Create the app and pipeline.
        self.app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        if self.sub_cache_dir:
            self.app._useSubCacheDir(self.sub_cache_dir)
        self.pipeline = ProcessorPipeline(self.app, self.out_dir)

        # Get the list of assets directories.
        self._roots = list(self.pipeline.mounts.keys())

        # The 'assets' folder may not be in the mounts list if it doesn't
        # exist yet, but we want to monitor for when the user creates it.
        default_root = os.path.join(self.app.root_dir, 'assets')
        self._monitor_assets_root = (default_root not in self._roots)

        # Build the list of initial asset files.
        self._paths = set()
        for root in self._roots:
            for dirpath, dirnames, filenames in os.walk(root):
                self._paths |= set([os.path.join(dirpath, f)
                                    for f in filenames])

    def _runPipeline(self, root):
        self._last_bake = time.time()
        try:
            self._record = self.pipeline.run(
                root,
                previous_record=self._record,
                save_record=False)

            status_id = self.last_status_id + 1
            self.last_status_id += 1

            if self._record.success:
                changed = filter(
                    lambda i: not i.was_collapsed_from_last_run,
                    self._record.entries)
                changed = itertools.chain.from_iterable(
                    map(lambda i: i.rel_outputs, changed))
                changed = list(changed)
                item = {
                    'id': status_id,
                    'type': 'pipeline_success',
                    'assets': changed}
                self._notifyObservers(item)
            else:
                item = {
                    'id': status_id,
                    'type': 'pipeline_error',
                    'assets': []}
                for entry in self._record.entries:
                    if entry.errors:
                        asset_item = {
                            'path': entry.path,
                            'errors': list(entry.errors)}
                        item['assets'].append(asset_item)
                self._notifyObservers(item)
        except Exception as ex:
            logger.exception(ex)

    def _notifyObservers(self, item):
        with self._obs_lock:
            observers = list(self._obs)
        for obs in observers:
            obs.addBuildEvent(item)
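# A hedged observer sketch (not from the source): ProcessingLoop only needs
# an object with an addBuildEvent(item) method, where item is a dict of the
# shape built in _runPipeline above ('id', 'type', 'assets'). The class name
# and paths below are hypothetical.
class PrintingObserver:
    def addBuildEvent(self, item):
        # Called from the pipeline thread after each re-bake attempt.
        print("[%d] %s: %d asset(s)" %
              (item['id'], item['type'], len(item['assets'])))

proc_loop = ProcessingLoop('/path/to/mysite', '/tmp/out')
proc_loop.addObserver(PrintingObserver())
proc_loop.start()  # daemon thread; polls for changes until the process exits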
def getApp(self, cache=True):
    root_dir = self.path('/kitchen')
    return PieCrust(root_dir, cache=cache, debug=True)
def _run_chef(pre_args):
    # Setup the app.
    # (time.clock() was removed in Python 3.8; use perf_counter() instead.)
    start_time = time.perf_counter()
    root = pre_args.root
    if root is None:
        try:
            root = find_app_root()
        except SiteNotFoundError:
            root = None

    if not root:
        app = NullPieCrust()
    else:
        app = PieCrust(root, cache=pre_args.cache, debug=pre_args.debug)

    # Build a hash for a custom cache directory.
    cache_key = 'default'

    # Handle a configuration variant.
    if pre_args.config_variant is not None:
        if not root:
            raise SiteNotFoundError("Can't apply any variant.")
        app.config.applyVariant('variants/' + pre_args.config_variant)
        cache_key += ',variant=%s' % pre_args.config_variant
    for name, value in pre_args.config_values:
        logger.debug("Setting configuration '%s' to: %s" % (name, value))
        app.config.set(name, value)
        cache_key += ',%s=%s' % (name, value)

    # Setup the arg parser.
    parser = argparse.ArgumentParser(
        prog='chef',
        description="The PieCrust chef manages your website.",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--version',
        action='version',
        version=('%(prog)s ' + APP_VERSION))
    parser.add_argument(
        '--root',
        help="The root directory of the website.")
    parser.add_argument(
        '--config',
        help="The configuration variant to use for this command.")
    parser.add_argument(
        '--config-set',
        help="Sets a specific site configuration setting.")
    parser.add_argument(
        '--debug',
        help="Show debug information.",
        action='store_true')
    parser.add_argument(
        '--no-cache',
        help="When applicable, disable caching.",
        action='store_true')
    parser.add_argument(
        '--quiet',
        help="Print only important information.",
        action='store_true')
    parser.add_argument(
        '--log',
        help="Send log messages to the specified file.")
    parser.add_argument(
        '--log-debug',
        help="Log debug messages to the log file.",
        action='store_true')

    commands = sorted(app.plugin_loader.getCommands(),
                      key=lambda c: c.name)
    subparsers = parser.add_subparsers(title='list of commands')
    for c in commands:
        p = subparsers.add_parser(c.name, help=c.description)
        c.setupParser(p, app)
        p.set_defaults(func=c.checkedRun)
        p.set_defaults(cache_name=c.cache_name)

    help_cmd = next(filter(lambda c: c.name == 'help', commands), None)
    if help_cmd and help_cmd.has_topics:
        with io.StringIO() as epilog:
            epilog.write("additional help topics:\n")
            for name, desc in help_cmd.getTopics():
                print_help_item(epilog, name, desc)
            parser.epilog = epilog.getvalue()

    # Parse the command line.
    result = parser.parse_args()
    logger.debug(
        format_timed(start_time, 'initialized PieCrust', colored=False))

    # Print the help if no command was specified.
    if not hasattr(result, 'func'):
        parser.print_help()
        return 0

    # Use a customized cache for the command and current config.
    if result.cache_name != 'default' or cache_key != 'default':
        app.useSubCache(result.cache_name, cache_key)

    # Run the command!
    ctx = CommandContext(app, parser, result)
    exit_code = result.func(ctx)
    if exit_code is None:
        return 0
    if not isinstance(exit_code, int):
        logger.error("Got non-integer exit code: %s" % exit_code)
        return -1
    return exit_code
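# A hedged sketch of the kind of pre-parsing that could produce pre_args for
# _run_chef; the function name and flag spellings are illustrative, not the
# project's actual entry point. It only extracts the global flags that
# _run_chef reads before building the full parser.
import argparse

def pre_parse_chef_args(argv):
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--root')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--no-cache', dest='cache', action='store_false',
                        default=True)
    parser.add_argument('--config', dest='config_variant')
    parser.add_argument('--config-set', dest='config_values', nargs=2,
                        action='append', default=[],
                        metavar=('NAME', 'VALUE'))
    # Ignore anything else; the full parser in _run_chef handles it later.
    res, _ = parser.parse_known_args(argv)
    return res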