Example 1
    def _bakeAssets(self, ctx, out_dir):
        # Run the asset processing pipeline and merge its timers into
        # the command context before reporting success or failure.
        proc = ProcessorPipeline(
                ctx.app, out_dir,
                force=ctx.args.force)
        record = proc.run()
        _merge_timers(record.timers, ctx.timers)
        return record.success
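The _merge_timers helper isn't shown in these snippets. Below is a minimal
sketch of what it might do, assuming timers are plain dicts mapping a timer
name to elapsed seconds (that dict shape is an assumption, not a documented
PieCrust API); the _merge_stats call in Example 2 would follow the same
pattern.

def _merge_timers(source, target):
    # Hypothetical sketch: fold each timer from the pipeline record into
    # the command context, summing durations for names seen more than once.
    if source is None:
        return
    for name, val in source.items():
        if name not in target:
            target[name] = val
        else:
            target[name] += val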
Example 2
    def _bakeAssets(self, ctx, out_dir):
        # Like Example 1, but forwards any applied configuration variant
        # and overrides, and merges stats instead of timers.
        proc = ProcessorPipeline(
                ctx.app, out_dir,
                force=ctx.args.force,
                applied_config_variant=ctx.config_variant,
                applied_config_values=ctx.config_values)
        record = proc.run()
        _merge_stats(record.stats, ctx.stats)
        return record.success
Example 3
    def _initPipeline(self):
        # Create the app and pipeline.
        self.app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        if self.sub_cache_dir:
            self.app._useSubCacheDir(self.sub_cache_dir)
        self.pipeline = ProcessorPipeline(self.app, self.out_dir)

        # Get the list of assets directories.
        self._roots = list(self.pipeline.mounts.keys())

        # The 'assets' folder may not be in the mounts list if it doesn't
        # exist yet, but we want to monitor for when the user creates it.
        default_root = os.path.join(self.app.root_dir, 'assets')
        self._monitor_assets_root = (default_root not in self._roots)

        # Build the list of initial asset files.
        self._paths = set()
        for root in self._roots:
            for dirpath, dirnames, filenames in os.walk(root):
                self._paths |= {os.path.join(dirpath, f)
                                for f in filenames}
Example 4
    def runtest(self):
        fs = self._prepareMockFs()

        from piecrust.processing.pipeline import ProcessorPipeline
        with mock_fs_scope(fs):
            out_dir = fs.path('kitchen/_counter')
            app = fs.getApp()
            pipeline = ProcessorPipeline(app, out_dir)

            proc_names = self.spec.get('processors')
            if proc_names:
                pipeline.enabled_processors = proc_names

            record = pipeline.run()

            if not record.success:
                errors = []
                for e in record.entries:
                    errors += e.errors
                raise PipelineError(errors)

            check_expected_outputs(self.spec, fs, ExpectedPipelineOutputError)
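The check_expected_outputs helper isn't shown either. A plausible sketch,
assuming the test spec carries an 'outputs' mapping of output-relative paths
to expected file contents (the 'outputs' key and the fixed 'kitchen/_counter'
prefix are assumptions):

import os

def check_expected_outputs(spec, fs, error_type):
    # Hypothetical sketch: verify that every expected output file exists
    # in the output directory and has the expected contents.
    expected = spec.get('outputs') or {}
    for rel_path, expected_text in expected.items():
        full_path = fs.path('kitchen/_counter/' + rel_path)
        if not os.path.isfile(full_path):
            raise error_type("Missing expected output: %s" % rel_path)
        with open(full_path, 'r', encoding='utf8') as fp:
            actual = fp.read()
        if expected_text is not None and actual != expected_text:
            raise error_type("Unexpected content in: %s" % rel_path)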
Example 5
    def _initPipeline(self):
        # Create the app and pipeline.
        self.app = self.appfactory.create()
        self.pipeline = ProcessorPipeline(self.app, self.out_dir)

        # Get the list of assets directories.
        self._roots = list(self.pipeline.mounts.keys())

        # The 'assets' folder may not be in the mounts list if it doesn't
        # exist yet, but we want to monitor for when the user creates it.
        default_root = os.path.join(self.app.root_dir, 'assets')
        self._monitor_assets_root = (default_root not in self._roots)

        # Build the list of initial asset files.
        self._paths = set()
        for root in self._roots:
            for dirpath, dirnames, filenames in os.walk(root):
                self._paths |= {os.path.join(dirpath, f)
                                for f in filenames}
Example 6
    def _initPipeline(self):
        # Create the app and pipeline.
        self.app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        if self.sub_cache_dir:
            self.app._useSubCacheDir(self.sub_cache_dir)
        self.pipeline = ProcessorPipeline(self.app, self.out_dir)

        # Get the list of assets directories.
        self._roots = list(self.pipeline.mounts.keys())

        # The 'assets' folder may not be in the mounts list if it doesn't
        # exist yet, but we want to monitor for when the user creates it.
        default_root = os.path.join(self.app.root_dir, "assets")
        self._monitor_assets_root = default_root not in self._roots

        # Build the list of initial asset files.
        self._paths = set()
        for root in self._roots:
            for dirpath, dirnames, filenames in os.walk(root):
                self._paths |= {os.path.join(dirpath, f) for f in filenames}
Example 7
class ProcessingLoop(threading.Thread):
    def __init__(self, root_dir, out_dir, sub_cache_dir=None, debug=False):
        super().__init__(name='pipeline-reloader', daemon=True)
        self.root_dir = root_dir
        self.out_dir = out_dir
        self.sub_cache_dir = sub_cache_dir
        self.debug = debug
        self.last_status_id = 0
        self.interval = 1
        self.app = None
        self._roots = []
        self._monitor_assets_root = False
        self._paths = set()
        self._config_path = os.path.join(root_dir, 'config.yml')
        self._record = None
        self._last_bake = 0
        self._last_config_mtime = 0
        self._obs = []
        self._obs_lock = threading.Lock()

    def addObserver(self, obs):
        with self._obs_lock:
            self._obs.append(obs)

    def removeObserver(self, obs):
        with self._obs_lock:
            self._obs.remove(obs)

    def run(self):
        self._initPipeline()

        self._last_bake = time.time()
        self._last_config_mtime = os.path.getmtime(self._config_path)
        self._record = self.pipeline.run()

        while True:
            cur_config_time = os.path.getmtime(self._config_path)
            if self._last_config_mtime < cur_config_time:
                logger.info("Site configuration changed, reloading pipeline.")
                self._last_config_mtime = cur_config_time
                self._initPipeline()
                for root in self._roots:
                    self._runPipeline(root)
                continue

            if self._monitor_assets_root:
                assets_dir = os.path.join(self.app.root_dir, 'assets')
                if os.path.isdir(assets_dir):
                    logger.info("Assets directory was created, reloading "
                                "pipeline.")
                    self._initPipeline()
                    self._runPipeline(assets_dir)
                    continue

            for root in self._roots:
                # For each mount root we try to find the first new or
                # modified file. If any, we just run the pipeline on
                # that mount.
                found_new_or_modified = False
                for dirpath, dirnames, filenames in os.walk(root):
                    for filename in filenames:
                        path = os.path.join(dirpath, filename)
                        if path not in self._paths:
                            logger.debug("Found new asset: %s" % path)
                            self._paths.add(path)
                            found_new_or_modified = True
                            break
                        if os.path.getmtime(path) > self._last_bake:
                            logger.debug("Found modified asset: %s" % path)
                            found_new_or_modified = True
                            break

                    if found_new_or_modified:
                        break

                if found_new_or_modified:
                    self._runPipeline(root)

            time.sleep(self.interval)

    def _initPipeline(self):
        # Create the app and pipeline.
        self.app = PieCrust(root_dir=self.root_dir, debug=self.debug)
        if self.sub_cache_dir:
            self.app._useSubCacheDir(self.sub_cache_dir)
        self.pipeline = ProcessorPipeline(self.app, self.out_dir)

        # Get the list of assets directories.
        self._roots = list(self.pipeline.mounts.keys())

        # The 'assets' folder may not be in the mounts list if it doesn't
        # exist yet, but we want to monitor for when the user creates it.
        default_root = os.path.join(self.app.root_dir, 'assets')
        self._monitor_assets_root = (default_root not in self._roots)

        # Build the list of initial asset files.
        self._paths = set()
        for root in self._roots:
            for dirpath, dirnames, filenames in os.walk(root):
                self._paths |= {os.path.join(dirpath, f)
                                for f in filenames}

    def _runPipeline(self, root):
        self._last_bake = time.time()
        try:
            self._record = self.pipeline.run(
                    root,
                    previous_record=self._record,
                    save_record=False)

            status_id = self.last_status_id + 1
            self.last_status_id += 1

            if self._record.success:
                changed = filter(
                        lambda i: not i.was_collapsed_from_last_run,
                        self._record.entries)
                changed = itertools.chain.from_iterable(
                        map(lambda i: i.rel_outputs, changed))
                changed = list(changed)
                item = {
                        'id': status_id,
                        'type': 'pipeline_success',
                        'assets': changed}

                self._notifyObservers(item)
            else:
                item = {
                        'id': status_id,
                        'type': 'pipeline_error',
                        'assets': []}
                for entry in self._record.entries:
                    if entry.errors:
                        asset_item = {
                                'path': entry.path,
                                'errors': list(entry.errors)}
                        item['assets'].append(asset_item)

                self._notifyObservers(item)
        except Exception as ex:
            logger.exception(ex)

    def _notifyObservers(self, item):
        with self._obs_lock:
            observers = list(self._obs)
        for obs in observers:
            obs.addBuildEvent(item)
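Observers registered through addObserver only need to expose an
addBuildEvent(item) method; the shape of the item dict can be read off
_runPipeline above. A minimal hypothetical observer that just logs
build events:

class LoggingObserver:
    # Hypothetical observer; anything with an addBuildEvent(item)
    # method can be registered on the ProcessingLoop.
    def addBuildEvent(self, item):
        if item['type'] == 'pipeline_success':
            print("Build #%d OK, %d asset(s) changed" %
                  (item['id'], len(item['assets'])))
        else:
            print("Build #%d failed:" % item['id'])
            for asset in item['assets']:
                print("  %s: %s" %
                      (asset['path'], '; '.join(asset['errors'])))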
Example 8
    def run(self, target,
            force=False, preview=False, extra_args=None, log_file=None,
            applied_config_variant=None, applied_config_values=None):
        start_time = time.perf_counter()

        # Get publisher for this target.
        pub = self.app.getPublisher(target)
        if pub is None:
            raise InvalidPublishTargetError(
                    "No such publish target: %s" % target)

        # Will we need to bake first?
        bake_first = True
        if not pub.has_url_config:
            bake_first = pub.getConfigValue('bake', True)

        # Setup logging stuff.
        hdlr = None
        root_logger = logging.getLogger()
        if log_file and not preview:
            logger.debug("Adding file handler for: %s" % log_file)
            hdlr = logging.FileHandler(log_file, mode='w', encoding='utf8')
            root_logger.addHandler(hdlr)
        if not preview:
            logger.info("Deploying to %s" % target)
        else:
            logger.info("Previewing deployment to %s" % target)

        # Bake first if necessary.
        rec1 = None
        rec2 = None
        was_baked = False
        bake_out_dir = os.path.join(self.app.root_dir, '_pub', target)
        if bake_first:
            if not preview:
                bake_start_time = time.perf_counter()
                logger.debug("Baking first to: %s" % bake_out_dir)

                from piecrust.baking.baker import Baker
                baker = Baker(
                        self.app, bake_out_dir,
                        applied_config_variant=applied_config_variant,
                        applied_config_values=applied_config_values)
                rec1 = baker.bake()

                from piecrust.processing.pipeline import ProcessorPipeline
                proc = ProcessorPipeline(
                        self.app, bake_out_dir,
                        applied_config_variant=applied_config_variant,
                        applied_config_values=applied_config_values)
                rec2 = proc.run()

                was_baked = True

                if not rec1.success or not rec2.success:
                    raise Exception(
                            "Error during baking, aborting publishing.")
                logger.info(format_timed(bake_start_time, "Baked website."))
            else:
                logger.info("Would bake to: %s" % bake_out_dir)

        # Publish!
        logger.debug(
                "Running publish target '%s' with publisher: %s" %
                (target, pub.PUBLISHER_NAME))
        pub_start_time = time.perf_counter()

        ctx = PublishingContext()
        ctx.bake_out_dir = bake_out_dir
        ctx.bake_record = rec1
        ctx.processing_record = rec2
        ctx.was_baked = was_baked
        ctx.preview = preview
        ctx.args = extra_args
        try:
            pub.run(ctx)
        except Exception as ex:
            raise PublishingError(
                    "Error publishing to target: %s" % target) from ex
        finally:
            if hdlr:
                root_logger.removeHandler(hdlr)
                hdlr.close()

        logger.info(format_timed(
            pub_start_time, "Ran publisher %s" % pub.PUBLISHER_NAME))

        logger.info(format_timed(start_time, 'Deployed to %s' % target))
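A hypothetical call site for this run method (the owning class name
Publisher and the 'production' target name are assumptions; real target
names come from the site configuration):

# Hypothetical usage: preview the deployment first, then run it for real.
publisher = Publisher(app)
try:
    publisher.run('production', preview=True)
    publisher.run('production')
except PublishingError as ex:
    logger.error(ex)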
Example 9
class ProcessingLoop(threading.Thread):
    def __init__(self, appfactory, out_dir):
        super().__init__(name='pipeline-reloader', daemon=True)
        self.appfactory = appfactory
        self.out_dir = out_dir
        self.last_status_id = 0
        self.interval = 1
        self.app = None
        self._roots = []
        self._monitor_assets_root = False
        self._paths = set()
        self._record = None
        self._last_bake = 0
        self._last_config_mtime = 0
        self._obs = []
        self._obs_lock = threading.Lock()
        config_name = (
                THEME_CONFIG_PATH if appfactory.theme_site else CONFIG_PATH)
        self._config_path = os.path.join(appfactory.root_dir, config_name)

    def addObserver(self, obs):
        with self._obs_lock:
            self._obs.append(obs)

    def removeObserver(self, obs):
        with self._obs_lock:
            self._obs.remove(obs)

    def run(self):
        self._initPipeline()

        self._last_bake = time.time()
        self._last_config_mtime = os.path.getmtime(self._config_path)
        self._record = self.pipeline.run()

        while True:
            cur_config_time = os.path.getmtime(self._config_path)
            if self._last_config_mtime < cur_config_time:
                logger.info("Site configuration changed, reloading pipeline.")
                self._last_config_mtime = cur_config_time
                self._initPipeline()
                for root in self._roots:
                    self._runPipeline(root)
                continue

            if self._monitor_assets_root:
                assets_dir = os.path.join(self.app.root_dir, 'assets')
                if os.path.isdir(assets_dir):
                    logger.info("Assets directory was created, reloading "
                                "pipeline.")
                    self._initPipeline()
                    self._runPipeline(assets_dir)
                    continue

            for root in self._roots:
                # For each mount root we try to find the first new or
                # modified file. If any, we just run the pipeline on
                # that mount.
                found_new_or_modified = False
                for dirpath, dirnames, filenames in os.walk(root):
                    for filename in filenames:
                        path = os.path.join(dirpath, filename)
                        if path not in self._paths:
                            logger.debug("Found new asset: %s" % path)
                            self._paths.add(path)
                            found_new_or_modified = True
                            break
                        if os.path.getmtime(path) > self._last_bake:
                            logger.debug("Found modified asset: %s" % path)
                            found_new_or_modified = True
                            break

                    if found_new_or_modified:
                        break

                if found_new_or_modified:
                    self._runPipeline(root)

            time.sleep(self.interval)

    def _initPipeline(self):
        # Create the app and pipeline.
        self.app = self.appfactory.create()
        self.pipeline = ProcessorPipeline(self.app, self.out_dir)

        # Get the list of assets directories.
        self._roots = list(self.pipeline.mounts.keys())

        # The 'assets' folder may not be in the mounts list if it doesn't
        # exist yet, but we want to monitor for when the user creates it.
        default_root = os.path.join(self.app.root_dir, 'assets')
        self._monitor_assets_root = (default_root not in self._roots)

        # Build the list of initial asset files.
        self._paths = set()
        for root in self._roots:
            for dirpath, dirnames, filenames in os.walk(root):
                self._paths |= {os.path.join(dirpath, f)
                                for f in filenames}

    def _runPipeline(self, root):
        self._last_bake = time.time()
        try:
            self._record = self.pipeline.run(
                    root,
                    previous_record=self._record,
                    save_record=False)

            status_id = self.last_status_id + 1
            self.last_status_id += 1

            if self._record.success:
                changed = filter(
                        lambda i: not i.was_collapsed_from_last_run,
                        self._record.entries)
                changed = itertools.chain.from_iterable(
                        map(lambda i: i.rel_outputs, changed))
                changed = list(changed)
                item = {
                        'id': status_id,
                        'type': 'pipeline_success',
                        'assets': changed}

                self._notifyObservers(item)
            else:
                item = {
                        'id': status_id,
                        'type': 'pipeline_error',
                        'assets': []}
                for entry in self._record.entries:
                    if entry.errors:
                        asset_item = {
                                'path': entry.path,
                                'errors': list(entry.errors)}
                        item['assets'].append(asset_item)

                self._notifyObservers(item)
        except Exception as ex:
            logger.exception(ex)

    def _notifyObservers(self, item):
        with self._obs_lock:
            observers = list(self._obs)
        for obs in observers:
            obs.addBuildEvent(item)
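Since ProcessingLoop is a daemon thread, wiring it into a dev-server style
command only requires starting it; a minimal hypothetical setup (how the
appfactory and out_dir are obtained is elided):

# Hypothetical wiring: run the asset pipeline loop in the background
# alongside the development server.
proc_loop = ProcessingLoop(appfactory, out_dir)
proc_loop.addObserver(LoggingObserver())  # observer sketched earlier
proc_loop.start()  # daemon thread; it exits with the main process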
Example 10
    def _bakeAssets(self, ctx, out_dir):
        proc = ProcessorPipeline(ctx.app, out_dir, force=ctx.args.force)
        record = proc.run()
        _merge_timers(record.timers, ctx.timers)
        return record.success
Example 11
def _get_pipeline(fs, app=None):
    app = app or fs.getApp()
    return ProcessorPipeline(app, fs.path('counter'))
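A hypothetical test built on this fixture helper, following the pattern of
Example 4 (the mock_fs constructor name is an assumption, inferred from the
mock_fs_scope naming above):

def test_pipeline_runs_on_mock_fs():
    # Hypothetical test: run the pipeline over a mock file system and
    # check that the run succeeded.
    fs = mock_fs()
    with mock_fs_scope(fs):
        pipeline = _get_pipeline(fs)
        record = pipeline.run()
        assert record.success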