Example #1
0
 def setUp(self):
     """Render the signer and validator pipeline templates to temporary
     files, then run both pipelines against fresh in-memory repositories.

     Results are stored in ``self.signer_result`` / ``self.validator_result``
     for the individual tests to inspect.
     """
     super(SimplePipeLineTest, self).setUp()
     self.templates = TemplateLookup(
         directories=[os.path.join(self.datadir, 'simple-pipeline')])

     def _tmpname():
         # mkstemp creates the file atomically and returns an open fd; the
         # previous NamedTemporaryFile('w').name idiom dropped the file
         # object immediately, so the file was deleted at garbage collection
         # and the name could be reused before open() below re-created it.
         fd, name = tempfile.mkstemp()
         os.close(fd)
         return name

     self.output = _tmpname()
     self.signer = _tmpname()
     self.signer_template = self.templates.get_template('signer.fd')
     self.validator = _tmpname()
     self.validator_template = self.templates.get_template('validator.fd')
     # Separate repositories so the signer and validator runs cannot interact.
     self.md_signer = MDRepository(store=MemoryStore())
     self.md_validator = MDRepository(store=MemoryStore())
     with open(self.signer, "w") as fd:
         fd.write(self.signer_template.render(ctx=self))
     with open(self.validator, "w") as fd:
         fd.write(self.validator_template.render(ctx=self))
     self.signer_result = plumbing(self.signer).process(self.md_signer,
                                                        state={
                                                            'batch': True,
                                                            'stats': {}
                                                        })
     self.validator_result = plumbing(self.validator).process(
         self.md_validator, state={
             'batch': True,
             'stats': {}
         })
Example #2
0
 def setUp(self):
     """Build signer/validator pipelines from templates and execute them.

     Each pipeline is rendered into a temporary file and processed against
     its own in-memory MDRepository; the results are kept on the instance.
     """
     super(SimplePipeLineTest, self).setUp()
     # Pipeline templates live under <datadir>/simple-pipeline.
     self.templates = TemplateLookup(directories=[os.path.join(self.datadir, 'simple-pipeline')])
     # NOTE(review): NamedTemporaryFile('w').name is used only to obtain a
     # temp path -- the file object itself is dropped immediately, so the
     # file is removed at GC and the name is raced until open() below
     # re-creates it.  Consider tempfile.mkstemp instead.
     self.output = tempfile.NamedTemporaryFile('w').name
     self.signer = tempfile.NamedTemporaryFile('w').name
     self.signer_template = self.templates.get_template('signer.fd')
     self.validator = tempfile.NamedTemporaryFile('w').name
     self.validator_template = self.templates.get_template('validator.fd')
     # Separate repositories so signer and validator runs cannot interact.
     self.md_signer = MDRepository(store=MemoryStore())
     self.md_validator = MDRepository(store=MemoryStore())
     with open(self.signer, "w") as fd:
         fd.write(self.signer_template.render(ctx=self))
     with open(self.validator, "w") as fd:
         fd.write(self.validator_template.render(ctx=self))
     # Same {'batch': True, 'stats': {}} state shape as the cmdline tool;
     # presumably 'batch' marks a one-shot run -- confirm in plumbing.process.
     self.signer_result = plumbing(self.signer).process(self.md_signer,
                                                        state={
                                                            'batch': True,
                                                            'stats': {}
                                                        })
     self.validator_result = plumbing(self.validator).process(
         self.md_validator, state={
             'batch': True,
             'stats': {}
         })
Example #3
0
    def __init__(self,
                 pipes=None,
                 autoreload=False,
                 frequency=600,
                 aliases=None,
                 cache_enabled=True,
                 observers=None,
                 store=None):
        """Initialise the server.

        :param pipes: pipeline file names; each is parsed into ``self.plumbings``
        :param autoreload: when True, register every pipeline file with the
            cherrypy autoreloader
        :param frequency: refresh interval (seconds) handed to MDUpdate
        :param aliases: attribute alias map; defaults to the module-level ATTRS
        :param cache_enabled: forwarded to MDRepository as
            ``metadata_cache_enabled``
        :param observers: normalised to a list here but NOT stored on the
            instance by this constructor
        :param store: metadata store forwarded to MDRepository
        """

        if aliases is None:
            aliases = ATTRS
        if not observers:
            observers = []
        if not pipes:
            pipes = []
        self._pipes = pipes
        self.cache_enabled = cache_enabled
        self.lock = ReadWriteLock()
        # One parsed plumbing per configured pipeline file.
        self.plumbings = [plumbing(v) for v in pipes]
        # Periodic metadata refresh driven by the cherrypy engine bus.
        self.refresh = MDUpdate(cherrypy.engine, server=self, frequency=frequency)
        self.refresh.subscribe()
        self.aliases = aliases
        self.psl = PublicSuffixList()
        self.md = MDRepository(metadata_cache_enabled=self.cache_enabled, store=store)

        if autoreload:
            for f in pipes:
                cherrypy.engine.autoreload.files.add(f)
Example #4
0
    def __init__(self,
                 pipes=None,
                 autoreload=False,
                 frequency=600,
                 aliases=None,
                 cache_enabled=True,
                 observers=None,
                 store=None):
        """Construct the server: normalise the arguments, parse the pipeline
        files, and hook a periodic metadata refresh into the cherrypy engine.
        """
        aliases = ATTRS if aliases is None else aliases
        observers = observers or []
        pipes = pipes or []

        self._pipes = pipes
        self.cache_enabled = cache_enabled
        self.lock = ReadWriteLock()
        # Parse each configured pipeline file once, up front.
        self.plumbings = [plumbing(pipe_file) for pipe_file in pipes]
        self.refresh = MDUpdate(cherrypy.engine, server=self, frequency=frequency)
        self.refresh.subscribe()
        self.aliases = aliases
        self.psl = PublicSuffixList()
        self.md = MDRepository(metadata_cache_enabled=self.cache_enabled, store=store)

        if autoreload:
            # Have cherrypy watch the pipeline files for edits.
            for pipe_file in pipes:
                cherrypy.engine.autoreload.files.add(pipe_file)
Example #5
0
    def run_pipeline(self, pl_name, ctx=None, md=None):
        """Render the pipeline template *pl_name* and execute it.

        :param pl_name: template name under ``<datadir>/simple-pipeline``
        :param ctx: optional template context dict (fresh dict when omitted)
        :param md: optional MDRepository; a fresh one is created when omitted
        :return: ``(result, md, ctx)`` tuple
        """
        if ctx is None:
            ctx = dict()
        if md is None:
            # BUG FIX: the old signature used ``md=MDRepository()`` -- a
            # mutable default evaluated once at definition time and shared
            # by every call that omitted ``md``.
            md = MDRepository()

        templates = TemplateLookup(directories=[os.path.join(self.datadir, 'simple-pipeline')])
        # mkstemp avoids the NamedTemporaryFile('w').name race: that file is
        # deleted as soon as the temporary object is garbage collected.
        fd, pipeline = tempfile.mkstemp()
        os.close(fd)
        template = templates.get_template(pl_name)
        with open(pipeline, "w") as fd:
            fd.write(template.render(ctx=ctx))
        try:
            res = plumbing(pipeline).process(md, state={'batch': True, 'stats': {}})
        finally:
            # Remove the rendered pipeline file even when processing raises.
            os.unlink(pipeline)
        return res, md, ctx
Example #6
0
    def run_pipeline(self, pl_name, ctx=None, md=None):
        """Render the pipeline template *pl_name* to a temp file and run it.

        :param pl_name: template name under ``<datadir>/simple-pipeline``
        :param ctx: optional template context dict
        :param md: metadata repository the pipeline is run against
        :return: ``(result, md, ctx)`` tuple

        NOTE(review): ``md=MDRepository()`` is a mutable default argument --
        it is evaluated once at definition time, so every call that omits
        ``md`` shares the same repository instance.  Likely unintended.
        """
        if ctx is None:
            ctx = dict()

        templates = TemplateLookup(directories=[os.path.join(self.datadir, 'simple-pipeline')])
        # NOTE(review): the NamedTemporaryFile object is dropped immediately;
        # the file is deleted at GC and only the (racy) name is kept.
        pipeline = tempfile.NamedTemporaryFile('w').name
        template = templates.get_template(pl_name)
        with open(pipeline, "w") as fd:
            fd.write(template.render(ctx=ctx))
        res = plumbing(pipeline).process(md, state={'batch': True, 'stats': {}})
        # Temp file is not removed if process() raises -- leak on error path.
        os.unlink(pipeline)
        return res, md, ctx
Example #7
0
    def __init__(self, pipes=None, autoreload=False, frequency=600, aliases=None, cache_enabled=True):
        """Initialise the server.

        :param pipes: pipeline file names, parsed into ``self.plumbings``
        :param autoreload: register pipeline files with the cherrypy
            autoreloader when True
        :param frequency: refresh interval (seconds) for MDUpdate
        :param aliases: attribute alias map; ``None`` (the default) selects
            the module-level ATTRS map.  The old ``aliases=ATTRS`` default
            exposed the shared module constant directly; the None-sentinel
            form matches the other constructors in this codebase.
        :param cache_enabled: stored as ``self.cache_enabled``
        """
        if aliases is None:
            aliases = ATTRS
        if not pipes:
            pipes = []
        self.cache_enabled = cache_enabled
        self._md = None
        self.lock = ReadWriteLock()
        # One parsed plumbing per configured pipeline file.
        self.plumbings = [plumbing(v) for v in pipes]
        self.refresh = MDUpdate(cherrypy.engine, server=self, frequency=frequency)
        self.refresh.subscribe()
        self.aliases = aliases

        if autoreload:
            for f in pipes:
                cherrypy.engine.autoreload.files.add(f)
Example #8
0
def main():
    """
    The main entrypoint for the pyFF cmdline tool.

    Parses options, configures logging, imports the configured pipe
    modules, then runs each pipeline named on the command line once
    against a fresh MDRepository.  Exits 0 on success, -1 on any error.
    """
    args = parse_options("pyff", __doc__)

    # Log to the configured file when set, otherwise to the default stream.
    log_args = {'level': config.loglevel}
    if config.logfile is not None:
        log_args['filename'] = config.logfile
    logging.basicConfig(**log_args)

    # Always load the built-in pipes in addition to any configured modules.
    config.modules.append('pyff.builtins')
    for mn in config.modules:
        importlib.import_module(mn)
    # update_frequency 0 -- presumably disables scheduled refresh for this
    # one-shot batch run; confirm against the scheduler setup.
    config.update_frequency = 0
    try:
        md = MDRepository()
        for p in args:
            plumbing(p).process(md, state={'batch': True, 'stats': {}})
        # SystemExit is not an Exception subclass, so this is not caught below.
        sys.exit(0)
    except Exception as ex:
        # Full traceback only at DEBUG; the error itself is always logged.
        logging.debug(traceback.format_exc())
        logging.error(ex)
        sys.exit(-1)
Example #9
0
    def __init__(self, pipes=None, observers=None):
        """Initialise the server from the global ``config`` object.

        :param pipes: pipeline file names, parsed into ``self.plumbings``
        :param observers: normalised to a list here but NOT stored on the
            instance by this constructor
        """

        if not observers:
            observers = []
        if not pipes:
            pipes = []
        self._pipes = pipes
        self.lock = ReadWriteLock()
        # One parsed plumbing per configured pipeline file.
        self.plumbings = [plumbing(v) for v in pipes]
        # Periodic metadata refresh driven by the cherrypy engine bus.
        self.refresh = MDUpdate(cherrypy.engine, server=self, frequency=config.frequency)
        self.refresh.subscribe()
        self.aliases = config.aliases
        self.psl = PublicSuffixList()
        self.md = MDRepository(metadata_cache_enabled=config.caching_enabled, store=config.store)

        if config.autoreload:
            for f in pipes:
                cherrypy.engine.autoreload.files.add(f)
Example #10
0
    def __init__(self, pipes=None, observers=None):
        """Set up the pipelines, the refresh job and the metadata repository,
        taking all tunables from the global ``config`` object.
        """
        pipes = pipes or []
        observers = observers or []

        self._pipes = pipes
        self.lock = ReadWriteLock()
        # Parse every configured pipeline file up front.
        self.plumbings = [plumbing(pipe_file) for pipe_file in pipes]
        self.refresh = MDUpdate(cherrypy.engine, server=self, frequency=config.frequency)
        self.refresh.subscribe()
        self.aliases = config.aliases
        self.psl = PublicSuffixList()
        self.md = MDRepository(metadata_cache_enabled=config.caching_enabled, store=config.store)

        if config.autoreload:
            # Register every pipeline file with the cherrypy autoreloader.
            for pipe_file in pipes:
                cherrypy.engine.autoreload.files.add(pipe_file)
Example #11
0
    def __init__(self,
                 pipes=None,
                 autoreload=False,
                 frequency=600,
                 aliases=None,
                 cache_enabled=True,
                 hosts_dir=None,
                 observers=None):
        """Initialise the server.

        :param pipes: pipeline file names, parsed into ``self.plumbings``
        :param autoreload: register pipeline files with the cherrypy
            autoreloader when True
        :param frequency: refresh interval (seconds) for MDUpdate
        :param aliases: attribute alias map; ``None`` selects ATTRS
        :param cache_enabled: stored as ``self.cache_enabled``
        :param hosts_dir: accepted but unused by this constructor
        :param observers: observer list, stored as ``self.observers``
        """
        # BUG FIX: the old defaults ``aliases=ATTRS`` and ``observers=[]``
        # were evaluated once at definition time; in particular the shared
        # [] meant that appends to one instance's ``observers`` leaked into
        # every later instance.  None-sentinels give each call fresh state.
        if aliases is None:
            aliases = ATTRS
        if observers is None:
            observers = []
        if pipes is None:
            pipes = []
        self.cache_enabled = cache_enabled
        self._md = None
        self.lock = ReadWriteLock()
        self.plumbings = [plumbing(v) for v in pipes]
        self.refresh = MDUpdate(cherrypy.engine, server=self, frequency=frequency)
        self.refresh.subscribe()
        self.aliases = aliases
        self.observers = observers
        self.psl = PublicSuffixList()

        if autoreload:
            for f in pipes:
                cherrypy.engine.autoreload.files.add(f)
Example #12
0
 def reload_pipeline(self):
     """Re-parse every configured pipeline file and swap in the new set."""
     self.plumbings = [plumbing(pipe_file) for pipe_file in self._pipes]
Example #13
0
 def reload_pipeline(self):
     """Rebuild ``self.plumbings`` by re-parsing each file in ``self._pipes``."""
     # Build the full new list first, then swap it in with one assignment.
     new_plumbings = [plumbing(v) for v in self._pipes]
     self.plumbings = new_plumbings
Example #14
0
            # NOTE(review): this excerpt starts mid-way through an option-
            # parsing loop -- the enclosing function, the ``for o, a in
            # opts:`` header and the earlier branches are above this
            # fragment.  Legacy Python 2 syntax throughout (print statement,
            # ``except Exception, ex``).
            loglevel = getattr(logging, a.upper(), None)
            if not isinstance(loglevel, int):
                raise ValueError('Invalid log level: %s' % loglevel)
        # NOTE(review): ``o in '--logfile'`` is a substring test, so e.g.
        # '-l' would also match; presumably exact equality (or membership in
        # a tuple of option spellings) was intended -- confirm before changing.
        elif o in '--logfile':
            logfile = a
        elif o in '--version':
            print "pyff version %s" % __version__
            sys.exit(0)
        else:
            raise ValueError("Unknown option '%s'" % o)

    # Log to the requested file when given, otherwise to the default stream.
    log_args = {'level': loglevel}
    if logfile is not None:
        log_args['filename'] = logfile
    logging.basicConfig(**log_args)

    try:
        # Run each pipeline named on the command line once, in batch mode.
        for p in args:
            plumbing(p).process(md, state={'batch': True, 'stats': {}})
        sys.exit(0)
    except Exception, ex:
        # Print the full traceback only when DEBUG logging is enabled.
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            print "-" * 64
            traceback.print_exc()
            print "-" * 64
        logging.error(ex)
        sys.exit(-1)


if __name__ == "__main__":
    main()
Example #15
0
def mkapp(*args: Any, **kwargs: Any) -> Any:
    """Build and return the pyFF WSGI application.

    Positional ``args`` are treated as pipeline file names; an ``md``
    keyword may supply a pre-built MDRepository (a fresh one is created
    otherwise).  Pipelines, the cache, aliases and the scheduler are all
    attached to the Pyramid registry, routes/views are registered, and a
    periodic 'update' job is scheduled when ``config.update_frequency`` > 0.
    """
    md = kwargs.pop('md', None)
    if md is None:
        md = MDRepository()

    if config.devel_memory_profile:
        launch_memory_usage_server()

    with Configurator(debug_logger=log) as ctx:
        # CORS headers are added to every response via a NewRequest callback.
        ctx.add_subscriber(add_cors_headers_response_callback, NewRequest)

        if config.aliases is None:
            config.aliases = dict()

        if config.modules is None:
            config.modules = []

        ctx.registry.config = config
        # Always load the built-in pipes in addition to configured modules.
        config.modules.append('pyff.builtins')
        for mn in config.modules:
            importlib.import_module(mn)

        # Positional args win over config.pipeline when both are present.
        pipeline = None
        if args:
            pipeline = list(args)
        if pipeline is None and config.pipeline:
            pipeline = [config.pipeline]

        ctx.registry.scheduler = md.scheduler
        if pipeline is not None:
            ctx.registry.pipeline = pipeline
            # Parse each pipeline file once at application build time.
            ctx.registry.plumbings = [plumbing(v) for v in pipeline]
        ctx.registry.aliases = config.aliases
        ctx.registry.md = md
        # TTL cache when caching is on; a no-op cache object otherwise.
        if config.caching_enabled:
            ctx.registry.cache = TTLCache(config.cache_size, config.cache_ttl)
        else:
            ctx.registry.cache = NoCache()

        ctx.add_route('robots', '/robots.txt')
        ctx.add_view(robots_handler, route_name='robots')

        ctx.add_route('webfinger',
                      '/.well-known/webfinger',
                      request_method='GET')
        ctx.add_view(webfinger_handler, route_name='webfinger')

        ctx.add_route('search', '/api/search', request_method='GET')
        ctx.add_view(search_handler, route_name='search')

        ctx.add_route('status', '/api/status', request_method='GET')
        ctx.add_view(status_handler, route_name='status')

        ctx.add_route('resources', '/api/resources', request_method='GET')
        ctx.add_view(resources_handler, route_name='resources')

        ctx.add_route('pipeline', '/api/pipeline', request_method='GET')
        ctx.add_view(pipeline_handler, route_name='pipeline')

        ctx.add_route('call',
                      '/api/call/{entry}',
                      request_method=['POST', 'PUT'])
        ctx.add_view(process_handler, route_name='call')

        # Catch-all GET route; must be registered after the specific routes.
        ctx.add_route('request', '/*path', request_method='GET')
        ctx.add_view(request_handler, route_name='request')

        # Schedule the recurring 'update' call, starting one second from now.
        start = utc_now() + timedelta(seconds=1)
        if config.update_frequency > 0:
            ctx.registry.scheduler.add_job(
                call,
                'interval',
                id="call/update",
                args=['update'],
                start_date=start,
                misfire_grace_time=10,
                seconds=config.update_frequency,
                replace_existing=True,
                max_instances=1,
                timezone=pytz.utc,
            )

        return ctx.make_wsgi_app()