def test_lookup_intersect_empty_test01(self):
    """An intersection query for domain=example.com AND role=sp matches nothing."""
    store = MemoryStore()
    store.update(self.test01)
    entity_id = root(self.test01).get('entityID')
    query = "%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'sp')
    result = store.lookup(query)
    assert len(result) == 0
def test_create_store(self):
    """A freshly constructed MemoryStore is empty in every observable way."""
    store = MemoryStore()
    assert store is not None
    # No entities, no collections, no indexed attributes yet.
    assert store.size() == 0
    collections = store.collections()
    assert len(collections) == 0
    # The string representation must still be non-empty.
    assert str(store)
    assert not store.attributes()
def setUp(self):
    """Render the signer and validator pipeline templates into temp files,
    then run both pipelines once and stash their results for the tests.
    """
    super(SimplePipeLineTest, self).setUp()
    # Mako templates live in the simple-pipeline test data directory.
    self.templates = TemplateLookup(
        directories=[os.path.join(self.datadir, 'simple-pipeline')])
    # Only the generated *names* are kept; the NamedTemporaryFile objects are
    # discarded immediately and the paths are reused as scratch files below.
    # NOTE(review): assumes the path can be reopened by name after the handle
    # is dropped -- true on POSIX, not on Windows while the file is open.
    self.output = tempfile.NamedTemporaryFile('w').name
    self.signer = tempfile.NamedTemporaryFile('w').name
    self.signer_template = self.templates.get_template('signer.fd')
    self.validator = tempfile.NamedTemporaryFile('w').name
    self.validator_template = self.templates.get_template('validator.fd')
    # One repository per pipeline so signer state cannot leak into validation.
    self.md_signer = MDRepository(store=MemoryStore())
    self.md_validator = MDRepository(store=MemoryStore())
    # Render each .fd pipeline description with this test case as context.
    with open(self.signer, "w") as fd:
        fd.write(self.signer_template.render(ctx=self))
    with open(self.validator, "w") as fd:
        fd.write(self.validator_template.render(ctx=self))
    # Execute both pipelines in batch mode; tests assert on the results.
    self.signer_result = plumbing(self.signer).process(self.md_signer,
                                                       state={
                                                           'batch': True,
                                                           'stats': {}
                                                       })
    self.validator_result = plumbing(self.validator).process(
        self.md_validator, state={
            'batch': True,
            'stats': {}
        })
def test_lookup_intersect_empty_test01(self):
    """An intersection query for domain=example.com AND role=sp matches nothing.

    FIX: the original used the Python 2 `print e` statement, which is a
    syntax error under Python 3 and inconsistent with the `print(...)` calls
    used elsewhere in this file.
    """
    store = MemoryStore()
    store.update(self.test01)
    entity_id = root(self.test01).get('entityID')
    e = store.lookup("%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'sp'))
    print(e)  # debug aid: show what (if anything) matched before asserting
    assert len(e) == 0
def test_lookup_test01(self):
    """Lookup by entityID returns exactly the one matching entity."""
    store = MemoryStore()
    store.update(self.test01)
    entity_id = root(self.test01).get('entityID')
    found = store.lookup(entity_id)
    assert len(found) == 1
    entity = found[0]
    assert entity is not None
    assert entity.get('entityID') is not None
    assert entity.get('entityID') == entity_id
def test_lookup_intersect_test01(self):
    """An intersection query for domain=example.com AND role=idp finds test01."""
    store = MemoryStore()
    store.update(self.test01)
    entity_id = root(self.test01).get('entityID')
    query = "%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'idp')
    result = store.lookup(query)
    assert len(result) == 1
    match = result[0]
    assert match is not None
    assert match.get('entityID') is not None
    assert match.get('entityID') == entity_id
def test_lookup_intersect_test01(self):
    """An intersection query for domain=example.com AND role=idp finds test01.

    FIX: the original used the Python 2 `print e` statement, which is a
    syntax error under Python 3 and inconsistent with the `print(...)` calls
    used elsewhere in this file.
    """
    store = MemoryStore()
    store.update(self.test01)
    entity_id = root(self.test01).get('entityID')
    e = store.lookup("%s=%s+%s=%s" % (ATTRS['domain'], 'example.com', ATTRS['role'], 'idp'))
    print(e)  # debug aid: show the match set before asserting on it
    assert len(e) == 1
    assert e[0] is not None
    assert e[0].get('entityID') is not None
    assert e[0].get('entityID') == entity_id
def test_store_attributes_test01(self):
    """After an update the store indexes domain and role, but not collection."""
    store = MemoryStore()
    store.update(self.test01)
    indexed = store.attributes()
    assert ATTRS['domain'] in indexed
    assert ATTRS['role'] in indexed
    assert ATTRS['collection'] not in indexed
    # Domain values from test01 are indexed; unknown domains are not.
    domains = store.attribute(ATTRS['domain'])
    assert 'example.com' in domains
    assert 'example.net' in domains
    assert 'foo.com' not in domains
def test_import_reset_wayf(self):
    """reset() empties a store populated from the WAYF eduGAIN feed."""
    store = MemoryStore()
    store.update(self.wayf, tid='https://metadata.wayf.dk/wayf-edugain-metadata.xml')
    # The fixture feed carries exactly 77 entities.
    assert store.size() == 77
    store.reset()
    assert store.size() == 0
def test_select_wayf(self):
    """select() on the feed tid returns all 77 WAYF entities."""
    store = MemoryStore()
    store.update(self.wayf, tid='https://metadata.wayf.dk/wayf-edugain-metadata.xml')
    assert store.size() == 77
    selected = store.select("https://metadata.wayf.dk/wayf-edugain-metadata.xml")
    entity_ids = [entity.get('entityID') for entity in selected]
    assert len(entity_ids) == 77
    # Spot-check one known entity from the fixture feed.
    expected = 'https://birk.wayf.dk/birk.php/wayf.supportcenter.dk/its/saml2/idp/metadata.php?unit=its'
    assert expected in entity_ids
def test_lookup_wayf_sha1(self):
    """Lookup by {sha1} entityID hash resolves to the single matching entity."""
    store = MemoryStore()
    store.update(self.wayf, tid='https://metadata.wayf.dk/wayf-edugain-metadata.xml')
    assert store.size() == 77
    matches = store.lookup("{sha1}4ece490318a017bc2cc24674f5ad049ad562f7b2")
    entity_ids = [entity.get('entityID') for entity in matches]
    assert len(entity_ids) == 1
    expected = 'https://birk.wayf.dk/birk.php/wayf.supportcenter.dk/its/saml2/idp/metadata.php?unit=its'
    assert expected in entity_ids
def test_lookup_wayf(self):
    """The special "entities" lookup returns every entity in the store."""
    store = MemoryStore()
    store.update(self.wayf, tid='https://metadata.wayf.dk/wayf-edugain-metadata.xml')
    assert store.size() == 77
    everything = store.lookup("entities")
    entity_ids = [entity.get('entityID') for entity in everything]
    assert len(entity_ids) == 77
    expected = 'https://birk.wayf.dk/birk.php/wayf.supportcenter.dk/its/saml2/idp/metadata.php?unit=its'
    assert expected in entity_ids
# Default configuration for the pyffd daemon, followed by the start of the
# command-line option parsing loop.
# NOTE(review): this chunk is truncated -- the `try` block has no visible
# `except` clause, and `opts` is not bound anywhere in the visible span
# (presumably by a getopt call earlier in the full source; confirm).
loglevel = logging.INFO
error_log = None
access_log = None
port = 8080
host = "127.0.0.1"
pidfile = "/var/run/pyffd.pid"
caching = True
delay = 300           # cache delay in seconds
daemonize = True
autoreload = False
frequency = 600       # refresh frequency in seconds
aliases = ATTRS
base_dir = None
proxy = False
store = MemoryStore()
terminator = False
modules = []
try:  # pragma: nocover
    for o, a in opts:
        if o in ('-h', '--help'):
            # NOTE(review): Python 2 print statement -- a syntax error under
            # Python 3; other parts of this file use print() calls.
            print __doc__
            sys.exit(0)
        elif o == '--loglevel':
            loglevel = getattr(logging, a.upper(), None)
            if not isinstance(loglevel, int):
                raise ValueError('Invalid log level: %s' % loglevel)
        elif o in ('--log', '-l'):
            # A single --log value feeds both error and access logs.
            error_log = a
            access_log = a
def main():
    """
    The main entrypoint for the pyffd command.

    Parses command-line options into the module-level `config`, then builds
    and starts a cherrypy server hosting the pyff metadata application.

    Fixes relative to the original:
      * `--caching-delay` parsed the option *name* (`int(o)`) instead of its
        value; it now parses `int(a)`.
      * the syslog facility for the *access* log was taken from
        `config.error_log` (copy-paste bug); it now uses `config.access_log`.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hP:p:H:CfaA:l:Rm:',
                                   ['help', 'loglevel=', 'log=', 'access-log=',
                                    'error-log=', 'port=', 'host=', 'no-caching',
                                    'autoreload', 'frequency=', 'modules=',
                                    'alias=', 'dir=', 'version', 'proxy',
                                    'allow_shutdown'])
    except getopt.error as msg:
        print(msg)
        print(__doc__)
        sys.exit(2)

    # Fill in defaults for anything not preconfigured.
    if config.store is None:
        config.store = MemoryStore()
    if config.loglevel is None:
        config.loglevel = logging.INFO
    if config.aliases is None:
        config.aliases = dict()
    if config.modules is None:
        config.modules = []

    try:  # pragma: nocover
        for o, a in opts:
            if o in ('-h', '--help'):
                print(__doc__)
                sys.exit(0)
            elif o == '--loglevel':
                config.loglevel = getattr(logging, a.upper(), None)
                if not isinstance(config.loglevel, int):
                    raise ValueError('Invalid log level: %s' % config.loglevel)
            elif o in ('--log', '-l'):
                # A single --log value feeds both error and access logs.
                config.error_log = a
                config.access_log = a
            elif o in '--error-log':
                config.error_log = a
            elif o in '--access-log':
                config.access_log = a
            elif o in ('--host', '-H'):
                config.bind_address = a
            elif o in ('--port', '-P'):
                config.port = int(a)
            elif o in ('--pidfile', '-p'):
                config.pid_file = a
            elif o in '-R':
                config.store = RedisStore()
            elif o in ('--no-caching', '-C'):
                config.caching_enabled = False
            elif o in ('--caching-delay', 'D'):
                # FIX: was int(o) -- parsing the option name always raised
                # ValueError. Parse the option's argument instead.
                # NOTE(review): neither '-D' nor '--caching-delay' is declared
                # in the getopt spec above, so this branch appears unreachable
                # as-is -- confirm intent before registering the option.
                config.caching_delay = int(a)
            elif o in ('--foreground', '-f'):
                config.daemonize = False
            elif o in ('--autoreload', '-a'):
                config.autoreload = True
            elif o in '--frequency':
                config.frequency = int(a)
            elif o in ('-A', '--alias'):
                # --alias takes "name:uri" pairs.
                (a, colon, uri) = a.partition(':')
                assert (colon == ':')
                if a and uri:
                    config.aliases[a] = uri
            elif o in '--dir':
                config.base_dir = a
            elif o in '--proxy':
                config.proxy = True
            elif o in '--allow_shutdown':
                config.allow_shutdown = True
            elif o in ('-m', '--module'):
                config.modules.append(a)
            elif o in '--version':
                print("pyffd version {} (cherrypy version {})".format(pyff_version, cherrypy.__version__))
                sys.exit(0)
            else:
                raise ValueError("Unknown option '%s'" % o)
    except Exception as ex:
        print(ex)
        print(__doc__)
        sys.exit(3)

    engine = cherrypy.engine
    plugins = cherrypy.process.plugins

    if config.daemonize:
        cherrypy.config.update({'environment': 'production'})
        cherrypy.config.update({'log.screen': False})
        # Daemon mode defaults both logs to the syslog daemon facility.
        if config.error_log is None:
            config.error_log = 'syslog:daemon'
        if config.access_log is None:
            config.access_log = 'syslog:daemon'
        plugins.Daemonizer(engine).subscribe()

    if config.base_dir is not None:
        DirPlugin(engine, config.base_dir).subscribe()

    if config.pid_file:
        plugins.PIDFile(engine, config.pid_file).subscribe()

    def _b64(p):
        # Encode a URL path segment for the EncodingDispatcher.
        # NOTE(review): str.encode('base64') is Python 2 only -- this raises
        # LookupError on Python 3; confirm which interpreter this targets.
        if p:
            return "{base64}%s" % p.encode('base64')
        else:
            return ""

    def error_page(code, **kwargs):
        # Render the templated error page matching the HTTP status code.
        return render_template("%d.html" % code, **kwargs)

    observers = []
    if config.loglevel == logging.DEBUG:
        observers.append(debug_observer)

    # The builtin pipeline plugins are always loaded last.
    config.modules.append('pyff.builtins')
    for mn in config.modules:
        importlib.import_module(mn)

    server = MDServer(pipes=args, observers=observers)
    # URL prefixes handled by the encoding dispatcher.
    pfx = ["/entities", "/metadata"] + ["/" + x for x in server.aliases.keys()]

    cfg = {
        'global': {
            'tools.encode.encoding': 'UTF-8',
            'server.socket_port': config.port,
            'server.socket_host': config.bind_address,
            'tools.caching.on': config.caching_enabled,
            'tools.caching.debug': config.caching_enabled,
            'tools.trailing_slash.on': True,
            'tools.caching.maxobj_size': 1000000000000,  # effectively infinite
            'tools.caching.maxsize': 1000000000000,
            'tools.caching.antistampede_timeout': 30,
            'tools.caching.delay': 3600,  # this is how long we keep static stuff
            'tools.cpstats.on': True,
            'tools.proxy.on': config.proxy,
            'allow_shutdown': config.allow_shutdown,
            'error_page.404': lambda **kwargs: error_page(404, _=_, **kwargs),
            'error_page.503': lambda **kwargs: error_page(503, _=_, **kwargs),
            'error_page.500': lambda **kwargs: error_page(500, _=_, **kwargs),
            'error_page.400': lambda **kwargs: error_page(400, _=_, **kwargs)
        },
        '/': {
            'tools.caching.delay': config.caching_delay,
            'tools.cpstats.on': True,
            'tools.proxy.on': config.proxy,
            'request.dispatch': EncodingDispatcher(pfx, _b64).dispatch,
            # NOTE(review): 'dispatpch' is misspelled, so this key is inert;
            # left unchanged because correcting it would enable dispatcher
            # debug output -- a behavior change to confirm separately.
            'request.dispatpch.debug': True,
        },
        '/static': {
            'tools.cpstats.on': True,
            'tools.caching.on': config.caching_enabled,
            'tools.caching.delay': config.caching_delay,
            'tools.proxy.on': config.proxy
        },
        '/shutdown': {
            'allow_shutdown': config.allow_shutdown
        }
    }
    cherrypy.config.update(cfg)

    if config.error_log is not None:
        cherrypy.config.update({'log.screen': False})

    root = MDRoot(server)
    app = cherrypy.tree.mount(root, config=cfg)

    # Route the error log either to syslog (facility after the 'syslog:'
    # prefix) or to a plain file.
    if config.error_log is not None:
        if config.error_log.startswith('syslog:'):
            facility = config.error_log[7:]
            h = SysLogLibHandler(facility=facility)
            app.log.error_log.addHandler(h)
            cherrypy.config.update({'log.error_file': ''})
        else:
            cherrypy.config.update({'log.error_file': config.error_log})

    # Same routing for the access log.
    if config.access_log is not None:
        if config.access_log.startswith('syslog:'):
            # FIX: was config.error_log[7:] -- the access log's facility must
            # come from the access log setting, not the error log's.
            facility = config.access_log[7:]
            h = SysLogLibHandler(facility=facility)
            app.log.access_log.addHandler(h)
            cherrypy.config.update({'log.access_file': ''})
        else:
            cherrypy.config.update({'log.access_file': config.access_log})

    app.log.error_log.setLevel(config.loglevel)

    engine.signals.subscribe()
    try:
        engine.start()
    except Exception as ex:
        logging.error(ex)
        sys.exit(1)
    else:
        # Block the main thread until the engine stops.
        engine.block()
def test_import_reset_test01(self):
    """reset() returns a populated store to size zero."""
    store = MemoryStore()
    store.update(self.test01)
    size_after_import = store.size()
    assert size_after_import > 0
    store.reset()
    assert store.size() == 0
def difftool():
    """ diff two saml metadata sources

    Loads the two URLs given on the command line, then reports (and encodes
    in the exit status) any differences: bits 0/1/2/3 of the status flag a
    differing Name, entities only in the first source, entities only in the
    second source, and per-entity XML differences, respectively.

    FIX: the original called rm.add(r1)/rm.add(r2) *before* r1 and r2 were
    assigned, which raised UnboundLocalError on every run; the resources are
    now constructed from the command-line arguments first.
    """
    args = parse_options("samldiff", __doc__, 'hv', ['help', 'loglevel=', 'version'])

    log_args = {'level': config.loglevel}
    if config.logfile is not None:
        log_args['filename'] = config.logfile
    logging.basicConfig(**log_args)

    try:
        # Build the two resources from the command-line URLs, then load both
        # into a scratch store so their trees (.t) are populated.
        r1 = Resource(url=args[0], opts=ResourceOpts())
        r2 = Resource(url=args[1], opts=ResourceOpts())
        rm = Resource()
        rm.add(r1)
        rm.add(r2)
        store = MemoryStore()
        rm.reload(store=store)

        status = 0

        # Top-level EntitiesDescriptor Name mismatch.
        if r1.t.get('Name') != r2.t.get('Name'):
            status += 1
            print("Name differs: {} != {}".format(r1.t.get('Name'), r2.t.get('Name')))

        # Entities present only on one side.
        d1 = diff(r1.t, r2.t)
        if d1:
            print("Only in {}".format(r1.url))
            print("\n+".join(d1))
            status += 2

        d2 = diff(r2.t, r1.t)
        if d2:
            print("Only in {}".format(r2.url))
            print("\n+".join(d2))
            status += 4

        # Deep XML diff of entities present on both sides, keyed by entityID.
        s1 = dict()
        s2 = dict()
        for e1 in iter_entities(r1.t):
            s1[e1.get('entityID')] = e1
        for e2 in iter_entities(r2.t):
            s2[e2.get('entityID')] = e2

        formatter = DiffFormatter()
        for eid in set(s1.keys()).intersection(s2.keys()):
            d = diff_trees(
                s1[eid],
                s2[eid],
                formatter=formatter,
                diff_options=dict(uniqueattrs=[
                    "{urn:oasis:names:tc:SAML:2.0:metadata}entityID"
                ]),
            )
            if d:
                status += 8
                print(d)

        sys.exit(status)
    except Exception as ex:
        logging.debug(traceback.format_exc())
        logging.error(ex)
        sys.exit(-1)