def _unindex(self, entity):
    # remove the entity from every hash, attribute and role bucket
    attr_idx = self.index.setdefault('attr', {})
    nd = 0
    for hn in DINDEX:
        # log.debug("computing %s" % hn)
        hid = hash_id(entity, hn, False)
        self.index[hn].setdefault(hid, EntitySet())
        self.index[hn][hid].discard(entity)
        nd += 1
    na = 0
    for attr, values in entity_attribute_dict(entity).items():
        # log.debug("indexing %s on %s" % (attr, entity.get('entityID')))
        for v in values:
            vidx = attr_idx.setdefault(attr, {})
            vidx.setdefault(v, EntitySet())
            na += 1
            vidx[v].discard(entity)
    vidx = attr_idx.setdefault(ATTRS['role'], {})
    if is_idp(entity):
        vidx.setdefault('idp', EntitySet())
        na += 1
        vidx['idp'].discard(entity)
    if is_sp(entity):
        vidx.setdefault('sp', EntitySet())
        na += 1
        vidx['sp'].discard(entity)

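# A minimal, self-contained sketch of the inverted-index pattern that
# _unindex() above (and _modify() further down) rely on: buckets keyed by
# a hashed or literal value, each holding a set of entities. The names
# below are illustrative stand-ins, not pyFF's own classes.
def _bucket_add(idx, key, entity):
    idx.setdefault(key, set()).add(entity)

def _bucket_discard(idx, key, entity):
    idx.setdefault(key, set()).discard(entity)

index = {'sha1': {}}
_bucket_add(index['sha1'], 'deadbeef', 'https://idp.example.com')
_bucket_discard(index['sha1'], 'deadbeef', 'https://idp.example.com')
assert index['sha1']['deadbeef'] == set()  # bucket stays, entity is gone
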
def test_store_and_retrieve(self):
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        tmpdir = tempfile.mkdtemp()
        os.rmdir(tmpdir)  # let's make sure 'store' can recreate it
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select
- store:
    directory: %s
""" % (self.datadir, tmpdir))
            t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            assert t1 is not None
            entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
            sha1id = hash_id(entity_id, prefix=False)
            fn = "%s/%s.xml" % (tmpdir, sha1id)
            assert os.path.exists(fn)
            t2 = parse_xml(fn)
            assert t2 is not None
            assert root(t1).get('entityID') == root(t2).get('entityID')
            assert root(t2).get('entityID') == entity_id
        except IOError:
            raise Skip
        finally:
            shutil.rmtree(tmpdir)

def test_store_and_retrieve(self):
    tmpdir = tempfile.mkdtemp()
    os.rmdir(tmpdir)  # let's make sure 'store' can recreate it
    try:
        self.exec_pipeline(f"""
- load:
  - file://{self.datadir}/metadata/test01.xml
- select
- store:
    directory: {tmpdir}
""")
        t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
        assert t1 is not None
        entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
        sha1id = hash_id(entity_id, prefix=False)
        fn = f"{tmpdir}/{sha1id}.xml"
        assert os.path.exists(fn)
        t2 = parse_xml(fn)
        assert t2 is not None
        assert root(t1).get('entityID') == root(t2).get('entityID')
        assert root(t2).get('entityID') == entity_id
    except IOError:
        pass
    finally:
        shutil.rmtree(tmpdir)

def _index_prep(self, info):
    res = dict()
    if 'entity_attributes' in info:
        for a, v in list(info.pop('entity_attributes').items()):
            info[a] = v
    content = " ".join(
        filter(
            lambda x: x is not None,
            [info.get(x, '') for x in ('service_name', 'title', 'domain', 'keywords', 'scopes')],
        )
    )
    res['content'] = content.strip()
    for a, v in info.items():
        k = a
        if a in ATTRS_INV:
            k = ATTRS_INV[a]
        if k in self.schema.names():
            if type(v) in (list, tuple):
                res[k] = " ".join([vv.lower() for vv in v])
            elif type(v) in six.string_types:
                res[k] = info[a].lower()
    res['sha1'] = hash_id(info['entity_id'], prefix=False)
    return res

def entity_simple_summary(e):
    if e is None:
        return dict()

    title, descr = entity_extended_display(e)
    entity_id = e.get('entityID')
    d = dict(
        title=title,
        descr=descr,
        auth='saml',
        entity_id=entity_id,
        entityID=entity_id,
        domains=";".join(sub_domains(e)),
        id=hash_id(e, 'sha1'),
    )
    scopes = entity_scopes(e)
    if scopes is not None and len(scopes) > 0:
        d['scopes'] = " ".join(scopes)
    psu = privacy_statement_url(e, None)
    if psu:
        d['privacy_statement_url'] = psu
    return d

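# A hedged usage sketch for entity_simple_summary(): the import location
# and the metadata path below are assumptions, following the helpers used
# in the tests in this section.
from pyff.utils import parse_xml, root  # assumed module path

t = parse_xml('metadata/test01.xml')     # placeholder metadata file
summary = entity_simple_summary(root(t))
assert summary['auth'] == 'saml'
assert summary['id'].startswith('{sha1}')  # hash_id(e, 'sha1') keeps the prefix
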
def test_sha1_hash(self):
    entity_id = root(self.t).get('entityID')
    self.md.store.update(root(self.t), entity_id)
    e = self.md.lookup(entity_id)
    assert sha1_id(e[0]) == "{sha1}568515f6fae8c8b4d42d543853c96d08f051ef13"
    assert hash_id(e[0], 'sha1', prefix=False) == "568515f6fae8c8b4d42d543853c96d08f051ef13"

def _modify(self, entity, modifier):
    def _m(idx, vv):
        getattr(idx.setdefault(vv, EntitySet()), modifier)(entity)

    for hn in DINDEX:
        _m(self.index[hn], hash_id(entity, hn, False))

    attr_idx = self.index.setdefault('attr', {})
    for attr, values in list(entity_attribute_dict(entity).items()):
        vidx = attr_idx.setdefault(attr, {})
        for v in values:
            _m(vidx, v)

    vidx = attr_idx.setdefault(ATTRS['role'], {})
    if is_idp(entity):
        _m(vidx, "idp")
    if is_sp(entity):
        _m(vidx, "sp")

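# A hedged sketch of how _modify() is presumably driven elsewhere on the
# same store class: the modifier string names an EntitySet method, so
# adding and removing an entity share one traversal of the index.
# (Wrapper names below are illustrative.)
def _index(self, entity):
    self._modify(entity, "add")        # EntitySet.add

def _unindex(self, entity):
    self._modify(entity, "discard")    # EntitySet.discard
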
def test_sha1_hash(self):
    entity_id = root(self.t).get('entityID')
    self.md.store.update(root(self.t), entity_id)
    e = self.md.lookup(entity_id)
    assert self.md.sha1_id(e[0]) == "{sha1}568515f6fae8c8b4d42d543853c96d08f051ef13"
    assert hash_id(e[0], 'sha1', prefix=False) == "568515f6fae8c8b4d42d543853c96d08f051ef13"

def webfinger_handler(request: Request) -> Response:
    """An implementation of the webfinger protocol
    (http://tools.ietf.org/html/draft-ietf-appsawg-webfinger-12)
    in order to provide information about up and downstream metadata
    available at this pyFF instance.

    Example:

    .. code-block:: bash

        # curl http://my.org/.well-known/webfinger?resource=http://my.org

    This should result in a JSON structure that looks something like this:

    .. code-block:: json

        {
            "expires": "2013-04-13T17:40:42.188549",
            "links": [
                {
                    "href": "http://reep.refeds.org:8080/role/sp.xml",
                    "rel": "urn:oasis:names:tc:SAML:2.0:metadata"
                },
                {
                    "href": "http://reep.refeds.org:8080/role/sp.json",
                    "rel": "disco-json"
                }
            ],
            "subject": "http://reep.refeds.org:8080"
        }

    Depending on which version of pyFF you're running and the configuration
    you may also see downstream metadata listed using the 'role' attribute
    to the link elements.
    """
    resource = request.params.get('resource', None)
    rel = request.params.get('rel', None)

    if resource is None:
        resource = request.host_url

    jrd: Dict[str, Any] = dict()
    dt = datetime.now() + timedelta(hours=1)
    jrd['expires'] = dt.isoformat()
    jrd['subject'] = request.host_url
    links: List[Dict[str, Any]] = list()
    jrd['links'] = links

    _dflt_rels = {
        'urn:oasis:names:tc:SAML:2.0:metadata': ['.xml', 'application/xml'],
        'disco-json': ['.json', 'application/json'],
    }

    if rel is None or len(rel) == 0:
        rel = _dflt_rels.keys()
    else:
        rel = [rel]

    def _links(url: str, title: Any = None) -> None:
        if url.startswith('/'):
            url = url.lstrip('/')
        for r in rel:
            suffix = ""
            if not url.endswith('/'):
                suffix = _dflt_rels[r][0]
            links.append(dict(rel=r, type=_dflt_rels[r][1], href='%s/%s%s' % (request.host_url, url, suffix)))

    _links('/entities/')
    for a in request.registry.md.store.collections():
        if a is not None and '://' not in a:
            _links(a)

    for entity in request.registry.md.store.lookup('entities'):
        entity_display = entity_display_name(entity)
        _links("/entities/%s" % hash_id(entity.get('entityID')), title=entity_display)

    aliases = request.registry.aliases
    for a in aliases.keys():
        for v in request.registry.md.store.attribute(aliases[a]):
            _links('%s/%s' % (a, quote_plus(v)))

    response = Response(dumps(jrd, default=json_serializer))
    response.headers['Content-Type'] = 'application/json'
    return response

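# A hedged client-side sketch for the handler above, standard library
# only; the base URL is a placeholder for a running pyFF instance.
import json
from urllib.request import urlopen

base = 'http://localhost:8080'
with urlopen(base + '/.well-known/webfinger?resource=' + base) as resp:
    jrd = json.load(resp)  # the JRD structure shown in the docstring
for link in jrd['links']:
    print(link['rel'], link['href'])
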
def sha1_id(e):
    return hash_id(e, 'sha1')
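
# A minimal re-implementation sketch of the behaviour the tests above pin
# down: hash_id() appears to hex-digest the entityID string and optionally
# prepend a "{alg}" marker. Illustrative only, not pyFF's actual code.
import hashlib

def _hash_id_sketch(entity_id: str, hn: str = 'sha1', prefix: bool = True) -> str:
    digest = hashlib.new(hn, entity_id.encode('utf-8')).hexdigest()
    return '{%s}%s' % (hn, digest) if prefix else digest

# e.g. _hash_id_sketch('https://idp.example.com/saml2/idp/metadata.php',
# prefix=False) should mirror the sha1id filename used by the 'store'
# tests above, assuming hash_id() digests the raw entityID string.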