def test_prune(self):
    """Verify that the 'prune' pipe removes matching elements from published output.

    Publishes test01.xml after pruning all mdui:UIInfo elements, then checks
    that UIInfo exists in the source document but not in the published one.
    """
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        # NamedTemporaryFile is used only to obtain a unique path; the
        # pipeline's 'publish' step writes the actual content.
        tmpfile = tempfile.NamedTemporaryFile('w').name
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select
- prune:
  - .//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo
- publish: %s
""" % (self.datadir, tmpfile))
            t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            uiinfo = t1.find(".//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo")
            assert uiinfo is not None
            t2 = parse_xml(tmpfile)
            assert t2 is not None
            gone = t2.find(".//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo")
            assert gone is None
        except PipeException:
            pass
        except IOError:
            raise Skip
        finally:
            try:
                os.unlink(tmpfile)
            except OSError:  # was a bare except; only ignore missing-file errors
                pass
def setUp(self):
    """Load the shared XML fixtures and create a scratch directory."""
    data_root = resource_filename('metadata', 'test/data')
    self.datadir = data_root
    self.test01_source = os.path.join(data_root, 'test01.xml')
    self.test01 = parse_xml(self.test01_source)
    self.swamid_source = os.path.join(data_root, 'swamid-2.0-test.xml')
    self.swamid = parse_xml(self.swamid_source)
    self.dir = tempfile.mkdtemp()
def test_first_select_as(self):
    """Verify that 'select as' plus 'first' publishes exactly one entity.

    Publishes the first entity of test01.xml and checks that its entityID
    matches both the source document and the expected constant.
    """
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        # NamedTemporaryFile only reserves a unique path for 'publish'.
        tmpfile = tempfile.NamedTemporaryFile('w').name
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select as FOO
- first
- publish: %s
""" % (self.datadir, tmpfile))
            t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            assert t1 is not None
            entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
            t2 = parse_xml(tmpfile)
            assert t2 is not None
            assert root(t1).get('entityID') == root(t2).get('entityID')
            assert root(t2).get('entityID') == entity_id
        except PipeException:
            pass
        except IOError:
            raise Skip
        finally:
            try:
                os.unlink(tmpfile)
            except OSError:  # was a bare except; only ignore missing-file errors
                pass
def test_store_and_retrieve(self):
    """Check that 'store' writes an entity file (named by sha1 of entityID)
    which parses back to the same entity as the source document."""
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        tmpdir = tempfile.mkdtemp()
        os.rmdir(tmpdir)  # lets make sure 'store' can recreate it
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select
- store:
    directory: %s
""" % (self.datadir, tmpdir))
            source_doc = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            assert source_doc is not None
            entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
            # the store names each file after the (unprefixed) sha1 of the entityID
            sha1id = hash_id(entity_id, prefix=False)
            fn = "%s/%s.xml" % (tmpdir, sha1id)
            assert os.path.exists(fn)
            stored_doc = parse_xml(fn)
            assert stored_doc is not None
            assert root(source_doc).get('entityID') == root(stored_doc).get('entityID')
            assert root(stored_doc).get('entityID') == entity_id
        except IOError:
            raise Skip
        finally:
            shutil.rmtree(tmpdir)
def test_prune(self):
    """Verify that 'prune' strips mdui:UIInfo from the published document
    while the source document still contains it."""
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        # the temp file only provides a unique target path for 'publish'
        tmpfile = tempfile.NamedTemporaryFile('w').name
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select
- prune:
  - .//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo
- publish: %s
""" % (self.datadir, tmpfile))
            t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            uiinfo = t1.find(".//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo")
            assert uiinfo is not None
            t2 = parse_xml(tmpfile)
            assert t2 is not None
            gone = t2.find(".//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo")
            assert gone is None
        except PipeException:
            pass
        except IOError:
            raise Skip
        finally:
            try:
                os.unlink(tmpfile)
            except OSError:  # was a bare except; only ignore missing-file errors
                pass
def test_store_and_retrieve(self):
    """Round-trip an entity through the 'store' pipe and compare entityIDs."""
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        tmpdir = tempfile.mkdtemp()
        os.rmdir(tmpdir)  # lets make sure 'store' can recreate it
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select
- store:
    directory: %s
""" % (self.datadir, tmpdir))
            expected = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            assert expected is not None
            entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
            # stored files are keyed on the unprefixed sha1 hash of the entityID
            sha1id = hash_id(entity_id, prefix=False)
            fn = "%s/%s.xml" % (tmpdir, sha1id)
            assert os.path.exists(fn)
            stored = parse_xml(fn)
            assert stored is not None
            assert root(expected).get('entityID') == root(stored).get('entityID')
            assert root(stored).get('entityID') == entity_id
        except IOError:
            raise Skip
        finally:
            shutil.rmtree(tmpdir)
def test_first_select_as(self):
    """Publish the first entity via 'select as'/'first' and verify its entityID."""
    # the temp file is used only as a unique target path for 'publish'
    tmpfile = tempfile.NamedTemporaryFile('w').name
    try:
        self.exec_pipeline(f"""
- load:
  - file://{self.datadir}/metadata/test01.xml
- select as FOO:
  - first
- publish: {tmpfile}
""")
        source_doc = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
        assert source_doc is not None
        entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
        published_doc = parse_xml(tmpfile)
        assert published_doc is not None
        assert root(source_doc).get('entityID') == root(published_doc).get('entityID')
        assert root(published_doc).get('entityID') == entity_id
    except PipeException:
        pass
    except IOError:
        pass
    finally:
        try:
            os.unlink(tmpfile)
        except (IOError, OSError):
            pass
def test_prune(self):
    """Verify that 'prune' removes mdui:UIInfo from the published output
    while the source document still contains it."""
    # the temp file only provides a unique target path for 'publish'
    tmpfile = tempfile.NamedTemporaryFile('w').name
    try:
        self.exec_pipeline(f"""
- load:
  - file://{self.datadir}/metadata/test01.xml
- select
- prune:
  - .//{{urn:oasis:names:tc:SAML:metadata:ui}}UIInfo
- publish: {tmpfile}
""")
        t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
        uiinfo = t1.find(".//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo")
        assert uiinfo is not None
        t2 = parse_xml(tmpfile)
        assert t2 is not None
        gone = t2.find(".//{urn:oasis:names:tc:SAML:metadata:ui}UIInfo")
        assert gone is None
    except PipeException:
        pass
    except IOError:
        pass
    finally:
        try:
            os.unlink(tmpfile)
        except OSError:  # was a bare except; only ignore missing-file errors
            pass
def test_store_and_retrieve(self):
    """Round-trip an entity through the 'store' pipe and compare entityIDs."""
    tmpdir = tempfile.mkdtemp()
    os.rmdir(tmpdir)  # lets make sure 'store' can recreate it
    try:
        self.exec_pipeline(f"""
- load:
  - file://{self.datadir}/metadata/test01.xml
- select
- store:
    directory: {tmpdir}
""")
        source_doc = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
        assert source_doc is not None
        entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
        # stored files are keyed on the unprefixed sha1 hash of the entityID
        sha1id = hash_id(entity_id, prefix=False)
        fn = f"{tmpdir}/{sha1id}.xml"
        assert os.path.exists(fn)
        stored_doc = parse_xml(fn)
        assert stored_doc is not None
        assert root(source_doc).get('entityID') == root(stored_doc).get('entityID')
        assert root(stored_doc).get('entityID') == entity_id
    except IOError:
        pass
    finally:
        shutil.rmtree(tmpdir)
def test_first_select_as(self):
    """Verify that 'select as' plus 'first' publishes the expected single entity."""
    with patch.multiple("sys", exit=self.sys_exit, stdout=StreamCapturing(sys.stdout)):
        # the temp file only provides a unique target path for 'publish'
        tmpfile = tempfile.NamedTemporaryFile('w').name
        try:
            self.exec_pipeline("""
- load:
  - file://%s/metadata/test01.xml
- select as FOO
- first
- publish: %s
""" % (self.datadir, tmpfile))
            t1 = parse_xml(resource_filename("metadata/test01.xml", self.datadir))
            assert t1 is not None
            entity_id = 'https://idp.example.com/saml2/idp/metadata.php'
            t2 = parse_xml(tmpfile)
            assert t2 is not None
            assert root(t1).get('entityID') == root(t2).get('entityID')
            assert root(t2).get('entityID') == entity_id
        except PipeException:
            pass
        except IOError:
            raise Skip
        finally:
            try:
                os.unlink(tmpfile)
            except OSError:  # was a bare except; only ignore missing-file errors
                pass
def setUp(self):
    """Create an in-memory metadata repository and load the XML fixtures."""
    self.md = MDRepository(store=MemoryStore)
    data_root = resource_filename('metadata', 'test/data')
    self.datadir = data_root
    self.xml_source = os.path.join(data_root, 'test01.xml')
    self.swamid_source = os.path.join(data_root, 'swamid-2.0-test.xml')
    self.swamid = root(parse_xml(self.swamid_source))
    self.t = parse_xml(self.xml_source)
    # deliberately non-metadata XML, used for negative tests
    self.non_metadata = parse_xml(resource_filename("not-metadata.xml", data_root))
def setUp(self):
    """Load the test01, swamid and wayf/eduGAIN XML fixtures."""
    data_root = resource_filename('metadata', 'test/data')
    self.datadir = data_root
    self.test01_source = os.path.join(data_root, 'test01.xml')
    self.test01 = parse_xml(self.test01_source)
    self.swamid_source = os.path.join(data_root, 'swamid-2.0-test.xml')
    self.swamid = parse_xml(self.swamid_source)
    self.wayf_source = os.path.join(data_root, 'wayf-edugain-metadata.xml')
    self.wayf = parse_xml(self.wayf_source)
def test_all_entities_parses(self):
    """Fetch /entities from the local server and schema-validate the response."""
    resp = requests.get("http://127.0.0.1:%s/entities" % self.port)
    assert resp.status_code == 200
    # assert (r.encoding == 'utf8')
    doc = parse_xml(six.BytesIO(resp.content))
    assert doc is not None
    validate_document(doc)
def test_all_entities_parses(self):
    """Fetch /entities from the local server and schema-validate the response.

    NOTE(review): the port is hard-coded to 8080 here while sibling tests use
    self.port — confirm against the fixture that starts the server.
    """
    from io import BytesIO  # r.content is bytes; StringIO(bytes) breaks on Python 3
    r = requests.get("http://127.0.0.1:8080/entities")
    assert (r.status_code == 200)
    #assert (r.encoding == 'utf8')
    t = parse_xml(BytesIO(r.content))
    assert (t is not None)
    validate_document(t)
def test_alias_ndn(self):
    """Fetch the ndn.xml alias and verify it resolves to the NORDUnet IdP."""
    from io import BytesIO  # r.content is bytes; StringIO(bytes) breaks on Python 3
    r = requests.get("http://127.0.0.1:%s/ndn.xml" % self.port)
    assert r.status_code == 200
    # assert (r.encoding == 'utf8')
    t = parse_xml(BytesIO(r.content))
    assert t is not None
    assert root(t).get("entityID") == "https://idp.nordu.net/idp/shibboleth"
    validate_document(t)
def test_alias_ndn(self):
    """Fetch the ndn.xml alias and verify it resolves to the NORDUnet IdP."""
    resp = requests.get("http://127.0.0.1:%s/ndn.xml" % self.port)
    assert resp.status_code == 200
    # assert (r.encoding == 'utf8')
    doc = parse_xml(six.BytesIO(resp.content))
    assert doc is not None
    assert root(doc).get('entityID') == 'https://idp.nordu.net/idp/shibboleth'
    validate_document(doc)
def test_md_query_single(self):
    """Request a single entity via the MDQ endpoint and check its entityID."""
    # urllib.quote_plus is Python 2 only; the module was split in Python 3.
    from urllib.parse import quote_plus
    from io import BytesIO  # r.content is bytes; StringIO(bytes) breaks on Python 3
    q = quote_plus('https://idp.nordu.net/idp/shibboleth')
    r = requests.get("http://127.0.0.1:%s/entities/%s" % (self.port, q))
    assert (r.status_code == 200)
    assert ('application/xml' in r.headers['Content-Type'])
    t = parse_xml(BytesIO(r.content))
    assert (t is not None)
    e = root(t)
    assert (e.get('entityID') == 'https://idp.nordu.net/idp/shibboleth')
def test_alias_ndn(self):
    """Fetch the ndn.xml alias and verify it resolves to the NORDUnet IdP.

    NOTE(review): the port is hard-coded to 8080 here while sibling tests use
    self.port — confirm against the fixture that starts the server.
    """
    from io import BytesIO  # r.content is bytes; StringIO(bytes) breaks on Python 3
    r = requests.get("http://127.0.0.1:8080/ndn.xml")
    assert (r.status_code == 200)
    #assert (r.encoding == 'utf8')
    t = parse_xml(BytesIO(r.content))
    assert (t is not None)
    assert (root(t).get('entityID') == 'https://idp.nordu.net/idp/shibboleth')
    validate_document(t)
def test_md_query_single(self):
    """Request a single entity via the MDQ endpoint and check its entityID."""
    quoted = quote_plus('https://idp.nordu.net/idp/shibboleth')
    resp = requests.get("http://127.0.0.1:%s/entities/%s" % (self.port, quoted))
    assert resp.status_code == 200
    assert 'application/xml' in resp.headers['Content-Type']
    doc = parse_xml(six.BytesIO(resp.content))
    assert doc is not None
    entity = root(doc)
    assert entity.get('entityID') == 'https://idp.nordu.net/idp/shibboleth'
def test_md_query_single(self):
    """Request a single entity via the MDQ endpoint and check its entityID."""
    # urllib.quote_plus is Python 2 only; the module was split in Python 3.
    from urllib.parse import quote_plus
    from io import BytesIO  # r.content is bytes; StringIO(bytes) breaks on Python 3
    q = quote_plus("https://idp.nordu.net/idp/shibboleth")
    r = requests.get("http://127.0.0.1:%s/entities/%s" % (self.port, q))
    assert r.status_code == 200
    assert "application/xml" in r.headers["Content-Type"]
    t = parse_xml(BytesIO(r.content))
    assert t is not None
    e = root(t)
    assert e.get("entityID") == "https://idp.nordu.net/idp/shibboleth"
def parse(self, resource: Resource, content: str) -> ParserInfo:
    """Parse an XRD document and register each md-rel Link as a child resource.

    :param resource: the Resource being parsed; children are added to it
    :param content: the XRD document text
    :return: a ParserInfo describing the result

    Removed an unused local (``relt = root(t)``) that was never referenced.
    """
    info = ParserInfo(description='XRD links', expiration_time='never expires')
    t = parse_xml(unicode_stream(content))
    for xrd in t.iter("{%s}XRD" % NS['xrd']):
        for link in xrd.findall(".//{%s}Link[@rel='%s']" % (NS['xrd'], NS['md'])):
            link_href = link.get("href")
            # the first certificate fingerprint (if any) is only logged here;
            # verification itself happens when the child resource is fetched
            certs = CertDict(link)
            fingerprints = list(certs.keys())
            fp = None
            if len(fingerprints) > 0:
                fp = fingerprints[0]
            log.debug("XRD: {} verified by {}".format(link_href, fp))
            child_opts = resource.opts.copy(update={'alias': None})
            resource.add_child(link_href, child_opts)
    # an XRD listing itself never expires; only its children carry expiry
    resource.last_seen = utc_now().replace(microsecond=0)
    resource.expire_time = None
    resource.never_expires = True
    return info
def setUp(self):
    """Load the test01 and swamid XML fixtures."""
    data_root = resource_filename('metadata', 'test/data')
    self.datadir = data_root
    self.test01_source = os.path.join(data_root, 'test01.xml')
    self.test01 = parse_xml(self.test01_source)
    self.swamid_source = os.path.join(data_root, 'swamid-2.0-test.xml')
    self.swamid = parse_xml(self.swamid_source)
def _get_metadata(self, key):
    """Fetch the cached metadata document stored under *key* and return its root element."""
    raw = self.rc.get("%s#metadata" % key)
    doc = parse_xml(StringIO(raw))
    return root(doc)
def setUp(self):
    """Parse the two XML fixtures used by this test case."""
    data_root = resource_filename('metadata', 'test/data')
    self.datadir = data_root
    self.xml_source1 = os.path.join(data_root, 'test01.xml')
    self.xml_source2 = os.path.join(data_root, 'swamid-2.0-test.xml')
    self.t1 = parse_xml(self.xml_source1)
    self.t2 = parse_xml(self.xml_source2)
def parse_saml_metadata(
    source: BytesIO,
    opts: ResourceOpts,
    base_url=None,
    validation_errors: Optional[Dict[str, Any]] = None,
):
    """Parse a piece of XML and return an EntitiesDescriptor element after validation.

    :param source: a file-like object containing SAML metadata
    :param opts: ResourceOpts instance
    :param base_url: use this base url to resolve relative URLs for XInclude processing
    :param validation_errors: A dict that will be used to return validation errors to the caller

    :return: Tuple with t (ElementTree), expire_time_offset, exception
    """
    if validation_errors is None:
        validation_errors = dict()

    try:
        t = parse_xml(source, base_url=base_url)
        if config.xinclude:
            t.xinclude()
        # compute the expiry offset before any transformation touches the tree
        expire_time_offset = metadata_expiration(t)
        t = check_signature(t, opts.verify)
        if opts.cleanup is not None:
            # caller-supplied cleanup callbacks are applied in order, each
            # receiving the output of the previous one
            for cb in opts.cleanup:
                t = cb(t)
        else:  # at least get rid of ID attribute
            for e in iter_entities(t):
                if e.get('ID') is not None:
                    del e.attrib['ID']
        t = root(t)
        # fail_on_error overrides filter_invalid: errors must surface, not be filtered
        filter_invalid = opts.filter_invalid
        if opts.fail_on_error:
            filter_invalid = False
        if opts.validate_schema:
            # may return None if everything was filtered out as invalid
            t = filter_or_validate(
                t, filter_invalid=filter_invalid, base_url=base_url, source=source, validation_errors=validation_errors
            )
        if t is not None:
            if t.tag == "{%s}EntityDescriptor" % NS['md']:
                # wrap a lone EntityDescriptor in an EntitiesDescriptor so
                # callers always receive the same element type
                t = entitiesdescriptor(
                    [t], base_url, copy=False, validate=True, filter_invalid=filter_invalid, nsmap=t.nsmap
                )
    except Exception as ex:
        log.debug(traceback.format_exc())
        log.error("Error parsing {}: {}".format(base_url, ex))
        if opts.fail_on_error:
            raise ex
        # best-effort mode: report the failure to the caller instead of raising
        return None, None, ex

    log.debug("returning %d valid entities" % len(list(iter_entities(t))))

    return t, expire_time_offset, None
def parse(self, resource: Resource, content: str) -> EidasMDParserInfo:
    """Parse an eIDAS MetadataServiceList document and register each
    MetadataLocation as a child resource of *resource*.

    :param resource: the Resource being parsed; children and expiry are set on it
    :param content: the MDSL document text
    :return: an EidasMDParserInfo describing the parsed list
    """
    info = EidasMDParserInfo(description='eIDAS MetadataServiceList', expiration_time='None')
    t = parse_xml(unicode_stream(content))
    if config.xinclude:
        t.xinclude()
    relt = root(t)
    info.version = relt.get('Version', '0')
    info.issue_date = relt.get('IssueDate')
    info.next_update = relt.get('NextUpdate')
    if isinstance(info.next_update, str):
        # NextUpdate present in the document: use it as the expiry time
        resource.expire_time = iso2datetime(info.next_update)
    elif config.respect_cache_duration:
        # no NextUpdate: fall back to the configured default cache duration
        duration = duration2timedelta(config.default_cache_duration)
        if not duration:
            # TODO: what is the right action here?
            raise ValueError(
                f'Invalid default cache duration: {config.default_cache_duration}'
            )
        info.next_update = utc_now().replace(microsecond=0) + duration
        resource.expire_time = info.next_update

    info.expiration_time = 'None' if not resource.expire_time else resource.expire_time.isoformat()
    info.issuer_name = first_text(relt, "{%s}IssuerName" % NS['ser'])
    info.scheme_identifier = first_text(relt, "{%s}SchemeIdentifier" % NS['ser'])
    info.scheme_territory = first_text(relt, "{%s}SchemeTerritory" % NS['ser'])
    for mdl in relt.iter("{%s}MetadataList" % NS['ser']):
        for ml in mdl.iter("{%s}MetadataLocation" % NS['ser']):
            location = ml.get('Location')
            if location:
                certs = CertDict(ml)
                fingerprints = list(certs.keys())
                fp = None
                if len(fingerprints) > 0:
                    # use the first listed certificate fingerprint for verification
                    fp = fingerprints[0]
                ep = ml.find("{%s}Endpoint" % NS['ser'])
                # only locations with both an Endpoint and a fingerprint are usable
                if ep is not None and fp is not None:
                    args = dict(
                        country_code=mdl.get('Territory'),
                        hide_from_discovery=strtobool(ep.get('HideFromDiscovery', 'false')),
                    )
                    log.debug(
                        "MDSL[{}]: {} verified by {} for country {}".format(
                            info.scheme_territory, location, fp, args.get('country_code')
                        )
                    )
                    child_opts = resource.opts.copy(update={'alias': None})
                    child_opts.verify = fp
                    r = resource.add_child(location, child_opts)

                    # this is specific post-processing for MDSL files
                    def _update_entities(_t, **kwargs):
                        # kwargs carries country_code/hide_from_discovery bound
                        # via Lambda below, avoiding late-binding closure issues
                        _country_code = kwargs.get('country_code')
                        _hide_from_discovery = kwargs.get('hide_from_discovery')
                        for e in iter_entities(_t):
                            if _country_code:
                                set_nodecountry(e, _country_code)
                            if bool(_hide_from_discovery) and is_idp(e):
                                set_entity_attributes(
                                    e,
                                    {
                                        ATTRS['entity-category']: 'http://refeds.org/category/hide-from-discovery'
                                    },
                                )
                        return _t

                    r.add_via(Lambda(_update_entities, **args))

    log.debug("Done parsing eIDAS MetadataServiceList")
    resource.last_seen = utc_now().replace(microsecond=0)
    # expiry is tracked per child resource, not on the list itself
    resource.expire_time = None
    return info
def _unpickle(self, pickled_data):
    """Rebuild an XML document from stored bytes and return its root element."""
    stream = BytesIO(pickled_data)
    return root(parse_xml(stream))