Example 1
    def webfinger(self, resource=None, rel=None):
        if resource is None:
            raise cherrypy.HTTPError(400, "Bad Request - missing resource parameter")

        jrd = dict()
        dt = datetime.now() + duration2timedelta("PT1H")
        jrd['expires'] = dt.isoformat()
        jrd['subject'] = cherrypy.request.base
        links = list()
        jrd['links'] = links

        def _links(a):
            links.append(
                dict(rel='urn:oasis:names:tc:SAML:2.0:metadata', href='%s/%s.xml' % (cherrypy.request.base, a)))
            links.append(dict(rel='disco-json', href='%s/%s.json' % (cherrypy.request.base, a)))

        for a in self.server.md.keys():
            if '://' not in a:
                a = a.lstrip('/')
                _links(a)
            elif 'http://' in a or 'https://' in a:
                links.append(dict(rel='urn:oasis:names:tc:SAML:2.0:metadata',
                                  href=a,
                                  properties=dict()))

        for a in self.server.aliases.keys():
            for v in self.server.md.index.attribute(self.server.aliases[a]):
                _links('%s/%s' % (a, v))

        cherrypy.response.headers['Content-Type'] = 'application/json'
        return dumps(jrd)
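
Every example on this page hinges on duration2timedelta turning an xsd:duration string (such as the "PT1H" above) into a datetime.timedelta, and returning None for unparsable input. A minimal sketch of those semantics, covering only the D/H/M/S fields; this is an illustration, not pyFF's implementation:

import re
from datetime import timedelta

# Simplified xsd:duration grammar: an optional days field plus an optional
# T-separated time part (hours/minutes/seconds).
_DURATION_RE = re.compile(
    r'^P(?:(?P<days>\d+)D)?'
    r'(?:T(?:(?P<hours>\d+)H)?(?:(?P<minutes>\d+)M)?(?:(?P<seconds>\d+)S)?)?$')

def duration2timedelta_sketch(expr):
    m = _DURATION_RE.match(expr)
    if m is None or not any(m.groups()):
        return None  # mirrors test_bad below: garbage input yields None
    return timedelta(**{k: int(v) for k, v in m.groupdict().items() if v})

assert duration2timedelta_sketch("PT1H") == timedelta(hours=1)
assert duration2timedelta_sketch("PT5H") == timedelta(hours=5)
assert duration2timedelta_sketch("abrakadabra") is None
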
Example 2
 def test_duration2timedelta(self):
     for expr, secs in TestDuration.DURATIONS:
         td = duration2timedelta(expr)
         print("timedelta: %s" % td)
         print("duration: %s" % expr)
         print("expected seconds: %s" % secs)
         assert(int(td.total_seconds()) == secs)
         assert(int(total_seconds(td)) == secs)
Example 3
    def webfinger(self, resource=None, rel=None):
        """An implementation the webfinger protocol (http://tools.ietf.org/html/draft-ietf-appsawg-webfinger-12)
        in order to provide information about up and downstream metadata available at this pyFF instance.

Example:

.. code-block:: bash

        # curl http://localhost:8080/.well-known/webfinger?resource=http://localhost:8080

This should result in a JSON structure that looks something like this:

.. code-block:: json

        {"expires": "2013-04-13T17:40:42.188549",
         "links": [
            {"href": "http://reep.refeds.org:8080/role/sp.xml", "rel": "urn:oasis:names:tc:SAML:2.0:metadata"},
            {"href": "http://reep.refeds.org:8080/role/sp.json", "rel": "disco-json"}],
         "subject": "http://reep.refeds.org:8080"}

Depending on which version of pyFF you're running and its configuration, you may also see downstream metadata
listed using the 'role' attribute on the link elements.
        """
        if resource is None:
            raise cherrypy.HTTPError(400, _("Bad Request - missing resource parameter"))

        jrd = dict()
        dt = datetime.now() + duration2timedelta("PT1H")
        jrd['expires'] = dt.isoformat()
        jrd['subject'] = cherrypy.request.base
        links = list()
        jrd['links'] = links

        def _links(a):
            links.append(
                dict(rel='urn:oasis:names:tc:SAML:2.0:metadata',
                     role="provider",
                     href='%s/%s.xml' % (cherrypy.request.base, a)))
            links.append(dict(rel='disco-json', href='%s/%s.json' % (cherrypy.request.base, a)))

        for a in self.server.md.store.collections():
            if '://' not in a:
                a = a.lstrip('/')
                _links(a)
            elif 'http://' in a or 'https://' in a:
                links.append(dict(rel='urn:oasis:names:tc:SAML:2.0:metadata',
                                  href=a,
                                  role="consumer",
                                  properties=dict()))

        for a in self.server.aliases.keys():
            for v in self.server.md.store.attribute(self.server.aliases[a]):
                _links('%s/%s' % (a, v))

        cherrypy.response.headers['Content-Type'] = 'application/json'
        return dumps(jrd)
Example 4
def expiration(t):
    relt = root(t)
    if relt.tag in ('{%s}EntityDescriptor' % NS['md'],
                    '{%s}EntitiesDescriptor' % NS['md']):
        cache_duration = config.default_cache_duration
        valid_until = relt.get('validUntil', None)
        if valid_until is not None:
            now = utc_now().replace(microsecond=0)
            vu = iso2datetime(valid_until)
            return vu - now
        elif config.respect_cache_duration:
            cache_duration = relt.get('cacheDuration',
                                      config.default_cache_duration)
            return duration2timedelta(cache_duration)

    return None
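
A hedged illustration of the validUntil branch above: the remaining lifetime is simply validUntil minus the current UTC time. iso2datetime and utc_now are pyFF helpers; this standalone sketch substitutes stdlib equivalents and a hand-built element:

from datetime import datetime, timezone
from lxml import etree

MD_NS = 'urn:oasis:names:tc:SAML:2.0:metadata'
relt = etree.fromstring(
    '<md:EntitiesDescriptor xmlns:md="%s" validUntil="2030-01-01T00:00:00Z"/>' % MD_NS)

now = datetime.now(timezone.utc).replace(microsecond=0)
vu = datetime.strptime(relt.get('validUntil'),
                       '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)
print(vu - now)  # the timedelta expiration() would return for this document
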
Example 5
File: store.py Project: br00k/pyFF
    def _expiration(self, relt):
        ts = _now() + self.default_ttl

        if self.respect_validity:
            valid_until = relt.get("validUntil", None)
            if valid_until is not None:
                dt = iso8601.parse_date(valid_until)
                if dt is not None:
                    ts = totimestamp(dt)

            cache_duration = relt.get("cacheDuration", None)
            if cache_duration is not None:
                dt = datetime.utcnow() + duration2timedelta(cache_duration)
                if dt is not None:
                    ts = totimestamp(dt)

        return ts
Example 6
 def test_bad(self):
     assert (duration2timedelta("abrakadabra") is None)
Example 7
def finalize(req, *opts):
    """
Prepares the working document for publication/rendering.

:param req: The request
:param opts: Options (not used)
:return: returns the working document with @Name, @cacheDuration and @validUntil set

Set Name, ID, cacheDuration and validUntil on the toplevel EntitiesDescriptor element of the working document. Unless
explicitly provided, the @Name is set from the request URI if the pipeline is executed in the pyFF server. The @ID is set
to a string representing the current date/time, prefixed with the string provided, which defaults to '_'. The
@cacheDuration attribute must be a valid xsd:duration (e.g. PT5H for 5 hours) and @validUntil can be either an absolute
ISO 8601 time string or (more commonly) a relative time of the form

.. code-block:: none

    \+?([0-9]+d)?\s*([0-9]+h)?\s*([0-9]+m)?\s*([0-9]+s)?


For instance +45d 2m results in a time delta of 45 days and 2 minutes. The '+' sign is optional.

If operating on a single EntityDescriptor then @Name is ignored (cf :py:mod:`pyff.pipes.builtins.first`).

**Examples**

.. code-block:: yaml

    - finalize:
        cacheDuration: PT8H
        validUntil: +10d
        ID: pyff
    """
    if req.t is None:
        raise PipeException("Your plumbing is missing a select statement.")

    e = root(req.t)
    if e.tag == "{%s}EntitiesDescriptor" % NS['md']:
        name = req.args.get('name', None)
        if name is None or not len(name):
            name = req.args.get('Name', None)
        if name is None or not len(name):
            name = req.state.get('url', None)
        if name is None or not len(name):
            name = e.get('Name', None)
        if name is not None and len(name):
            e.set('Name', name)

    now = datetime.utcnow()

    mdid = req.args.get('ID', 'prefix _')
    if re.match(r'(\s)*prefix(\s)*', mdid):
        prefix = re.sub(r'^(\s)*prefix(\s)*', '', mdid)
        ID = now.strftime(prefix + "%Y%m%dT%H%M%SZ")
    else:
        ID = mdid

    if not e.get('ID'):
        e.set('ID', ID)

    valid_until = str(req.args.get('validUntil', e.get('validUntil', None)))
    if valid_until is not None and len(valid_until) > 0:
        offset = duration2timedelta(valid_until)
        if offset is not None:
            dt = now + offset
            e.set('validUntil', dt.strftime("%Y-%m-%dT%H:%M:%SZ"))
        elif valid_until is not None:
            try:
                dt = iso8601.parse_date(valid_until)
                dt = dt.replace(tzinfo=None) # make dt "naive" (tz-unaware)
                offset = dt - now
                e.set('validUntil', dt.strftime("%Y-%m-%dT%H:%M:%SZ"))
            except ValueError as ex:
                log.error("Unable to parse validUntil: %s (%s)" % (valid_until, ex))

            # set a reasonable default: 1/50th of the validity
        # we replace this below if we have cacheDuration set
        req.state['cache'] = int(total_seconds(offset) / 50)
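
The relative validUntil grammar quoted in the docstring can be exercised on its own. A minimal sketch assuming exactly that grammar; parse_relative is a hypothetical helper name, not part of pyFF:

import re
from datetime import timedelta

_REL_RE = re.compile(r'^\+?(?:(\d+)d)?\s*(?:(\d+)h)?\s*(?:(\d+)m)?\s*(?:(\d+)s)?$')

def parse_relative(expr):
    m = _REL_RE.match(expr.strip())
    if m is None or not any(m.groups()):
        return None
    d, h, mins, s = (int(g) if g else 0 for g in m.groups())
    return timedelta(days=d, hours=h, minutes=mins, seconds=s)

assert parse_relative('+45d 2m') == timedelta(days=45, minutes=2)  # the docstring's example
assert parse_relative('+10d') == timedelta(days=10)                # validUntil from the YAML example
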
Example 8
    def parse(self, resource: Resource, content: str) -> EidasMDParserInfo:
        info = EidasMDParserInfo(description='eIDAS MetadataServiceList',
                                 expiration_time='None')
        t = parse_xml(unicode_stream(content))
        if config.xinclude:
            t.xinclude()
        relt = root(t)
        info.version = relt.get('Version', '0')
        info.issue_date = relt.get('IssueDate')
        info.next_update = relt.get('NextUpdate')
        if isinstance(info.next_update, str):
            resource.expire_time = iso2datetime(info.next_update)
        elif config.respect_cache_duration:
            duration = duration2timedelta(config.default_cache_duration)
            if not duration:
                # TODO: what is the right action here?
                raise ValueError(
                    f'Invalid default cache duration: {config.default_cache_duration}'
                )
            info.next_update = utc_now().replace(microsecond=0) + duration
            resource.expire_time = info.next_update

        info.expiration_time = 'None' if not resource.expire_time else resource.expire_time.isoformat()
        info.issuer_name = first_text(relt, "{%s}IssuerName" % NS['ser'])
        info.scheme_identifier = first_text(relt,
                                            "{%s}SchemeIdentifier" % NS['ser'])
        info.scheme_territory = first_text(relt,
                                           "{%s}SchemeTerritory" % NS['ser'])
        for mdl in relt.iter("{%s}MetadataList" % NS['ser']):
            for ml in mdl.iter("{%s}MetadataLocation" % NS['ser']):
                location = ml.get('Location')
                if location:
                    certs = CertDict(ml)
                    fingerprints = list(certs.keys())
                    fp = None
                    if len(fingerprints) > 0:
                        fp = fingerprints[0]

                    ep = ml.find("{%s}Endpoint" % NS['ser'])
                    if ep is not None and fp is not None:
                        args = dict(
                            country_code=mdl.get('Territory'),
                            hide_from_discovery=strtobool(
                                ep.get('HideFromDiscovery', 'false')),
                        )
                        log.debug("MDSL[{}]: {} verified by {} for country {}".
                                  format(info.scheme_territory, location, fp,
                                         args.get('country_code')))
                        child_opts = resource.opts.copy(update={'alias': None})
                        child_opts.verify = fp
                        r = resource.add_child(location, child_opts)

                        # this is specific post-processing for MDSL files
                        def _update_entities(_t, **kwargs):
                            _country_code = kwargs.get('country_code')
                            _hide_from_discovery = kwargs.get(
                                'hide_from_discovery')
                            for e in iter_entities(_t):
                                if _country_code:
                                    set_nodecountry(e, _country_code)
                                if bool(_hide_from_discovery) and is_idp(e):
                                    set_entity_attributes(
                                        e, {
                                            ATTRS['entity-category']:
                                            'http://refeds.org/category/hide-from-discovery'
                                        })
                            return _t

                        r.add_via(Lambda(_update_entities, **args))

        log.debug("Done parsing eIDAS MetadataServiceList")
        resource.last_seen = utc_now().replace(microsecond=0)
        resource.expire_time = None
        return info
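
The expiry fallback above reduces to: trust NextUpdate when the MDSL provides one, otherwise advance "now" by the configured default cache duration. A sketch under that assumption; the one-hour default is illustrative, not pyFF's configured value:

from datetime import datetime, timedelta, timezone

def expire_time(next_update, default_cache_duration=timedelta(hours=1)):
    if isinstance(next_update, str):
        # e.g. a NextUpdate attribute such as '2030-01-01T00:00:00Z'
        return datetime.strptime(next_update,
                                 '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)
    return datetime.now(timezone.utc).replace(microsecond=0) + default_cache_duration

print(expire_time('2030-01-01T00:00:00Z'))  # uses NextUpdate as-is
print(expire_time(None))                    # falls back to now + PT1H
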
Example 9
        def consumer(q, njobs, stats, next_jobs=None, resolved=None):
            if next_jobs is None:
                next_jobs = []
            if resolved is None:
                resolved = set()
            nfinished = 0

            while nfinished < njobs:
                info = None
                try:
                    log.debug("waiting for next thread to finish...")
                    thread = q.get(True)
                    thread.join(timeout)

                    if thread.isAlive():
                        raise MetadataException("thread timeout fetching '%s'" % thread.url)

                    info = {
                        'Time Spent': thread.time()
                    }

                    if thread.ex is not None:
                        raise thread.ex
                    else:
                        if thread.result is not None:
                            info['Bytes'] = len(thread.result)
                        else:
                            raise MetadataException("empty response fetching '%s'" % thread.url)
                        info['Cached'] = thread.cached
                        info['Date'] = str(thread.date)
                        info['Last-Modified'] = str(thread.last_modified)
                        info['Tries'] = thread.tries

                    xml = thread.result.strip()

                    if thread.status is not None:
                        info['Status'] = thread.status

                    t = self.parse_metadata(StringIO(xml), key=thread.verify, base_url=thread.url)
                    if t is None:
                        self.fire(type=EVENT_IMPORT_FAIL, url=thread.url)
                        raise MetadataException("no valid metadata found at '%s'" % thread.url)

                    relt = root(t)
                    if relt.tag in ('{%s}XRD' % NS['xrd'], '{%s}XRDS' % NS['xrd']):
                        log.debug("%s looks like an xrd document" % thread.url)
                        for xrd in t.xpath("//xrd:XRD", namespaces=NS):
                            log.debug("xrd: %s" % xrd)
                            for link in xrd.findall(".//{%s}Link[@rel='%s']" % (NS['xrd'], NS['md'])):
                                url = link.get("href")
                                certs = xmlsec.CertDict(link)
                                fingerprints = certs.keys()
                                fp = None
                                if len(fingerprints) > 0:
                                    fp = fingerprints[0]
                                log.debug("fingerprint: %s" % fp)
                                next_jobs.append((url, fp, url, 0))

                    elif relt.tag in ('{%s}EntityDescriptor' % NS['md'], '{%s}EntitiesDescriptor' % NS['md']):
                        cacheDuration = self.default_cache_duration
                        if self.respect_cache_duration:
                            cacheDuration = root(t).get('cacheDuration', self.default_cache_duration)
                        offset = duration2timedelta(cacheDuration)

                        if thread.cached:
                            if thread.last_modified + offset < datetime.now() - duration2timedelta(self.min_cache_ttl):
                                raise MetadataException("cached metadata expired")
                            else:
                                log.debug("found cached metadata for '%s' (last-modified: %s)" % (thread.url, thread.last_modified))
                                ne = self.import_metadata(t, url=thread.id)
                                info['Number of Entities'] = ne
                        else:
                            log.debug("got fresh metadata for '%s' (date: %s)" % (thread.url, thread.date))
                            ne = self.import_metadata(t, url=thread.id)
                            info['Number of Entities'] = ne
                        info['Cache Expiration Time'] = str(thread.last_modified + offset)
                        certs = xmlsec.CertDict(relt)
                        cert = None
                        if certs.values():
                            cert = certs.values()[0].strip()
                        resolved.add((thread.url, cert))
                    else:
                        raise MetadataException("unknown metadata type for '%s' (%s)" % (thread.url, relt.tag))
                except Exception as ex:
                    #traceback.print_exc(ex)
                    log.warn("problem fetching '%s' (will retry): %s" % (thread.url, ex))
                    if info is not None:
                        info['Exception'] = ex
                    if thread.tries < self.retry_limit:
                        next_jobs.append((thread.url, thread.verify, thread.id, thread.tries + 1))
                    else:
                        #traceback.print_exc(ex)
                        log.error("retry limit exceeded for %s (last error was: %s)" % (thread.url, ex))
                finally:
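
The cache-expiry test in the snippet above can be read in isolation: cached metadata is considered stale once last_modified plus cacheDuration falls behind now minus min_cache_ttl. A hedged sketch of just that comparison:

from datetime import datetime, timedelta

def cache_expired(last_modified, cache_duration, min_cache_ttl):
    # stale once last_modified + cacheDuration < now - min_cache_ttl
    return last_modified + cache_duration < datetime.now() - min_cache_ttl

print(cache_expired(datetime.now() - timedelta(hours=6),
                    timedelta(hours=4), timedelta(hours=1)))  # True: stale
print(cache_expired(datetime.now() - timedelta(hours=1),
                    timedelta(hours=4), timedelta(hours=1)))  # False: still fresh
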
Example 10
def finalize(req, *opts):
    """
Prepares the working document for publication/rendering.

:param req: The request
:param opts: Options (not used)
:return: returns the working document with @Name, @cacheDuration and @validUntil set

Set Name, cacheDuration and validUntil on the toplevel EntitiesDescriptor element of the working document. Unless
explicitly provided, the @Name is set from the request URI if the pipeline is executed in the pyFF server. The
@cacheDuration attribute must be a valid xsd:duration (e.g. PT5H for 5 hours) and @validUntil can be either an absolute
ISO 8601 time string or (more commonly) a relative time of the form

.. code-block:: none

    \+?([0-9]+d)?\s*([0-9]+h)?\s*([0-9]+m)?\s*([0-9]+s)?


For instance +45d 2m results in a time delta of 45 days and 2 minutes. The '+' sign is optional.

If operating on a single EntityDescriptor then @Name is ignored (cf :py:mod:`pyff.pipes.builtins.first`).

**Examples**

.. code-block:: yaml

    - finalize:
        cacheDuration: PT8H
        validUntil: +10d
    """
    if req.t is None:
        raise PipeException("Your plumbing is missing a select statement.")

    e = root(req.t)
    if e.tag == "{%s}EntitiesDescriptor" % NS['md']:
        name = req.args.get('name', None)
        if name is None or not len(name):
            name = req.args.get('Name', None)
        if name is None or not len(name):
            name = req.state.get('url', None)
        if name is None or not len(name):
            name = e.get('Name', None)
        if name is not None and len(name):
            e.set('Name', name)

    if not e.get('ID'):
        e.set('ID', datetime.now().strftime("pyff%Y%m%dT%H%M%S%Z"))

    validUntil = req.args.get('validUntil', e.get('validUntil', None))
    if validUntil is not None and len(validUntil) > 0:
        offset = duration2timedelta(validUntil)
        if offset is not None:
            dt = datetime.now() + offset
            e.set('validUntil', dt.isoformat())
        elif validUntil is not None:
            dt = iso8601.parse_date(validUntil)
            offset = dt - datetime.now()
            # set a reasonable default: 1/50th of the validity
        # we replace this below if we have cacheDuration set
        req.state['cache'] = int(total_seconds(offset) / 50)

    cacheDuration = req.args.get('cacheDuration', e.get('cacheDuration', None))
    if cacheDuration is not None and len(cacheDuration) > 0:
        offset = duration2timedelta(cacheDuration)
        if offset is None:
            raise PipeException("Unable to parse %s as xs:duration" % cacheDuration)

        e.set('cacheDuration', cacheDuration)
        req.state['cache'] = int(total_seconds(offset))

    return req.t
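
The req.state['cache'] arithmetic above amounts to: the full cacheDuration in seconds when one is given, otherwise 1/50th of the remaining validity. Worked through with the docstring's own values:

from datetime import timedelta

print(int(timedelta(hours=8).total_seconds()))       # cacheDuration PT8H -> 28800
print(int(timedelta(days=10).total_seconds() / 50))  # validUntil +10d -> 17280 (4.8 hours)
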
Example 11
                dt = iso8601.parse_date(valid_until)
                dt = dt.replace(tzinfo=None)  # make dt "naive" (tz-unaware)
                offset = dt - now
                e.set('validUntil', dt.strftime("%Y-%m-%dT%H:%M:%SZ"))
            except ValueError as ex:
                log.error("Unable to parse validUntil: %s (%s)" %
                          (valid_until, ex))

                # set a reasonable default: 1/50th of the validity
        # we replace this below if we have cacheDuration set
        req.state['cache'] = int(total_seconds(offset) / 50)

    cache_duration = req.args.get('cacheDuration',
                                  e.get('cacheDuration', None))
    if cache_duration is not None and len(cache_duration) > 0:
        offset = duration2timedelta(cache_duration)
        if offset is None:
            raise PipeException("Unable to parse %s as xs:duration" %
                                cache_duration)

        e.set('cacheDuration', cache_duration)
        req.state['cache'] = int(total_seconds(offset))

    return req.t


@pipe(name='reginfo')
def _reginfo(req, *opts):
    """
Sets registration info extension on EntityDescription element
Example 12
File: mdrepo.py Project: GEANT/met
        def consumer(q, njobs, stats, next_jobs=None, resolved=None):
            if next_jobs is None:
                next_jobs = []
            if resolved is None:
                resolved = set()
            nfinished = 0

            while nfinished < njobs:
                info = None
                try:
                    log.debug("waiting for next thread to finish...")
                    thread = q.get(True)
                    thread.join(timeout)

                    if thread.isAlive():
                        raise MetadataException(
                            "thread timeout fetching '%s'" % thread.url)

                    info = {
                        'Time Spent': thread.time()
                    }

                    if thread.ex is not None:
                        raise thread.ex
                    else:
                        if thread.result is not None:
                            info['Bytes'] = len(thread.result)
                        else:
                            raise MetadataException(
                                "empty response fetching '%s'" % thread.url)
                        info['Cached'] = thread.cached
                        info['Date'] = str(thread.date)
                        info['Last-Modified'] = str(thread.last_modified)
                        info['Tries'] = thread.tries

                    xml = thread.result.strip()

                    if thread.status is not None:
                        info['Status'] = thread.resp.status_code

                    t = self.parse_metadata(
                        StringIO(xml), key=thread.verify, base_url=thread.url)
                    if t is None:
                        self.fire(type=EVENT_IMPORT_FAIL, url=thread.url)
                        raise MetadataException(
                            "no valid metadata found at '%s'" % thread.url)

                    relt = root(t)
                    if relt.tag in ('{%s}XRD' % NS['xrd'], '{%s}XRDS' % NS['xrd']):
                        log.debug("%s looks like an xrd document" % thread.url)
                        for xrd in t.xpath("//xrd:XRD", namespaces=NS):
                            log.debug("xrd: %s" % xrd)
                            for link in xrd.findall(".//{%s}Link[@rel='%s']" % (NS['xrd'], NS['md'])):
                                url = link.get("href")
                                certs = xmlsec.CertDict(link)
                                fingerprints = certs.keys()
                                fp = None
                                if len(fingerprints) > 0:
                                    fp = fingerprints[0]
                                log.debug("fingerprint: %s" % fp)
                                next_jobs.append((url, fp, url, 0))

                    elif relt.tag in ('{%s}EntityDescriptor' % NS['md'], '{%s}EntitiesDescriptor' % NS['md']):
                        cacheDuration = self.default_cache_duration
                        if self.respect_cache_duration:
                            cacheDuration = root(t).get(
                                'cacheDuration', self.default_cache_duration)
                        offset = duration2timedelta(cacheDuration)

                        if thread.cached:
                            if thread.last_modified + offset < datetime.now() - duration2timedelta(self.min_cache_ttl):
                                raise MetadataException(
                                    "cached metadata expired")
                            else:
                                log.debug("found cached metadata for '%s' (last-modified: %s)" %
                                          (thread.url, thread.last_modified))
                                ne = self.import_metadata(t, url=thread.id)
                                info['Number of Entities'] = ne
                        else:
                            log.debug("got fresh metadata for '%s' (date: %s)" % (
                                thread.url, thread.date))
                            ne = self.import_metadata(t, url=thread.id)
                            info['Number of Entities'] = ne
                        info['Cache Expiration Time'] = str(
                            thread.last_modified + offset)
                        certs = xmlsec.CertDict(relt)
                        cert = None
                        if certs.values():
                            cert = certs.values()[0].strip()
                        resolved.add((thread.url, cert))
                    else:
                        raise MetadataException(
                            "unknown metadata type for '%s' (%s)" % (thread.url, relt.tag))
                except Exception as ex:
                    # traceback.print_exc(ex)
                    log.warn("problem fetching '%s' (will retry): %s" %
                             (thread.url, ex))
                    if info is not None:
                        info['Exception'] = ex
                    if thread.tries < self.retry_limit:
                        next_jobs.append(
                            (thread.url, thread.verify, thread.id, thread.tries + 1))
                    else:
                        # traceback.print_exc(ex)
                        log.error(
                            "retry limit exceeded for %s (last error was: %s)" % (thread.url, ex))
                finally:
Example 13
        elif valid_until is not None:
            try:
                dt = iso8601.parse_date(valid_until)
                dt = dt.replace(tzinfo=None) # make dt "naive" (tz-unaware)
                offset = dt - now
                e.set('validUntil', dt.strftime("%Y-%m-%dT%H:%M:%SZ"))
            except ValueError as ex:
                log.error("Unable to parse validUntil: %s (%s)" % (valid_until, ex))

            # set a reasonable default: 1/50th of the validity
        # we replace this below if we have cacheDuration set
        req.state['cache'] = int(total_seconds(offset) / 50)

    cache_duration = req.args.get('cacheDuration', e.get('cacheDuration', None))
    if cache_duration is not None and len(cache_duration) > 0:
        offset = duration2timedelta(cache_duration)
        if offset is None:
            raise PipeException("Unable to parse %s as xs:duration" % cache_duration)

        e.set('cacheDuration', cache_duration)
        req.state['cache'] = int(total_seconds(offset))

    return req.t

@pipe(name='reginfo')
def _reginfo(req, *opts):
    """
Sets registration info extension on EntityDescription element

:param req: The request
:param opts: Options (not used)