Example #1
def fetch_and_process(url):
    # oauth_token and last_update_time are presumably defined in the enclosing
    # scope of the original project.
    logging.debug("Following url %s" % url)
    response = raw_github_request(url, oauth_token=oauth_token)
    prs = json.loads(response.content)
    now = datetime.utcnow()
    should_continue_loading = True
    update_time = last_update_time
    for pr in prs:
        updated_at = (parse_datetime(pr['updated_at'])
                      .astimezone(tz.tzutc()).replace(tzinfo=None))
        update_time = max(update_time, updated_at)
        if updated_at < last_update_time:
            # Stop once we reach a PR that predates the last sync.
            should_continue_loading = False
            break
        is_fresh = (now - updated_at).total_seconds() < app.config['FRESHNESS_THRESHOLD']
        queue_name = "fresh-prs" if is_fresh else "old-prs"
        taskqueue.add(url=url_for(".update_pr", pr_number=pr['number']),
                      queue_name=queue_name)
    if should_continue_loading:
        # Follow the 'next' page advertised in the Link header.
        link_header = parse_link_header(response.headers.get('Link', ''))
        for link in link_header.links:
            if link.rel == 'next':
                fetch_and_process(link.href)
    return update_time
Example #2
def fetch_and_process(url):
    logging.debug("Following url %s" % url)
    response = raw_request(url, oauth_token=app.config['GITHUB_OAUTH_KEY'])
    link_header = parse_link_header(response.headers.get('Link', ''))
    prs = json.loads(response.content)
    for pr in prs:
        taskqueue.add(url="/tasks/update-issue/%i" % pr['number'])
    for link in link_header.links:
        if link.rel == 'next':
            fetch_and_process(link.href)
Example #3
def fetch_and_process(url):
    logging.debug("Following url %s" % url)
    response = raw_github_request(url, oauth_token=app.config["GITHUB_OAUTH_KEY"])
    link_header = parse_link_header(response.headers.get("Link", ""))
    prs = json.loads(response.content)
    now = datetime.utcnow()
    for pr in prs:
        updated_at = (parse_datetime(pr["updated_at"])
                      .astimezone(tz.tzutc()).replace(tzinfo=None))
        is_fresh = (now - updated_at).total_seconds() < app.config["FRESHNESS_THRESHOLD"]
        queue_name = "fresh-prs" if is_fresh else "old-prs"
        taskqueue.add(url="/tasks/update-github-pr/%i" % pr["number"], queue_name=queue_name)
    for link in link_header.links:
        if link.rel == "next":
            fetch_and_process(link.href)
Example #4
def fetch_and_process(url):
    logging.debug("Following url %s" % url)
    response = raw_github_request(url, oauth_token=app.config['GITHUB_OAUTH_KEY'])
    link_header = parse_link_header(response.headers.get('Link', ''))
    prs = json.loads(response.content)
    now = datetime.utcnow()
    for pr in prs:
        updated_at = (parse_datetime(pr['updated_at'])
                      .astimezone(tz.tzutc()).replace(tzinfo=None))
        is_fresh = (now - updated_at).total_seconds() < app.config['FRESHNESS_THRESHOLD']
        queue_name = "fresh-prs" if is_fresh else "old-prs"
        taskqueue.add(url=url_for(".update_pr", pr_number=pr['number']), queue_name=queue_name)
    for link in link_header.links:
        if link.rel == 'next':
            fetch_and_process(link.href)
Example #5
def get_next_url(resp):
    # Return the href of the 'next' link, or None if there is no further page.
    link_header = parse_link_header(resp.headers.get('Link', ''))
    for link in link_header.links:
        if link.rel == 'next':
            return link.href
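get_next_url returns the href of the 'next' relation, or None when the Link header names no further page, so it can drive an iterative pagination loop instead of the recursion used in the earlier examples. A minimal sketch of such a loop, reusing raw_github_request and app.config['GITHUB_OAUTH_KEY'] from the examples above (the fetch_all_pages name is hypothetical):

def fetch_all_pages(url):
    # Collect the decoded JSON items from every page by following 'next' links.
    items = []
    while url:
        response = raw_github_request(url, oauth_token=app.config['GITHUB_OAUTH_KEY'])
        items.extend(json.loads(response.content))
        url = get_next_url(response)  # None ends the loop
    return items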
Example #6
def notification(url):
    """Receives a notification from the hub."""

    topic = None
    hub = None

    feed = Feed.query.filter_by(unique_url=url).first()
    if not feed:
        app.logger.warning(u'No feed found for url {0}'.format(url))
        abort(404)

    if feed.status != STATUS.SUBSCRIBED:
        app.logger.warning(u'Received notification for unsubscribed feed '
                           '{0} from {1}'.format(feed.topic,
                                                 request.remote_addr))
        abort(404)

    try:
        lh = request.headers.get('link')

        if lh:
            p = parse_link_header(lh)

            for link in p.links:
                if link.rel == 'hub':
                    hub = link.href
                elif link.rel == 'self':
                    topic = link.href

    except Exception as e:
        app.logger.error(u'Could not parse link header for {0}: {1}'
                         .format(url, e))

    if topic is None:
        topic = feed.topic

    if hub is None:
        hub = feed.hub

    app.logger.info(u'Received notification for {0} from {1}'
                    .format(topic, hub))

    if feed.secret:
        sig = request.headers.get('X-Hub-Signature')
        if not sig:
            app.logger.warning(u'Notification for {0} from {1} did not '
                               'contain secret signature'.format(topic, hub))
            abort(400)

        try:
            h = hmac.new(bytes(feed.secret, 'UTF-8'), digestmod=hashlib.sha1)
            h.update(request.get_data())
            digest = h.hexdigest()
        except Exception:
            app.logger.exception(u'Could not compute hmac signature for {0}'
                                 .format(topic))
            return Response(status=200)

        if sig != "sha1=" + digest:
            app.logger.warning(u'Signature for {0} from {1} did not match'
                               .format(topic, hub))
            return Response(status=200)

    feed.last_update_received = datetime.utcnow()

    if feed.fetch_feed_on_notify:
        app.logger.info(u'Fetching RSS for Feed {0}, discarding notification'
                        .format(feed))
        success = Subscriber().get_rss(feed)
        if not success:
            db.session.commit()
        return Response(status=200)

    data = request.get_data()
    if data is None:
        app.logger.warning(u'No data in notification for {0} from {1}'
                           .format(topic, hub))
        return Response(status=200)

    encoding = request.content_encoding or 'UTF-8'

    notification_received.send(bp,
                               feed=feed,
                               content_type=request.mimetype,
                               content=data,
                               encoding=encoding)

    return Response(status=200)
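The notification handler above verifies the hub's X-Hub-Signature by recomputing an HMAC-SHA1 of the raw request body with the feed's shared secret and comparing hex digests with !=. A minimal sketch of the same check pulled into a helper, using hmac.compare_digest for a constant-time comparison (the signature_is_valid name is hypothetical; feed.secret and the request object are assumed to behave as in the example):

def signature_is_valid(feed, request):
    # Recompute the SHA-1 HMAC of the raw body and compare it with the
    # hub-supplied X-Hub-Signature header.
    sig = request.headers.get('X-Hub-Signature', '')
    h = hmac.new(bytes(feed.secret, 'UTF-8'), request.get_data(), hashlib.sha1)
    return hmac.compare_digest(sig, 'sha1=' + h.hexdigest())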