Example #1
 def _OnResponse(response):
     response_dict = www_util.ParseJSONResponse(response)
     if response_dict.get('status') == 'alert':
         logger.error(_FormatServiceHealthReport(response_dict))
     else:
         logger.info('CheckServiceHealth passed.')
     callback()
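Every call site below hands a Tornado HTTPResponse to www_util.ParseJSONResponse and works with the decoded dictionary. The helper itself is not shown in these examples; a minimal sketch of what the call sites appear to assume (fail loudly on HTTP errors, then decode the JSON body) might look like the following. This is an illustrative stand-in, not the project's actual implementation.

import json

from tornado import web


def ParseJSONResponse(response):
    """Hypothetical stand-in: raise on transport/HTTP errors, then decode the JSON body."""
    if response.error:
        # Surface the failure as a web.HTTPError so callers can catch it,
        # as Example #5 does for a 400 from the token refresh endpoint.
        raise web.HTTPError(response.code, 'request failed: %s' % response.error)
    return json.loads(response.body)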
Example #2
 def _OnResponse(response):
     response_dict = www_util.ParseJSONResponse(response)
     viewpoints = response_dict.get('viewpoints')
     if not viewpoints:
         logger.error(
             'query_followed returned 0 viewpoints, should always return at least one.'
         )
     else:
         logger.info('QueryFollowed scenario passed.')
     callback()
Example #3
    def _GetUserInfo(self, device_dict, refresh_token, response):
        """Parses the google access token from the JSON response body. Gets user data via OAUTH2
    with access token.
    """
        tokens = www_util.ParseJSONResponse(response)
        assert tokens, 'unable to fetch access token'
        access_token = tokens['access_token']
        expires = tokens['expires_in']
        if 'refresh_token' in tokens and not refresh_token:
            refresh_token = tokens['refresh_token']

        # Using the access token that was previously retrieved, request information about the
        # user that is logging in.
        assert access_token, 'no access token was provided'
        url = AuthGoogleHandler._OAUTH2_USERINFO_URL + '?' + urllib.urlencode(
            {'access_token': access_token})
        http_client = httpclient.AsyncHTTPClient()
        response = yield gen.Task(http_client.fetch, url)

        # Parse the user information from the JSON response body and invoke _OnAuthenticate to
        # register the user as a viewfinder account. Create user dict from Google's JSON response.
        user_dict = www_util.ParseJSONResponse(response)
        assert user_dict, 'unable to fetch user data'
        assert 'phone' not in user_dict, user_dict
        assert 'email' in user_dict, user_dict
        user_dict['email'] = Identity.CanonicalizeEmail(user_dict['email'])

        # Ensure that user email is verified, else we can't trust that the user really owns it.
        if not user_dict.get('verified_email', False):
            raise web.HTTPError(
                403, _CANNOT_USE_UNVERIFIED_EMAIL % user_dict['email'])

        # Create identity dict from Google's email field.
        ident_dict = {
            'key': 'Email:%s' % user_dict['email'],
            'authority': 'Google',
            'refresh_token': refresh_token,
            'access_token': access_token,
            'expires': util.GetCurrentTimestamp() + expires
        }

        self._AuthUser(user_dict, ident_dict, device_dict)
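The user_dict asserted on above is the JSON body returned by Google's OAuth2 userinfo endpoint. An illustrative payload is shown below; the field values are made up, and only the keys the assertions rely on matter here.

# Illustrative only: the assertions above require an 'email' key, forbid a
# 'phone' key, and check 'verified_email' before trusting the address.
user_dict = {
    'id': '1234567890',
    'email': 'Jane.Doe@Example.com',
    'verified_email': True,
    'name': 'Jane Doe',
    'given_name': 'Jane',
    'family_name': 'Doe',
}
# Identity.CanonicalizeEmail then normalizes the address (e.g. lower-casing)
# before it is embedded in the 'Email:...' identity key.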
Example #4
 def _OnPollTokenEndpoint(response):
   json_response = www_util.ParseJSONResponse(response)
   if 'error' in json_response:
     callback(False)
   else:
     refresh_token = json_response.get('refresh_token')
     url = 'https://%s:%d/auth/google?refresh_token=%s' % \
           (ServerEnvironment.GetHost(), options.options.port, refresh_token)
     http_client.fetch(url, method='POST',
                       callback=_OnLogin,
                       body=json.dumps({}),
                       validate_cert=False, follow_redirects=False,
                       headers={'Content-Type': 'application/json'})
Example #5
        def _OnRefresh(response):
            try:
                response_dict = www_util.ParseJSONResponse(response)
            except web.HTTPError as e:
                if e.status_code == 400:
                    logging.error(
                        '%s: failed to refresh access token; clearing refresh token'
                        % e)
                    with util.ExceptionBarrier(util.LogExceptionCallback):
                        self.refresh_token = None
                        self.Update(client, util.NoCallback)
                raise

            self.access_token = response_dict['access_token']
            self.expires = time.time() + response_dict['expires_in']
            callback()
Example #6
        def _DetermineFacebookRankings():
            """Uses The tags from friends and the authors of the
      photos are used to determine friend rank for facebook contacts. The
      basic algorithm is:

      sorted([sum(exp_decay(pc.time) * strength(pc)) for pc in photos])

      A 'pc' in is a photo connection. There are three types, ordered by
      the 'strength' they impart in the summation equation:
        - from: the poster of a photo (strength=1.0)
        - tag: another user tagged in the photo (strength=1.0)
        - like: a facebook user who 'liked' the photo (strength=0.25)
      Exponential decay uses _FACEBOOK_CONNECTION_HALF_LIFE for half life.

      The rankings are passed to the provided callback as a dictionary of
      identity ('FacebookGraph:<id>') => rank.
      """
            logging.info(
                'determining facebook contact rankings for identity %r...' %
                self._identity)
            http_client = httpclient.AsyncHTTPClient()
            friends = dict()  # facebook id => connection strength
            likes = dict()
            now = util.GetCurrentTimestamp()

            def _ComputeScore(create_iso8601, conn_type):
                """Computes the strength of a photo connection based on the time
        that's passed and the connection type.
        """
                decay = 0.001  # default is 1/1000th
                if create_iso8601:
                    dt = iso8601.parse_date(create_iso8601)
                    create_time = calendar.timegm(dt.utctimetuple())
                    decay = math.exp(
                        -math.log(2) * (now - create_time) /
                        FetchContactsOperation._FACEBOOK_CONNECTION_HALF_LIFE)
                return decay * FetchContactsOperation._PHOTO_CONNECTION_STRENGTHS[
                    conn_type]

            # Construct the URL that will kick things off.
            url = FetchContactsOperation._FACEBOOK_PHOTOS_URL + '?' + \
                urllib.urlencode({'access_token': self._identity.access_token,
                                  'format': 'json', 'limit': FetchContactsOperation._MAX_FETCH_COUNT})
            while True:
                logging.info(
                    'querying next %d Facebook photos for user %d' %
                    (FetchContactsOperation._MAX_FETCH_COUNT, self._user_id))
                response = yield gen.Task(http_client.fetch, url, method='GET')
                response_dict = www_util.ParseJSONResponse(response)
                for p_dict in response_dict['data']:
                    created_time = p_dict.get('created_time', None)
                    if p_dict.get('from', None) and p_dict['from']['id']:
                        from_id = p_dict['from']['id']
                        friends[from_id] = friends.get(from_id, 0.0) + \
                            _ComputeScore(created_time, 'from')

                    if p_dict.get('tags', None):
                        for tag in p_dict['tags']['data']:
                            if tag.get('id', None) is not None:
                                friends[tag['id']] = friends.get(tag['id'], 0.0) + \
                                    _ComputeScore(tag.get('created_time', None), 'tag')

                    if p_dict.get('likes', None):
                        for like in p_dict['likes']['data']:
                            if like.get('id', None) is not None:
                                likes[like['id']] = likes.get(like['id'], 0.0) + \
                                    _ComputeScore(created_time, 'like')

                if (len(response_dict['data']) == FetchContactsOperation._MAX_FETCH_COUNT
                        and 'paging' in response_dict
                        and 'next' in response_dict['paging']):
                    url = response_dict['paging']['next']
                else:
                    for fb_id in friends.keys():
                        friends[fb_id] += likes.get(fb_id, 0.0)
                    ranked_friends = sorted(friends.items(),
                                            key=itemgetter(1),
                                            reverse=True)
                    logging.info(
                        'successfully ranked %d Facebook contacts for user %d'
                        % (len(ranked_friends), self._user_id))
                    raise gen.Return(dict(('FacebookGraph:%s' % fb_id, rank)
                                          for rank, (fb_id, _) in enumerate(ranked_friends)))
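The docstring above defines each photo connection's score as a per-type strength multiplied by an exponential half-life decay. A small standalone sketch of that arithmetic follows; the half life and strength table here are illustrative assumptions, while the real values are class attributes of FetchContactsOperation.

import math
import time

HALF_LIFE_SECS = 180 * 24 * 60 * 60                   # assumed 180-day half life
STRENGTHS = {'from': 1.0, 'tag': 1.0, 'like': 0.25}   # mirrors the docstring above


def compute_score(create_time, conn_type, now=None):
    """Strength of one photo connection: strength * exp(-ln(2) * age / half_life)."""
    now = now if now is not None else time.time()
    decay = math.exp(-math.log(2) * (now - create_time) / HALF_LIFE_SECS)
    return decay * STRENGTHS[conn_type]


# A photo posted exactly one half life ago contributes half of its base strength:
now = time.time()
assert abs(compute_score(now - HALF_LIFE_SECS, 'from', now) - 0.5) < 1e-9
assert abs(compute_score(now - HALF_LIFE_SECS, 'like', now) - 0.125) < 1e-9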
Example #7
    def _FetchFacebookContacts(self):
        """Do Facebook specific data gathering and checking.
    Queries Facebook graph API for friend list using the identity's access token.
    """
        @gen.coroutine
        def _DetermineFacebookRankings():
            """Uses The tags from friends and the authors of the
      photos are used to determine friend rank for facebook contacts. The
      basic algorithm is:

      sorted([sum(exp_decay(pc.time) * strength(pc)) for pc in photos])

      A 'pc' in is a photo connection. There are three types, ordered by
      the 'strength' they impart in the summation equation:
        - from: the poster of a photo (strength=1.0)
        - tag: another user tagged in the photo (strength=1.0)
        - like: a facebook user who 'liked' the photo (strength=0.25)
      Exponential decay uses _FACEBOOK_CONNECTION_HALF_LIFE for half life.

      The rankings are passed to the provided callback as a dictionary of
      identity ('FacebookGraph:<id>') => rank.
      """
            logging.info(
                'determining facebook contact rankings for identity %r...' %
                self._identity)
            http_client = httpclient.AsyncHTTPClient()
            friends = dict()  # facebook id => connection strength
            likes = dict()
            now = util.GetCurrentTimestamp()

            def _ComputeScore(create_iso8601, conn_type):
                """Computes the strength of a photo connection based on the time
        that's passed and the connection type.
        """
                decay = 0.001  # default is 1/1000th
                if create_iso8601:
                    dt = iso8601.parse_date(create_iso8601)
                    create_time = calendar.timegm(dt.utctimetuple())
                    decay = math.exp(
                        -math.log(2) * (now - create_time) /
                        FetchContactsOperation._FACEBOOK_CONNECTION_HALF_LIFE)
                return decay * FetchContactsOperation._PHOTO_CONNECTION_STRENGTHS[
                    conn_type]

            # Construct the URL that will kick things off.
            url = FetchContactsOperation._FACEBOOK_PHOTOS_URL + '?' + \
                urllib.urlencode({'access_token': self._identity.access_token,
                                  'format': 'json', 'limit': FetchContactsOperation._MAX_FETCH_COUNT})
            while True:
                logging.info(
                    'querying next %d Facebook photos for user %d' %
                    (FetchContactsOperation._MAX_FETCH_COUNT, self._user_id))
                response = yield gen.Task(http_client.fetch, url, method='GET')
                response_dict = www_util.ParseJSONResponse(response)
                for p_dict in response_dict['data']:
                    created_time = p_dict.get('created_time', None)
                    if p_dict.get('from', None) and p_dict['from']['id']:
                        from_id = p_dict['from']['id']
                        friends[from_id] = friends.get(from_id, 0.0) + \
                            _ComputeScore(created_time, 'from')

                    if p_dict.get('tags', None):
                        for tag in p_dict['tags']['data']:
                            if tag.get('id', None) is not None:
                                friends[tag['id']] = friends.get(tag['id'], 0.0) + \
                                    _ComputeScore(tag.get('created_time', None), 'tag')

                    if p_dict.get('likes', None):
                        for like in p_dict['likes']['data']:
                            if like.get('id', None) is not None:
                                likes[like['id']] = likes.get(like['id'], 0.0) + \
                                    _ComputeScore(created_time, 'like')

                if (len(response_dict['data']) == FetchContactsOperation._MAX_FETCH_COUNT
                        and 'paging' in response_dict
                        and 'next' in response_dict['paging']):
                    url = response_dict['paging']['next']
                else:
                    for fb_id in friends.keys():
                        friends[fb_id] += likes.get(fb_id, 0.0)
                    ranked_friends = sorted(friends.items(),
                                            key=itemgetter(1),
                                            reverse=True)
                    logging.info(
                        'successfully ranked %d Facebook contacts for user %d'
                        % (len(ranked_friends), self._user_id))
                    raise gen.Return(dict(('FacebookGraph:%s' % fb_id, rank)
                                          for rank, (fb_id, _) in enumerate(ranked_friends)))

        logging.info('fetching Facebook contacts for identity %r...' %
                     self._identity)
        http_client = httpclient.AsyncHTTPClient()
        # Track fetched contacts regardless of rank in order to dedup contacts retrieved from Facebook.
        rankless_ids = set()

        # First get the rankings and then fetch the contacts.
        rankings = yield _DetermineFacebookRankings()
        url = FetchContactsOperation._FACEBOOK_FRIENDS_URL + '?' + \
            urllib.urlencode({'fields': 'first_name,name,last_name',
                              'access_token': self._identity.access_token,
                              'format': 'json', 'limit': FetchContactsOperation._MAX_FETCH_COUNT})
        retries = 0
        while True:
            if retries >= FetchContactsOperation._MAX_FETCH_RETRIES:
                raise TooManyRetriesError(
                    'failed to fetch contacts %d times; aborting' % retries)
            logging.info(
                'fetching next %d Facebook contacts for user %d' %
                (FetchContactsOperation._MAX_FETCH_COUNT, self._user_id))
            response = yield gen.Task(http_client.fetch, url, method='GET')
            try:
                response_dict = www_util.ParseJSONResponse(response)
            except Exception as exc:
                logging.warning('failed to fetch Facebook contacts: %s' % exc)
                retries += 1
                continue

            for c_dict in response_dict['data']:
                if 'id' in c_dict:
                    ident = 'FacebookGraph:%s' % c_dict['id']

                    # Skip contact if name is not present, or is empty.
                    name = c_dict.get('name', None)
                    if name:
                        names = {
                            'name': name,
                            'given_name': c_dict.get('first_name', None),
                            'family_name': c_dict.get('last_name', None)
                        }

                        # Check to see if we've already processed an identical contact.
                        rankless_id = Contact.CalculateContactEncodedDigest(
                            identities_properties=[(ident, None)], **names)
                        if rankless_id in rankless_ids:
                            # Duplicate among fetched contacts. Skip it.
                            continue
                        else:
                            rankless_ids.add(rankless_id)

                        rank = rankings.get(ident)
                        fetched_contact = Contact.CreateFromKeywords(
                            self._user_id, [(ident, None)],
                            self._notify_timestamp,
                            Contact.FACEBOOK,
                            rank=rank,
                            **names)
                        self._fetched_contacts[
                            fetched_contact.contact_id] = fetched_contact

            # Prepare to fetch next batch.
            if (len(response_dict['data']) == FetchContactsOperation._MAX_FETCH_COUNT
                    and 'paging' in response_dict
                    and 'next' in response_dict['paging']):
                retries = 0
                url = response_dict['paging']['next']
            else:
                break
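Both Facebook loops above page through the Graph API the same way: process a page, then keep following paging.next as long as full pages of _MAX_FETCH_COUNT items come back. A condensed, hypothetical sketch of that idiom (the helper name and parameters are illustrative, and json.loads stands in for www_util.ParseJSONResponse):

import json

from tornado import gen, httpclient


@gen.coroutine
def fetch_all_pages(url, limit, process_item):
    """Hypothetical helper: follow Graph API 'paging.next' links until a short page."""
    http_client = httpclient.AsyncHTTPClient()
    while True:
        response = yield gen.Task(http_client.fetch, url, method='GET')
        page = json.loads(response.body)  # stand-in for www_util.ParseJSONResponse
        for item in page['data']:
            process_item(item)
        # A full page plus a 'paging.next' link means more data may be available.
        if len(page['data']) == limit and 'next' in page.get('paging', {}):
            url = page['paging']['next']
        else:
            return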
Example #8
    def _FetchGoogleContacts(self):
        """Do GMail specific data gathering and checking.
    Queries Google data API for contacts in JSON format.
    """
        # Track fetched contacts regardless of rank in order to dedup contacts retrieved from Google.
        assert self._identity.refresh_token is not None, self._identity

        if self._identity.expires and self._identity.expires < time.time():
            yield gen.Task(self._identity.RefreshGoogleAccessToken,
                           self._client)

        logging.info('fetching Google contacts for identity %r...' %
                     self._identity)
        http_client = httpclient.AsyncHTTPClient()
        # Google data API uses 1-based start index.
        start_index = 1
        retries = 0
        count = FetchContactsOperation._MAX_FETCH_COUNT
        while True:
            if retries >= FetchContactsOperation._MAX_FETCH_RETRIES:
                raise TooManyRetriesError(
                    'failed to fetch contacts %d times; aborting' % retries)
            logging.info('fetching next %d Google contacts for user %d' %
                         (count, self._user_id))
            url = FetchContactsOperation._GOOGLE_CONTACTS_URL + '?' + \
                urllib.urlencode({'max-results': count,
                                  'start-index': start_index,
                                  'alt': 'json'})
            response = yield gen.Task(
                http_client.fetch, url, method='GET',
                headers={'Authorization': 'OAuth %s' % self._identity.access_token,
                         'GData-Version': 3.0})
            try:
                response_dict = www_util.ParseJSONResponse(response)['feed']
            except Exception as exc:
                logging.warning('failed to fetch Google contacts: %s' % exc)
                retries += 1
                continue

            # Temporarily log additional information to figure out why some responses don't seem to have "entry" fields.
            if 'entry' not in response_dict:
                logging.warning('Missing entry: %s' %
                                json.dumps(response_dict, indent=True))

            for c_dict in response_dict.get('entry', []):
                # Build identities_properties list from all emails/phone numbers associated with this contact.
                identities_properties = []
                # Process emails first so that if there are any emails, one of them will be first in the
                #   identities_properties list.  This will be *the* identity used for down-level client message
                #   migration.
                for email_info in c_dict.get('gd$email', []):
                    email = email_info.get('address', None)
                    if email is not None:
                        email_type = FetchContactsOperation._GOOGLE_TYPE_LOOKUP.get(
                            email_info.get('rel', None), None)
                        identity_properties = (
                            'Email:' + Identity.CanonicalizeEmail(email),
                            email_info.get('label', email_type))
                        if email_info.get('primary', False):
                            # Insert the primary email address at the head of the list.  Older clients will get this
                            #   as the only email address for this contact when they query_contacts.
                            identities_properties.insert(
                                0, identity_properties)
                        else:
                            identities_properties.append(identity_properties)
                for phone_info in c_dict.get('gd$phoneNumber', []):
                    # See RFC3966: "The tel URI for Telephone Numbers" for more information about this format.
                    #   It should be 'tel:' + E.164 format phone number.
                    phone = phone_info.get('uri', None)
                    if phone is not None and phone.startswith(
                            'tel:+') and Identity.CanCanonicalizePhone(
                                phone[4:]):
                        phone_type = FetchContactsOperation._GOOGLE_TYPE_LOOKUP.get(
                            phone_info.get('rel', None), None)
                        identities_properties.append(
                            ('Phone:' + Identity.CanonicalizePhone(phone[4:]),
                             phone_info.get('label', phone_type)))

                if len(identities_properties) == 0:
                    continue

                # Normalize name to None if empty.
                gd_name = c_dict.get('gd$name', None)
                if gd_name is not None:
                    names = {
                        'name': gd_name.get('gd$fullName', {}).get('$t', None),
                        'given_name': gd_name.get('gd$givenName', {}).get('$t', None),
                        'family_name': gd_name.get('gd$familyName', {}).get('$t', None)
                    }
                else:
                    names = {
                        'name': None,
                        'given_name': None,
                        'family_name': None
                    }

                fetched_contact = Contact.CreateFromKeywords(
                    self._user_id,
                    identities_properties,
                    self._notify_timestamp,
                    Contact.GMAIL,
                    rank=None,
                    **names)
                self._fetched_contacts[
                    fetched_contact.contact_id] = fetched_contact

            # Prepare to fetch next batch.
            # Indexes are 1-based, so add 1 to max_index.
            if 'openSearch$totalResults' in response_dict:
                max_index = int(response_dict['openSearch$totalResults']['$t']) + 1
            else:
                max_index = FetchContactsOperation._MAX_GOOGLE_CONTACTS + 1
            next_index = (int(response_dict['openSearch$startIndex']['$t']) +
                          len(response_dict.get('entry', [])))
            count = min(max_index - next_index, FetchContactsOperation._MAX_FETCH_COUNT)
            if (len(self._fetched_contacts) < FetchContactsOperation._MAX_GOOGLE_CONTACTS
                    and count > 0):
                start_index = next_index
                retries = 0
                continue
            else:
                raise gen.Return()
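The batch bookkeeping at the end of this loop is easy to misread because the Google Data API uses 1-based indexes. A worked example with made-up numbers:

# Illustrative numbers only: suppose the feed reports 230 total contacts, the
# current page started at 1-based index 201 and returned 30 entries, and at
# most 100 contacts are requested per batch.
total_results = 230
start_index = 201
entries_returned = 30
max_fetch_count = 100

max_index = total_results + 1                          # 231: one past the last valid 1-based index
next_index = start_index + entries_returned            # 231: first index not yet fetched
count = min(max_index - next_index, max_fetch_count)   # 0: nothing left to request
assert count == 0  # the loop above then exits via raise gen.Return()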
Example #9
 def _OnGetDeviceCode(response):
   response_dict = www_util.ParseJSONResponse(response)
   self._device_code = response_dict.get('device_code')
   callback(response_dict.get('user_code'), response_dict.get('verification_url'))
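Example #4 and Example #9 are two halves of an OAuth 2.0 device flow: #9 stores the device_code and hands the user_code and verification_url to the user, while #4 reacts once the token endpoint stops returning an error. A hedged sketch of the polling loop that would sit between them; the interval, helper names, and request details are assumptions rather than the project's actual code.

import json
import time

from tornado import httpclient, ioloop

POLL_INTERVAL_SECS = 5  # assumed; the token endpoint normally dictates its own interval


def poll_for_token(token_url, body, on_token, on_pending):
    """Hypothetical loop: retry the token endpoint until the user approves the device code."""
    http_client = httpclient.AsyncHTTPClient()

    def _OnResponse(response):
        json_response = json.loads(response.body)  # stand-in for www_util.ParseJSONResponse
        if 'error' in json_response:
            # e.g. 'authorization_pending': the user has not entered the code yet.
            on_pending(json_response['error'])
            ioloop.IOLoop.instance().add_timeout(time.time() + POLL_INTERVAL_SECS, _Poll)
        else:
            on_token(json_response)

    def _Poll():
        http_client.fetch(token_url, method='POST', body=body, callback=_OnResponse)

    _Poll()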