Example #1
0
 def authorize(self, request):
     ''' Exchange the OAuth 2.0 'code' from Facebook's redirect for an
     access token.

     request -- dict-like query parameters from Facebook's redirect; the
                exchange proceeds only when 'code' is present and 'error'
                is absent.

     NOTE(review): this excerpt is truncated -- the error branch and the
     final return are not visible here; confirm against the full source.
     '''
     
     # Facebook appends ?code=... to the redirect on successful user consent
     if 'code' in request and 'error' not in request:
         
         
         # Build the token-exchange query parameters
         auth_params = {}
         auth_params['client_id'] = self._app_id
         auth_params['redirect_uri'] = self._app_url
         auth_params['client_secret'] = self._app_secret
         auth_params['code'] = request['code']
         
         auth_param_str = urllib.urlencode(auth_params)
         
         auth_request = self._oauth_url + '?' + auth_param_str
         
         # Do the OAuth token request; network failures become ConnectorError
         try:
             response = urllib2.urlopen(auth_request)
         except urllib2.URLError, e:
             raise errors.ConnectorError('facebook',e)
         
                     
         # Validate Response
         # NOTE(review): urllib2.urlopen raises HTTPError (a URLError
         # subclass) for 4xx statuses, so this check may be unreachable --
         # confirm before relying on it.
         if (response.code == 400):
             raise errors.ConnectorError('facebook','400 response code from Oauth Request')
         
         response_data = self._decode_response_data(response)      
         
         # Parse the querystring-style body; cgi.parse_qs maps each key to
         # a LIST of values
         auth_return = cgi.parse_qs(response_data)
Example #2
0
    def get_authorization_url(self):
        """Return the Twitter URL the user must visit to authorize this app.

        Raises:
            errors.ConnectorError: wraps any tweepy failure while obtaining
                the request token, tagged with the 'twitter' service name.
        """
        auth = tweepy.OAuthHandler(self._app_key, self._app_secret,
                                   self._app_call_back_url)

        try:
            redirect_auth_url = auth.get_authorization_url()
        except tweepy.TweepError as e:
            raise errors.ConnectorError('twitter', e)

        # Bug fix: the computed redirect URL was assigned but never returned
        # in the original, so callers always received None.
        return redirect_auth_url
Example #3
0
    def authorize(self, request, request_secret):
        """Complete the Twitter OAuth dance: trade the callback's request
        token and verifier for an access token.

        Args:
            request: dict-like callback parameters; must contain
                'oauth_token' and 'oauth_verifier'.
            request_secret: secret saved alongside the earlier request token.

        Raises:
            errors.ConnectorError: when the callback parameters are missing
                or tweepy fails to upgrade the token.
        """
        oauth_token = request.get('oauth_token')
        oauth_verifier = request.get('oauth_verifier')

        # Both callback parameters are required
        # (fixed '== None' to the idiomatic identity test)
        if oauth_token is None or oauth_verifier is None:
            raise errors.ConnectorError('twitter',
                                        'OAuth variables not set in request')

        auth = tweepy.OAuthHandler(self._app_key, self._app_secret,
                                   self._app_call_back_url)

        # Re-attach the stored request token so tweepy can upgrade it
        auth.set_request_token(oauth_token, request_secret)

        try:
            auth.get_access_token(oauth_verifier)
        except tweepy.TweepError as e:
            raise errors.ConnectorError('twitter', e)
        # NOTE(review): excerpt truncated here -- the access token presumably
        # lives on 'auth' and is returned/stored in the full source.
Example #4
0
 def _request(self, url, args=None, post_args=None):
     """ Access the Facebook Graph API and return the decoded JSON response.

     url       -- Graph API endpoint
     args      -- optional dict of query-string parameters
     post_args -- optional dict of POST parameters; when given the request
                  is issued as a POST, otherwise as a GET

     Raises errors.ConnectorError('facebook', ...) when the API response
     carries an 'error' entry.
     """
     
     # urlopen issues a POST exactly when post_data is not None
     post_data = None if post_args is None else urllib.urlencode(post_args)
     
     # Bug fix: 'args' defaults to None and urllib.urlencode(None) raises
     # TypeError -- only append a query string when args were supplied
     if args:
         url = url + "?" + urllib.urlencode(args)
     
     # Renamed from 'file' to avoid shadowing the builtin
     resp = urllib.urlopen(url, post_data)
     
     try:
         response = _parse_json(resp.read())
     finally:
         resp.close()
     if response.get("error"):
         #TODO: Error handling
         logging.error(url)
         raise errors.ConnectorError('facebook','error response from API request')
     
     return response
Example #5
0
                response = urllib2.urlopen(auth_request)
            except urllib2.URLError, e:
                raise errors.ConnectorError('facebook',e)
            
                        
            # Validate Response
            # NOTE(review): urllib2.urlopen raises HTTPError (a URLError
            # subclass) for 4xx statuses, so this check may be unreachable.
            if (response.code == 400):
                raise errors.ConnectorError('facebook','400 response code from Oauth Request')
            
            response_data = self._decode_response_data(response)      
            
            # Parse the querystring-style body; cgi.parse_qs maps each key
            # to a LIST of values
            auth_return = cgi.parse_qs(response_data)
            
        else:
            # Facebook redirected back with ?error=... instead of a code
            raise errors.ConnectorError('facebook','Oauth Error:' + request['error'])
        
        # NOTE(review): cgi.parse_qs values are lists, so this returns a
        # one-element list rather than a bare token string -- confirm that
        # callers expect the list.
        return auth_return['access_token']
    
    def get_authorization_url(self, permission_url, permissions=None):
        """Build the Facebook OAuth dialog URL the user should be sent to.

        Args:
            permission_url: base URL of the OAuth permission dialog.
            permissions: optional scope value to request; omitted from the
                query string when None.

        Returns:
            The permission dialog URL with client_id, redirect_uri and
            (optionally) scope encoded as query parameters.
        """
        params = {}

        params['client_id'] = self._app_id
        params['redirect_uri'] = self._app_url

        # Bug fix: the original always set 'scope', so permissions=None was
        # urlencoded to the literal query parameter 'scope=None'. Only send
        # a scope when one was actually requested.
        if permissions is not None:
            params['scope'] = permissions

        return permission_url + '?' + urllib.urlencode(params)

    def _request(self, url, args=None, post_args=None):
        """ Access GraphAPI.

        NOTE(review): the body of this method is truncated in this excerpt;
        a full copy of the implementation appears earlier in the file.
        """
Example #6
0
    def _scrape_html_friends(self, user_id):
        """ Scrape the given user's friend list from Yelp's HTML pages.

        user_id -- Yelp user id whose friends are scraped

        Pages through the friend listing, building a YelpFriend per friend
        block, until a page yields no friend blocks.
        NOTE(review): excerpt truncated -- friend_list is presumably
        returned after the loop; confirm in the full source.
        """

        friend_url = config.YELP_FRIEND_HTML_URL
        friend_page_start = 0
        friend_list = []

        # Page through the friend listing; break when a page has no friends
        while True:

            # Query params ('start' is the paging offset)
            query_params = {
                'userid': user_id,
                'sort': 'first_name',
                'start': friend_page_start
            }

            query_url = friend_url + '?' + urllib.urlencode(query_params)

            # Fetch the HTML page; network failures become ConnectorError
            try:
                response = urllib2.urlopen(query_url)
            except urllib2.URLError, e:
                raise errors.ConnectorError('yelp', e)

            # Parse the returned HTML
            soup = BeautifulSoup.BeautifulSoup(response.read())

            friend_blocks = soup.findAll('div', {'class': 'friend_box'})

            # Break out of loop if we have no friend blocks
            if (len(friend_blocks) == 0):
                break

            # Go through friend blocks
            for friend_block in friend_blocks:

                # Display name: text of the first miniRegular link
                friend_name = friend_block.findAll(
                    'p', {'class': 'miniRegular'})[0].a.getText()

                # User id comes from the link's query string
                # NOTE(review): .get('userid')[0] raises TypeError when the
                # key is absent -- worth a guard.
                friend_user_id_link = friend_block.findAll(
                    'p', {'class': 'miniRegular'})[0].a['href']
                qs = urlparse.urlparse(friend_user_id_link)[4]
                friend_user_id = cgi.parse_qs(qs).get('userid')[0]

                # Location
                friend_location = friend_block.findAll(
                    'p', {'class': 'user_location smaller'})[0].getText()

                # Build Friend Class and add to list
                yelp_friend = YelpFriend()
                yelp_friend.display_name = friend_name
                yelp_friend.user_id = friend_user_id
                yelp_friend.location = friend_location

                friend_list.append(yelp_friend)

                # Count friends seen; doubles as the next page's offset
                friend_page_start = friend_page_start + 1
                logging.debug('Yelp Friend Count: %d' % friend_page_start)
Example #7
0
    def _scrape_html_reviews(self, user_id, last_review_date=None):
        """ Scrape the given user's reviews from Yelp's HTML pages.

        user_id          -- Yelp user id whose reviews are scraped
        last_review_date -- optional datetime cutoff; scraping stops early
                            once a review older than this date is reached

        Pages through the time-sorted review listing, building a YelpReview
        per review block, until a page yields no review blocks or the
        cutoff is hit.
        NOTE(review): excerpt truncated -- review_list is presumably
        returned after the loop; confirm in the full source.
        """

        review_url = config.YELP_REVIEW_HTML_URL
        review_page_start = 0
        review_list = []
        break_loop_early = False

        # Try to load all reviews.. break when none are left or you hit the last review date
        while True:

            # Set inside the inner loop when the date cutoff was reached
            if (break_loop_early):
                break

            # Query params ('rec_pagestart' is the paging offset)
            query_params = {
                'userid': user_id,
                'review_sort': 'time',
                'rec_pagestart': review_page_start
            }

            query_url = review_url + '?' + urllib.urlencode(query_params)

            # Fetch the HTML page; network failures become ConnectorError
            try:
                response = urllib2.urlopen(query_url)
            except urllib2.URLError, e:
                raise errors.ConnectorError('yelp', e)

            # Parse the returned HTML
            soup = BeautifulSoup.BeautifulSoup(response.read())

            review_blocks = soup.findAll('div', {'class': 'review clearfix'})

            # Break out of loop if we have no review blocks
            if (len(review_blocks) == 0):
                break

            # Go through review blocks
            for review_block in review_blocks:
                biz_block = review_block.findAll('div',
                                                 {'class': 'biz_info'})[0]
                business_name = biz_block.h4.a.getText()
                business_id = biz_block.h4.a['href']
                # Strip the '#hrid:...' fragment from the link to leave the id
                business_id = business_id[:business_id.find('#hrid:')]
                business_address = biz_block.address.getText()

                # Business categories: the links inside the info paragraph
                categories = []
                for a in biz_block.p.findAll('a'):
                    categories.append(a.getText())

                # Rating: first character of the star image's title text
                rating_text = review_block.findAll(
                    'div', {'class': 'rating'})[0].img['title']
                rating = rating_text[0:1]

                # Review date, parsed as US-style m/d/Y
                review_date_str = review_block.findAll(
                    'div',
                    {'class': 'rating_info'})[0].find('em', {
                        'class': 'smaller'
                    }).getText()
                review_date = datetime.datetime.strptime(
                    review_date_str, '%m/%d/%Y')

                # Reviews are time-sorted, so anything older than the cutoff
                # has presumably been collected already -- stop both loops
                if (last_review_date != None
                        and review_date < last_review_date):
                    break_loop_early = True
                    break

                # Review ID: element id minus its 4-character prefix
                review_id = review_block.findAll(
                    'div', {'class': 'rateReview'})[0]['id']
                review_id = review_id[4:]

                # Check-in
                # TODO: Checkins from Yelp

                # Review body text
                review = review_block.findAll(
                    'div', {'class': 'review_comment'})[0].getText()

                # Build ReviewClass and add to list
                yelp_review = YelpReview()
                yelp_review.business_name = business_name
                yelp_review.business_id = business_id
                yelp_review.business_address = business_address
                yelp_review.category_list = yelp_review.category_list + categories
                yelp_review.rating = int(rating)
                yelp_review.review_date = review_date
                yelp_review.review_id = review_id
                yelp_review.review = review

                review_list.append(yelp_review)

                # Increment Review counter for paging
                review_page_start = review_page_start + 1
                logging.debug('Yelp Review Count: %d' % review_page_start)