Example #1
0
    def test_graph_query(self):
        '''
        Tests simple graph queries (those that return arrays of data).
        '''
        # Dependent on live FB data. These are examples given on the Graph API
        # documentation page. If this test fails, check this site:
        # https://developers.facebook.com/docs/reference/api/

        # test a Graph API search for posts
        results = facebook_client.graph_api_collection_request('search',
                                                               type='post',
                                                               q='watermelon',
                                                               max_pages=1)
        # ensure that someone, anyone, is talking about watermelon publicly
        self.assertGreater(len(results), 0)
        # just check that first result is a post because it has a 'from' key
        self.assertIn('from', results[0].keys())

        # test a connection query
        results = facebook_client.graph_api_collection_request(
            'cocacola/events', limit=2, max_pages=3)
        # ensure paging and limit worked (contingent of course on Coke having 6 events)
        # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual instead.
        self.assertEqual(len(results), 6)
        # just check that first result is an event because it has a 'start_time' field
        self.assertIn('start_time', results[0].keys())
Example #2
0
def gather_fb_place_pages(center, radius, query=None, limit=4000, batch_requests=True):
    '''
    Returns a list of Facebook place page info stubs representing all places
    found in the given area. Object fields can be found at
    https://developers.facebook.com/docs/reference/api/page/

    center should be a tuple of (latitude, longitude) values, and radius is
    in meters (presumably -- the Graph API docs do not specify the unit;
    TODO confirm).

    If query is omitted, a "blank query" will be run by running the same
    center and radius query 26 separate times, once per letter of the
    alphabet as the actual query argument.

    If batch_requests is True (default), these requests will be batched,
    otherwise they'll be run one at a time. Commands with a large number
    of results may fail if batched.

    No error handling right now -- if any of the search requests fail the
    whole thing is coming down.
    '''
    search_opts = dict(type='place',
                       center='%f,%f' % center,
                       distance=radius,
                       limit=limit)

    # explicit query given: a single search suffices
    if query is not None:
        return facebook_client.graph_api_collection_request('search',
                                                            q=query,
                                                            **search_opts)

    # no query given: run one search per letter of the alphabet
    letters = [chr(o) for o in range(ord('a'), ord('z') + 1)]
    pages_unfiltered = []

    if batch_requests:
        # dict(base, q=...) copies search_opts per command without mutating it
        batch_commands = [facebook.BatchCommand('search',
                                                options=dict(search_opts, q=letter))
                          for letter in letters]
        for response in facebook_client.run_batch_request(batch_commands):
            pages_unfiltered.extend(response['data'])
    else:
        for letter in letters:
            pages_unfiltered.extend(
                facebook_client.graph_api_collection_request(
                    'search', q=letter, **search_opts))

    # the 26 per-letter result sets overlap heavily -- filter out duplicates,
    # preserving first-seen order
    ids_seen = set()    # cache of ids already in the list for a quick duplicate check
    pages = []
    for page in pages_unfiltered:
        if page['id'] not in ids_seen:
            ids_seen.add(page['id'])
            pages.append(page)
    return pages
Example #3
0
    def test_graph_query(self):
        '''
        Tests simple graph queries (those that return arrays of data).
        '''
        # Dependent on live FB data. These are examples given on the Graph API
        # documentation page. If this test fails, check this site:
        # https://developers.facebook.com/docs/reference/api/

        # test a Graph API search for posts
        results = facebook_client.graph_api_collection_request('search', type='post', q='watermelon', max_pages=1)
        # ensure that someone, anyone, is talking about watermelon publicly
        self.assertGreater(len(results), 0)
        # just check that first result is a post because it has a 'from' key
        self.assertIn('from', results[0].keys())

        # test a connection query
        results = facebook_client.graph_api_collection_request('cocacola/events', limit=2, max_pages=3)
        # ensure paging and limit worked (contingent of course on Coke having 6 events)
        # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual instead.
        self.assertEqual(len(results), 6)
        # just check that first result is an event because it has a 'start_time' field
        self.assertIn('start_time', results[0].keys())
Example #4
0
def gather_fb_place_pages(center,
                          radius,
                          query=None,
                          limit=4000,
                          batch_requests=True):
    '''
    Returns a list of Facebook place page info stubs representing all places
    found in the given area. Object fields can be found at
    https://developers.facebook.com/docs/reference/api/page/

    center should be a tuple of (latitude, longitude) values, and radius is
    in meters (presumably -- the Graph API docs do not specify the unit;
    TODO confirm).

    If query is omitted, a "blank query" will be run by running the same
    center and radius query 26 separate times, once per letter of the
    alphabet as the actual query argument.

    If batch_requests is True (default), these requests will be batched,
    otherwise they'll be run one at a time. Commands with a large number
    of results may fail if batched.

    No error handling right now -- if any of the search requests fail the
    whole thing is coming down.
    '''
    search_opts = dict(type='place',
                       center='%f,%f' % center,
                       distance=radius,
                       limit=limit)

    # explicit query given: a single search suffices
    if query is not None:
        return facebook_client.graph_api_collection_request('search',
                                                            q=query,
                                                            **search_opts)

    # no query given: run one search per letter of the alphabet
    letters = [chr(o) for o in range(ord('a'), ord('z') + 1)]
    pages_unfiltered = []

    if batch_requests:
        # dict(base, q=...) copies search_opts per command without mutating it
        batch_commands = [
            facebook.BatchCommand('search', options=dict(search_opts, q=letter))
            for letter in letters
        ]
        for response in facebook_client.run_batch_request(batch_commands):
            pages_unfiltered.extend(response['data'])
    else:
        for letter in letters:
            pages_unfiltered.extend(
                facebook_client.graph_api_collection_request(
                    'search', q=letter, **search_opts))

    # the 26 per-letter result sets overlap heavily -- filter out duplicates,
    # preserving first-seen order
    ids_seen = set()  # cache of ids already in the list for a quick duplicate check
    pages = []
    for page in pages_unfiltered:
        if page['id'] not in ids_seen:
            ids_seen.add(page['id'])
            pages.append(page)
    return pages