def _add_batch(batch):
    """Run one batch of Graph API requests and append each result to
    page_details.

    Each entry appended is either the postprocessed response for the
    corresponding command, or the FacebookAPIError raised while
    postprocessing it.
    """
    raw_responses = facebook_client.run_batch_request(batch)
    for raw, command in zip(raw_responses, batch):
        try:
            result = facebook_client.postprocess_response(
                command.to_GET_format(), raw)
        except facebook.FacebookAPIError as err:
            # keep the error object in place of the response so the
            # caller can see which commands failed
            result = err
        page_details.append(result)
def _add_batch(batch):
    """Execute the given batch of commands and extend page_details with
    one entry per command: the postprocessed response on success, or the
    FacebookAPIError instance on failure."""
    for response, command in zip(
            facebook_client.run_batch_request(batch), batch):
        try:
            processed = facebook_client.postprocess_response(
                command.to_GET_format(), response)
        except facebook.FacebookAPIError as err:
            # record the failure itself rather than aborting the batch
            processed = err
        page_details.append(processed)
def _run_batch(batch):
    """Run a batch of Graph API commands and return their results.

    Returns a list with one entry per command, in order: the
    postprocessed response, or the FacebookAPIError raised while
    postprocessing that command's response.
    """
    results = []
    raw_responses = facebook_client.run_batch_request(batch)
    for raw, command in zip(raw_responses, batch):
        try:
            outcome = facebook_client.postprocess_response(
                command.to_GET_format(), raw)
        except FacebookAPIError as err:
            # surface per-command failures in-place instead of raising
            outcome = err
        results.append(outcome)
    return results
def _run_batch(batch):
    """Execute batch commands; return one result per command — either the
    postprocessed response or the FacebookAPIError that occurred."""
    processed = []
    for response, command in zip(
            facebook_client.run_batch_request(batch), batch):
        try:
            item = facebook_client.postprocess_response(
                command.to_GET_format(), response)
        except FacebookAPIError as err:
            # a failed command yields its error object, not an exception
            item = err
        processed.append(item)
    return processed
def gather_fb_place_pages(center, radius, query=None, limit=4000,
                          batch_requests=True):
    '''
    Returns a list of Facebook place page info stubs representing all
    places found in the given area. Object fields can be found at
    https://developers.facebook.com/docs/reference/api/page/

    center should be a tuple of (latitude, longitude) values, and radius
    is in meters (i think?)

    If query is omitted, a "blank query" will be run by running the same
    center and radius query 26 separate times, once per letter of the
    alphabet as the actual query argument.

    If batch_requests is True (default), these requests will be batched,
    otherwise they'll be run one at a time. Commands with a large number
    of results may fail if batched.

    No error handling right now -- if any of the search requests fail the
    whole thing is coming down.
    '''
    search_opts = dict(type='place',
                       center='%f,%f' % center,
                       distance=radius,
                       limit=limit)

    # explicit query: a single search request suffices
    if query is not None:
        return facebook_client.graph_api_collection_request(
            'search', q=query, **search_opts)

    # no query given: run one search per letter of the alphabet
    letters = [chr(o) for o in range(ord('a'), ord('z') + 1)]
    pages_unfiltered = []
    if batch_requests:
        batch_commands = [
            facebook.BatchCommand('search',
                                  options=dict(search_opts, q=letter))
            for letter in letters
        ]
        for response in facebook_client.run_batch_request(batch_commands):
            pages_unfiltered.extend(response['data'])
    else:
        for letter in letters:
            pages_unfiltered.extend(
                facebook_client.graph_api_collection_request(
                    'search', q=letter, **search_opts))

    # the 26 per-letter result sets overlap, so drop duplicate page ids
    # while preserving first-seen order
    ids_seen = set()
    pages = []
    for page in pages_unfiltered:
        if page['id'] not in ids_seen:
            ids_seen.add(page['id'])
            pages.append(page)
    return pages
def gather_fb_place_pages(center, radius, query=None, limit=4000,
                          batch_requests=True):
    '''
    Returns a list of Facebook place page info stubs representing all
    places found in the given area. Object fields can be found at
    https://developers.facebook.com/docs/reference/api/page/

    center should be a tuple of (latitude, longitude) values, and radius
    is in meters (i think?)

    If query is omitted, a "blank query" will be run by running the same
    center and radius query 26 separate times, once per letter of the
    alphabet as the actual query argument.

    If batch_requests is True (default), these requests will be batched,
    otherwise they'll be run one at a time. Commands with a large number
    of results may fail if batched.

    No error handling right now -- if any of the search requests fail the
    whole thing is coming down.
    '''
    search_opts = dict(type='place',
                       center='%f,%f' % center,
                       distance=radius,
                       limit=limit)

    # explicit query: a single search request suffices
    if query is not None:
        return facebook_client.graph_api_collection_request(
            'search', q=query, **search_opts)

    # no query given: run one search per letter of the alphabet
    letters = [chr(o) for o in range(ord('a'), ord('z') + 1)]
    pages_unfiltered = []
    if batch_requests:
        batch_commands = [
            facebook.BatchCommand('search',
                                  options=dict(search_opts, q=letter))
            for letter in letters
        ]
        for response in facebook_client.run_batch_request(batch_commands):
            pages_unfiltered.extend(response['data'])
    else:
        for letter in letters:
            pages_unfiltered.extend(
                facebook_client.graph_api_collection_request(
                    'search', q=letter, **search_opts))

    # the 26 per-letter result sets overlap, so drop duplicate page ids
    # while preserving first-seen order
    ids_seen = set()
    pages = []
    for page in pages_unfiltered:
        if page['id'] not in ids_seen:
            ids_seen.add(page['id'])
            pages.append(page)
    return pages
def test_batch_request(self):
    '''
    Tests batch API interface
    '''
    # Dependent on FB data. These are example ones similar to those
    # listed on the Graph API BatchRequest documentation:
    # https://developers.facebook.com/docs/reference/api/batch

    ###
    # Main test consists of querying for events associated with Coca-Cola,
    # results are tested to be sure certain event-specific fields are
    # returned. The status code of the full response is also returned.

    # Also, we want to test the behavior of the "omit_response_on_success"
    # variable so we run it twice. Once where the first response is omitted
    # and we expect the full batch response to have a None first object, and
    # once where it is not omitted we expect it to have a list of event stubs.
    for omit_first_response in (True, False):
        batch_request = [
            BatchCommand('cocacola/events',
                         options={'limit': 5},
                         name='get-events',
                         omit_response_on_success=omit_first_response),
            BatchCommand(
                '', options={'ids': '{result=get-events:$.data.*.id}'}),
        ]
        full_response = facebook_client.run_batch_request(
            batch_request, process_response=False)
        # assertEqual: assertEquals is a deprecated unittest alias
        # (removed in Python 3.12)
        self.assertEqual(len(full_response), 2)
        first_resp, second_resp = full_response

        # test result of first command
        if omit_first_response:
            self.assertIsNone(first_resp)
        else:
            body = json.loads(first_resp['body'])
            for stub in body['data']:
                # duck test for event stub
                self.assertIn('start_time', stub)

        # test response from second command
        self.assertEqual(second_resp['code'], 200)
        body = json.loads(second_resp['body'])
        for event in body.values():
            # test that results are events via "duck-typing" test
            self.assertIn('start_time', event)
            self.assertIn('owner', event)

    ###
    # Also test the behavior when process_response is left to be True.
    # This is a simpler command that requests Coca-Cola's user object and
    # its first 5 events in one go. Leaving process_response to True in the
    # run_batch_request() call should yield responses with already-JSON
    # parsed body content only.
    batch_request = [
        BatchCommand('cocacola'),
        BatchCommand('cocacola/events', options={'limit': 5}),
    ]
    responses = facebook_client.run_batch_request(batch_request)
    self.assertEqual(len(responses), 2)
    # first response is a single user object
    self.assertIn('username', responses[0])
    # second response is a map of {id: event stubs}
    for event in responses[1]['data']:
        self.assertIn('name', event)
        self.assertIn('start_time', event)
def test_batch_request(self):
    '''
    Tests batch API interface
    '''
    # Dependent on FB data. These are example ones similar to those
    # listed on the Graph API BatchRequest documentation:
    # https://developers.facebook.com/docs/reference/api/batch

    ###
    # Main test consists of querying for events associated with Coca-Cola,
    # results are tested to be sure certain event-specific fields are
    # returned. The status code of the full response is also returned.

    # Also, we want to test the behavior of the "omit_response_on_success"
    # variable so we run it twice. Once where the first response is omitted
    # and we expect the full batch response to have a None first object, and
    # once where it is not omitted we expect it to have a list of event stubs.
    for omit_first_response in (True, False):
        batch_request = [
            BatchCommand('cocacola/events',
                         options={'limit': 5},
                         name='get-events',
                         omit_response_on_success=omit_first_response),
            BatchCommand(
                '', options={'ids': '{result=get-events:$.data.*.id}'}),
        ]
        full_response = facebook_client.run_batch_request(
            batch_request, process_response=False)
        # assertEqual: assertEquals is a deprecated unittest alias
        # (removed in Python 3.12)
        self.assertEqual(len(full_response), 2)
        first_resp, second_resp = full_response

        # test result of first command
        if omit_first_response:
            self.assertIsNone(first_resp)
        else:
            body = json.loads(first_resp['body'])
            for stub in body['data']:
                # duck test for event stub
                self.assertIn('start_time', stub)

        # test response from second command
        self.assertEqual(second_resp['code'], 200)
        body = json.loads(second_resp['body'])
        for event in body.values():
            # test that results are events via "duck-typing" test
            self.assertIn('start_time', event)
            self.assertIn('owner', event)

    ###
    # Also test the behavior when process_response is left to be True.
    # This is a simpler command that requests Coca-Cola's user object and
    # its first 5 events in one go. Leaving process_response to True in the
    # run_batch_request() call should yield responses with already-JSON
    # parsed body content only.
    batch_request = [
        BatchCommand('cocacola'),
        BatchCommand('cocacola/events', options={'limit': 5}),
    ]
    responses = facebook_client.run_batch_request(batch_request)
    self.assertEqual(len(responses), 2)
    # first response is a single user object
    self.assertIn('username', responses[0])
    # second response is a map of {id: event stubs}
    for event in responses[1]['data']:
        self.assertIn('name', event)
        self.assertIn('start_time', event)