def get_unique_pageviews(db, cur, service, profile_id, whitelist, contains_article = True):
    """Queue one Analytics query per whitelisted page title, executing in batches of 10.

    Results arrive asynchronously through the module-level process_response
    callback; nothing is returned. `db` and `cur` are accepted but unused here
    -- presumably process_response persists through them (TODO: confirm).
    START_DATE/END_DATE/escape_string are module-level names defined elsewhere.
    """
    batch = BatchHttpRequest()
    count = 1
    for page_title in whitelist:
        ids = "ga:" + profile_id
        metrics = "ga:uniquePageviews"
        dimensions = ""
        sort = "-ga:uniquePageviews"
        # Segment sessions by whether the page path matches ("==") or does
        # not match ("!=") the article's index.php URL.
        operator = "==" if contains_article else "!="
        segment = "sessions::condition::ga:pagePath"+operator+"/index.php/" + escape_string(page_title)
        batch.add(service.data().ga().get(
            ids=ids,
            start_date=START_DATE,
            end_date=END_DATE,
            metrics=metrics,
            dimensions=dimensions,
            segment=segment,
            sort=sort,fields='rows,query'), callback=process_response)
        # Flush every 10 requests, and once more on the final title.
        if count % 10 == 0 or count == len(whitelist):
            #user rate limit is 10 requests per second
            print 'Executing batch HTTP request...'
            time.sleep(1) # 1 second between user requests
            batch.execute()
            batch = BatchHttpRequest() #create new object
        count += 1
    print 'All done!'
def main(argv):
    """Mark every product named on the command line as out of stock."""
    # Authenticate and construct service.
    service, flags = sample_tools.init(
        argv, 'content', 'v2', __doc__, __file__, parents=[argparser])
    # Same status payload for every product; never mutated, so it is safe
    # to share across batch entries.
    out_of_stock = {
        'availability': 'out of stock',
        'price': {'value': 3.14, 'currency': 'USD'},
    }
    batch = BatchHttpRequest(callback=product_updated)
    for pid in flags.product_ids:
        # The store code is the portion of the id before the first ':'.
        store_code = pid.split(':')[0]
        batch.add(service.inventory().set(
            merchantId=flags.merchant_id,
            storeCode=store_code,
            productId=pid,
            body=out_of_stock))
    try:
        batch.execute()
    except client.AccessTokenRefreshError:
        print ('The credentials have been revoked or expired, please re-run the '
               'application to re-authorize')
def del_users(service, service2, delete): account_summaries = service.management().accountSummaries().list().execute( ) for account in account_summaries.get('items', []): account_id = account.get('id') account_links = service2.management().accountUserLinks().list( accountId=account_id).execute() batch = BatchHttpRequest(callback=call_back) for user in account_links.get('items', []): users_ref = user.get('userRef') user_mail = users_ref.get('email') users_id = user.get('id') for x in delete: if x == user_mail: print x print account_id delete_account = service2.management().accountUserLinks( ).delete(accountId=account_id, linkId=users_id) batch.add(delete_account) batch.execute() time.sleep(1)
def delete_playlists(youtube, args):
    """Delete the authenticated user's playlists whose title matches
    args.pattern; with args.pretend, only report what would be deleted.

    Deletions are queued across all result pages and executed in a single
    batch at the end.
    """
    playlists_req = youtube.playlists().list(
        part = "id,snippet",
        mine = True,
        maxResults = MAX_RESULTS,
    )
    def delete_playlist(request_id, response, exception):
        # request_id was set to the playlist id when the request was queued.
        playlist_id = request_id
        if exception:
            raise exception
        else:
            sys.stderr.write(u"Deleted {id}\n".format(id = playlist_id))
    delete = False
    batch_req = BatchHttpRequest(callback = delete_playlist)
    # Walk every page of the user's playlists.
    while playlists_req:
        playlists = playlists_req.execute()
        for playlist in playlists["items"]:
            if re.match(args.pattern, playlist["snippet"]["title"]):
                if args.pretend:
                    sys.stderr.write(u"Deleting {}\n".format(playlist["snippet"]["title"]))
                else:
                    delete_req = youtube.playlists().delete(id = playlist["id"])
                    batch_req.add(delete_req, request_id = playlist["id"])
                    delete = True
        playlists_req = youtube.playlists().list_next(playlists_req, playlists)
    # Only hit the network if anything was actually queued.
    if delete:
        batch_req.execute()
def share(self, users, share_type='writer', send_notifications=False, email_message=None):
    """Share this document with a user or list of users.

    Args:
        users: a single email address or a list of them.
        share_type: Drive permission role to grant (default 'writer').
        send_notifications: whether Drive emails the invitees.
        email_message: optional message for the notification email.

    Raises:
        Any exception returned for an individual permission insert
        (re-raised from the batch callback).
    """
    # isinstance (not `type(users) is str`) also accepts str subclasses.
    if isinstance(users, str):
        users = [users]

    def batch_callback(request_id, response, exception):
        print("Response for request_id (%s):" % request_id)
        print(response)
        # Re-raise so a failed permission insert is not silently ignored.
        if exception:
            raise exception

    batch_request = BatchHttpRequest(callback=batch_callback)
    for count, user in enumerate(users):
        batch_entry = self.drive.service.permissions().insert(
            fileId=self._id,
            sendNotificationEmails=send_notifications,
            emailMessage=email_message,
            body={'value': user, 'type': 'user', 'role': share_type})
        batch_request.add(batch_entry, request_id="batch"+str(count))
    batch_request.execute()
def _insert_item_all_users(self):
    """Insert a timeline item to all authorized users.

    Returns a human-readable status string. Aborts (without sending) when
    more than 10 users are registered, to protect the API quota.
    """
    logging.info('Inserting timeline item to all users')
    users = Credentials.all()
    total_users = users.count()
    if total_users > 10:
        return 'Total user count is %d. Aborting broadcast to save your quota' % (
            total_users)
    body = {
        'text': 'Hello Everyone!',
        'notification': {
            'level': 'DEFAULT'
        }
    }
    # _BatchCallback tallies success/failure counts across the batch.
    batch_responses = _BatchCallback()
    batch = BatchHttpRequest(callback=batch_responses.callback)
    for user in users:
        # Each user gets their own authorized Mirror service instance.
        creds = StorageByKeyName(Credentials, user.key().name(), 'credentials').get()
        mirror_service = util.create_service('mirror', 'v1', creds)
        batch.add(mirror_service.timeline().insert(body=body),
                  request_id=user.key().name())
    batch.execute(httplib2.Http())
    return 'Successfully sent cards to %d users (%d failed).' % (
        batch_responses.success, batch_responses.failure)
def test_http_errors_passed_to_callback(self):
    # A 401 inside the multipart batch response must surface as an exception
    # for the failing sub-request while the other sub-request still succeeds.
    batch = BatchHttpRequest()
    callbacks = Callbacks()
    cred_1 = MockCredentials('Foo')
    cred_2 = MockCredentials('Bar')
    # Two identical 200 envelopes each carrying BATCH_RESPONSE_WITH_401 --
    # presumably the second is consumed by the batch's retry after a
    # credential refresh (confirm against BatchHttpRequest internals).
    http = HttpMockSequence([
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_RESPONSE_WITH_401),
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_RESPONSE_WITH_401),
    ])
    creds_http_1 = HttpMockSequence([])
    cred_1.authorize(creds_http_1)
    creds_http_2 = HttpMockSequence([])
    cred_2.authorize(creds_http_2)
    self.request1.http = creds_http_1
    self.request2.http = creds_http_2
    batch.add(self.request1, callback=callbacks.f)
    batch.add(self.request2, callback=callbacks.f)
    batch.execute(http=http)
    # Request '1': no response, and the exception exposes the 401 details.
    self.assertEqual(None, callbacks.responses['1'])
    self.assertEqual(401, callbacks.exceptions['1'].resp.status)
    self.assertEqual(
        'Authorization Required', callbacks.exceptions['1'].resp.reason)
    # Request '2': succeeded, no exception recorded.
    self.assertEqual({u'baz': u'qux'}, callbacks.responses['2'])
    self.assertEqual(None, callbacks.exceptions['2'])
def get_messages(self, max_results=10, request_format=None, label_ids=None, page_token=None):
    """Fetch up to max_results messages matching the given labels.

    Args:
        max_results: maximum number of message ids to list.
        request_format: Gmail message format; defaults to 'metadata'.
        label_ids: optional list of label ids to filter by.
        page_token: optional continuation token from a previous listing.

    Returns:
        A list of message resources (possibly partial if the listing
        response had no 'messages' key).
    """
    # Default inside the body: a `label_ids=[]` default would be shared
    # across calls (mutable-default pitfall).
    if label_ids is None:
        label_ids = []
    response = self._get_message_ids(max_results, label_ids, page_token)
    if not response:
        return []
    if not request_format:
        request_format = 'metadata'
    messages = []

    def on_get_message(request_id, response, exception):
        # Failed fetches are silently skipped; only successes are collected.
        if exception is not None:
            return
        messages.append(response)

    batch = BatchHttpRequest(callback=on_get_message)
    try:
        for message in response['messages']:
            batch.add(self._users.messages().get(id=message['id'],
                                                 userId='me',
                                                 format=request_format))
        batch.execute(http=self._http)
    except KeyError:
        # Listing returned no 'messages' key (empty result set).
        return messages
    return messages
def main(): """Main entrypoint.""" client = create_client() files = fetch_all_metadata(client) dupes = find_dupes(files) print '{} duplicates found. '.format(len(dupes)) if len(dupes) == 0: print 'We are done.' return print 'Please check them.' total = 0 for dupeset in dupes: print '--' for dupe in dupeset: print dupe['alternateLink'], dupe['title'] for dupe in dupeset[1:]: total += int(dupe['quotaBytesUsed']) print '--' print '{} Gigabytes wasted.'.format(total / ONE_GIG) conf = raw_input('Great. Now trash the extras? (y/n) ') if conf.strip() == 'y': print 'Trashing.' batch = BatchHttpRequest() for dupeset in dupes: for dupe in dupeset[1:]: batch.add(client.files().trash(fileId=dupe['id'])) if len(batch._order) == 1000: # batch maxes out at 1k batch.execute() batch = BatchHttpRequest() batch.execute() print 'We are done. Check the trash for your files.' else: print 'Not touching anything.'
def batchInsertProducts(self,product_qset):
    """Insert every product in the queryset into Google Merchant Center,
    BATCH_SIZE at a time, stamping created/updated times on success."""
    GoogleProduct = get_model('gmerchant','GoogleProduct')
    def product_inserted(unused_request_id, response, exception):
        if exception is not None:
            # Do something with the exception.
            print 'There was an error: ' + str(exception)
        else:
            # Match the API's offerId back to our local record.
            offer_id = smart_text(response['offerId'].encode('ascii', 'ignore'))
            gp = GoogleProduct.objects.get(google_shopping_id=offer_id)
            # First successful insert stamps created; later ones stamp updated.
            if not gp.google_shopping_created:
                gp.google_shopping_created = datetime.now()
            else:
                gp.google_shopping_updated = datetime.now()
            gp.save()
            print ('Product with offerId "%s" and title "%s" was created.' %
                   (offer_id, smart_text(response['title'].encode('ascii', 'ignore'))))
    for block in chunks(product_qset,BATCH_SIZE):
        #Build a new batch request for this block of products
        batch = BatchHttpRequest(callback=product_inserted)
        for i in block:
            product = self.buildProduct(i)
            # Add product to the batch.
            batch.add(self.service.products().insert(merchantId=self.merchant_id,
                                                     body=product))
        try:
            #Let's send this batch off to the Goog.
            batch.execute()
        except client.AccessTokenRefreshError:
            warn_exp_token()
def delete_playlists(youtube, args):
    """Delete every playlist of the authenticated user whose title matches
    args.pattern; with args.pretend set, only report what would be deleted."""
    def on_deleted(request_id, response, exception):
        # request_id is the playlist id supplied when the delete was queued.
        if exception:
            raise exception
        sys.stderr.write(u"Deleted {id}\n".format(id=request_id))

    batch = BatchHttpRequest(callback=on_deleted)
    queued_any = False
    request = youtube.playlists().list(
        part="id,snippet",
        mine=True,
        maxResults=MAX_RESULTS,
    )
    # Walk all result pages, queueing matching deletions as we go.
    while request:
        page = request.execute()
        for item in page["items"]:
            title = item["snippet"]["title"]
            if not re.match(args.pattern, title):
                continue
            if args.pretend:
                sys.stderr.write(u"Deleting {}\n".format(title))
            else:
                batch.add(youtube.playlists().delete(id=item["id"]),
                          request_id=item["id"])
                queued_any = True
        request = youtube.playlists().list_next(request, page)
    if queued_any:
        batch.execute()
def _insert_item_all_users(self):
    """Insert a timeline item to all authorized users."""
    logging.info('Inserting timeline item to all users')
    users = Credentials.all()
    total_users = users.count()
    if total_users > 10:
        # A large broadcast would burn through the API quota.
        return 'Total user count is %d. Aborting broadcast to save your quota' % (
            total_users)
    card = {
        'text': 'Hello Everyone!',
        'notification': {'level': 'DEFAULT'}
    }
    # _BatchCallback tallies per-request success/failure counts.
    results = _BatchCallback()
    batch = BatchHttpRequest(callback=results.callback)
    for user in users:
        key_name = user.key().name()
        creds = StorageByKeyName(Credentials, key_name, 'credentials').get()
        mirror = util.create_service('mirror', 'v1', creds)
        batch.add(mirror.timeline().insert(body=card), request_id=key_name)
    batch.execute(httplib2.Http())
    return 'Successfully sent cards to %d users (%d failed).' % (
        results.success, results.failure)
def main(): """Main entrypoint.""" client = create_client() files = fetch_all_metadata(client) dupes = find_dupes(files) print '{} duplicates found. '.format(len(dupes)) if len(dupes) == 0: print 'We are done.' return print 'Please check them.' total = 0 for dupeset in dupes: print '--' for dupe in dupeset: print dupe['alternateLink'], dupe['title'] for dupe in dupeset[1:]: total += int(dupe['quotaBytesUsed']) print '--' print '{} Gigabytes wasted.'.format(total / ONE_GIG) conf = raw_input('Great. Now trash the extras? (y/n) ') if conf.strip() == 'y': print 'Trashing.' batch = BatchHttpRequest() for dupeset in dupes: for dupe in dupeset[1:]: batch.add(client.files().trash(fileId=dupe['id'])) batch.execute() print 'We are done. Check the trash for your files.' else: print 'Not touching anything.'
def main(argv):
    """Set each product named on the command line to 'out of stock'."""
    # Authenticate and construct service.
    service, config, flags = shopping_common.init(argv, __doc__,
                                                  parents=[argparser])
    merchant_id = config['merchantId']
    # One shared, never-mutated status payload for all products.
    status = {
        'availability': 'out of stock',
        'price': {
            'value': 3.14,
            'currency': 'USD'
        }
    }
    batch = BatchHttpRequest(callback=product_updated)
    for product_id in flags.product_ids:
        # Store code is the id's prefix before the first ':'.
        batch.add(service.inventory().set(
            merchantId=merchant_id,
            storeCode=product_id.split(':')[0],
            productId=product_id,
            body=status))
    try:
        batch.execute()
    except client.AccessTokenRefreshError:
        print(
            'The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
def print_popular_videos_with_analytics():
    """For each link table, batch-fetch snippets for the most-linked videos
    and persist the top ones.

    Relies heavily on module globals: unique_link_table, youtube,
    process_result, list_popular_music_videos, list_popular_music_ids,
    write_popular_to_database -- presumably process_result appends to
    list_popular_music_videos (TODO: confirm).
    """
    for table in unique_link_table:
        num_batch = 0
        batch = BatchHttpRequest()
        # Most-linked video ids first.
        for video_id in sorted(unique_link_table[table],
                               key=unique_link_table[table].get, reverse=True):
            if num_batch >= 1000:
                # Batch endpoint limit.
                break
            if len(list_popular_music_videos) >= 20:
                break
            batch.add(youtube.videos().list(id=video_id, part="snippet"),
                      callback=process_result)
            num_batch += 1
        try:
            print "not missing http"
            batch.execute()
        except ValueError:
            # NOTE(review): the ValueError here is presumably the "missing
            # http" error from executing without credentials -- confirm.
            print "missing http"
        if len(list_popular_music_videos) >= 20:
            for video in list_popular_music_videos[:50]:
                if len(video["items"]) > 0:
                    list_popular_music_ids.append((video["items"][0]["id"],
                                                   video["items"][0]["snippet"]["title"]))
            write_popular_to_database(table, list_popular_music_ids)
            # Reset the shared accumulator for the next table.
            del list_popular_music_ids[:]
def get(self):
    """Broadcast the submitted video URL as a timeline card to all users.

    YouTube links get a PLAY_VIDEO menu action. Aborts when more than 10
    users are registered, to protect the API quota.
    """
    video_url = self.request.get("url")
    logging.info('Inserting timeline item to all users')
    users = Credentials.all()
    total_users = users.count()
    if total_users > 10:
        return 'Total user count is %d. Aborting broadcast to save your quota' % (
            total_users)
    body = {
        'notification': {'level': 'DEFAULT'},
        'text': video_url,
    }
    if 'youtube' in video_url:
        # Tap-to-play menu action for YouTube links.
        body['menuItems'] = [{'action' : 'PLAY_VIDEO', 'payload' : video_url}]
    batch_responses = _BatchCallback()
    batch = BatchHttpRequest(callback=batch_responses.callback)
    for user in users:
        creds = StorageByKeyName(
            Credentials, user.key().name(), 'credentials').get()
        mirror_service = util.create_service('mirror', 'v1', creds)
        # NOTE(review): `timeline` is fetched but never used -- dead call
        # (and an extra API round trip per user)? Confirm before removing.
        timeline = retrieve_all_timeline_items(mirror_service)
        batch.add(
            mirror_service.timeline().insert(body=body),
            request_id=user.key().name())
    batch.execute(httplib2.Http())
    self._render_template('')
def task_():
    """POST handler: batch-fetch pages of a user's playlist and store the
    video ids in Cloud SQL.

    Walks a hard-coded list of playlistItems page tokens (only the first 20
    are used), queues one playlistItems.list call per token, and inserts every
    returned video id from the batch callback.
    """
    if request.method == "POST":
        env = os.getenv('SERVER_SOFTWARE')
        # Only connect through the Cloud SQL unix socket when actually
        # running on App Engine.
        if (env and env.startswith('Google App Engine/')):
            db = MySQLdb.connect(
                unix_socket='/cloudsql/peppy-linker-102423:daniel-george',
                user='******',
                db='sheepdog')
            cursor = db.cursor()
        # NOTE(review): outside App Engine, `db`/`cursor` are never bound and
        # the callback below would raise NameError -- confirm this handler
        # only ever runs on App Engine.
        # Pre-computed page tokens; each addresses one 50-item result page.
        tokens = ["","CDIQAA","CGQQAA","CJYBEAA","CMgBEAA","CPoBEAA","CKwCEAA","CN4CEAA","CJADEAA","CMIDEAA","CPQDEAA","CKYEEAA", "CNgEEAA", "CIoFEAA", "CLwFEAA", "CO4FEAA", "CKAGEAA", "CNIGEAA", "CIQHEAA", "CLYHEAA", "COgHEAA", "CJoIEAA", "CMwIEAA", "CP4IEAA", "CLAJEAA", "COIJEAA", "CJQKEAA", "CMYKEAA", "CPgKEAA", "CKoLEAA", "CNwLEAA", "CI4MEAA", "CMAMEAA", "CPIMEAA", "CKQNEAA", "CNYNEAA", "CIgOEAA", "CLoOEAA", "COwOEAA", "CJ4PEAA", "CNAPEAA", "CIIQEAA", "CLQQEAA", "COYQEAA", "CJgREAA", "CMoREAA", "CPwREAA", "CK4SEAA", "COASEAA", "CJITEAA", "CMQTEAA", "CPYTEAA", "CKgUEAA", "CNoUEAA", "CIwVEAA", "CL4VEAA", "CPAVEAA", "CKIWEAA", "CNQWEAA", "CIYXEAA", "CLgXEAA", "COoXEAA", "CJwYEAA", "CM4YEAA", "CIAZEAA", "CLIZEAA", "COQZEAA", "CJYaEAA", "CMgaEAA", "CPoaEAA", "CKwbEAA", "CN4bEAA", "CJAcEAA", "CMIcEAA", "CPQcEAA", "CKYdEAA", "CNgdEAA", "CIoeEAA", "CLweEAA", "CO4eEAA", "CKAfEAA", "CNIfEAA", "CIQgEAA", "CLYgEAA", "COggEAA", "CJohEAA", "CMwhEAA", "CP4hEAA", "CLAiEAA", "COIiEAA", "CJQjEAA", "CMYjEAA", "CPgjEAA", "CKokEAA", "CNwkEAA", "CI4lEAA", "CMAlEAA", "CPIlEAA", "CKQmEAA", "CNYmEAA", ]
        batch = BatchHttpRequest()
        user = request.form.get('user')
        # Only the first 20 pages are fetched.
        for token in tokens[:20]:
            try:
                playlistitems_list_request = youtube.playlistItems().list(
                    playlistId=user,
                    part="snippet",
                    pageToken=token,
                    maxResults=50
                )
            except NameError:
                # `youtube` is presumably a module global; skip if missing.
                pass
            def list1(request_id,response,exception):
                # Persist every video id from this page of playlist items
                # (parameterized SQL; commits per row).
                for playlist_item in response["items"]:
                    video_id = playlist_item["snippet"]["resourceId"]["videoId"]
                    cursor.execute("""INSERT INTO sheepdog.videoIds (videoId) VALUES (%s);""", [video_id])
                    db.commit()
            batch.add(playlistitems_list_request, callback=list1)
        batch.execute(http=http)
        return 'string'
def handle_unread_emails(self, handle_message): """ Gets unread emails and responds to them accordingly. """ unread = self.get_unread_emails() if len(unread) == 0: return for m in unread: message = self.service.users().messages().get(userId='me', id=m['id']).execute() snippet = message['snippet'].strip() print 'Message snippet: %s' % message['snippet'] sender = self._get_from_header(message['payload']['headers']) print sender batch = BatchHttpRequest() # Only respond to Google Voice emails. if self.GOOGLE_VOICE_DOMAIN in sender: result = handle_message(snippet) response_message = self._create_response_message(result, sender) batch.add(self.service.users().messages().send(userId='me', body=response_message)) # Remove from unread (batch.add(self.service .users() .messages() .modify(userId='me', id=m['id'], body={'removeLabelIds': ['UNREAD']}))) batch.execute()
def share(self, users, share_type='writer', send_notifications=False,
          email_message=None):
    """Share this document with a user or list of users.

    Args:
        users: a single email address or a list of them.
        share_type: Drive permission role to grant (default 'writer').
        send_notifications: whether Drive emails the invitees.
        email_message: optional message for the notification email.

    Raises:
        Any exception returned for an individual permission insert
        (re-raised from the batch callback).
    """
    # isinstance (not `type(users) is str`) also accepts str subclasses.
    if isinstance(users, str):
        users = [users]

    def batch_callback(request_id, response, exception):
        print("Response for request_id (%s):" % request_id)
        print(response)
        # Re-raise so a failed permission insert is not silently ignored.
        if exception:
            raise exception

    batch_request = BatchHttpRequest(callback=batch_callback)
    for count, user in enumerate(users):
        batch_entry = self.drive.service.permissions().insert(
            fileId=self._id,
            sendNotificationEmails=send_notifications,
            emailMessage=email_message,
            body={
                'value': user,
                'type': 'user',
                'role': share_type
            })
        batch_request.add(batch_entry, request_id="batch" + str(count))
    batch_request.execute()
def main(argv):
    """Insert BATCH_SIZE new sub-accounts under the merchant in one batch."""
    # Authenticate and construct service.
    service, flags = sample_tools.init(argv, 'content', 'v2', __doc__,
                                       __file__, parents=[argparser])
    batch = BatchHttpRequest(callback=account_inserted)
    for _ in range(BATCH_SIZE):
        # Unique name per account; also used to derive the website URL.
        name = 'account%s' % shopping_common.get_unique_id()
        new_account = {
            'name': name,
            'websiteUrl': 'https://%s.example.com/' % (name, )
        }
        # Add account to the batch.
        batch.add(service.accounts().insert(merchantId=flags.merchant_id,
                                            body=new_account))
    try:
        batch.execute()
    except client.AccessTokenRefreshError:
        print(
            'The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
def getNewestVideos(self):
    """Batch-search every subscribed channel for recent uploads, filter by
    description contents, and record the survivors.

    Returns the number of videos recorded into self.records /
    self.channel_videos.
    """
    # Temporary fix to overcome oauth expiries, should only call once oauth
    # is expired (to be fixed)
    self.records = {}
    # When subscription count is large it's important to batch all the
    # HTTP requests together as 1 http request. This will break if
    # Channel list is > 1000 (to be fixed)
    batch = BatchHttpRequest(callback=self.getChannelNewestVideosCallback)
    # Add each playlist to the batch request
    for channel_id in self.channel_titles:
        # We should be getting videos directly off the playlist items
        # But YouTube API takes 15 - 60 mins to update this list
        # So instead search.list is used at great quota cost
        # Also since moving to batch we only get the last 50 results from
        # a channel, TO DO: collate nextPageTokens if require more than 50
        check_after = (datetime.utcnow() -
                       timedelta(days=self.set.days_uploaded_after))
        # RFC 3339 timestamp required by publishedAfter.
        check_after = check_after.isoformat("T") + "Z"
        batch.add(
            self.youtube.search().list(
                part='snippet',
                maxResults=50,
                channelId=channel_id,
                type='video',
                safeSearch='none',
                publishedAfter=check_after
            )
        )
    # Retry the batch (re-logging-in when needed) until it succeeds, with a
    # hard cap of 500 attempts.
    for _ in range(500):
        with ytLoginManager(self.login_timer) as request:
            if request.relogin:
                self.youtube = self.initilize_youtube(self.set)
            batch.execute()
            if request.success:
                break
    # First pass: keep only videos whose full description contains the
    # required substring (queued by the batch callback, presumably --
    # confirm against getChannelNewestVideosCallback).
    while not self.descq.empty():
        try:
            [YTid, cid, desc_contain, record] = self.descq.get()
            ful_desc = self.getVideoDescription(YTid)
            # Strip all non-alphanumerics before the substring test.
            check_ful_desc = re.sub('[\W_]+', '', ful_desc).lower()
            if desc_contain in check_ful_desc:
                self.recq.put([YTid, cid, record])
        except Exception:
            continue
    # Second pass: drain the accepted queue into the result structures.
    counter = 0
    while not self.recq.empty():
        try:
            [YTid, cid, record] = self.recq.get()
            self.records[YTid] = record
            self.channel_videos[cid].append(YTid)
            counter += 1
        except Exception:
            continue
    return counter
def post(self):
    """Cleanup task: cancel/suspend every active subscription of a resold
    customer domain via one batched reseller API call."""
    domain = self.request.get("domain")
    logging.info("Execing cleanup task for domain (%s)" % domain)
    http = httplib2.Http()
    httplib2.debuglevel = 4
    credentials = get_credentials(settings.RESELLER_ADMIN)
    credentials.authorize(http)
    service = build("reseller", settings.RESELLER_API_VERSION, http=http)
    response = service.customers().get(
        customerId=domain).execute(num_retries=5)
    def delete_sub_callback(request_id, response, exception):
        # just log the exception.
        logging.exception(exception)
        pass
    if not response.get("alternateEmail"):
        logging.info("Skipping cleanup, customer not resold..")
        # NOTE(review): exit() raises SystemExit inside a request handler --
        # a plain `return` is presumably intended; confirm.
        exit()
    response = service.subscriptions().list(
        customerId=domain, maxResults=100).execute(num_retries=5)
    # resort the subscriptions and bump GAFB subs to the bottom
    # (Python-2-only `cmp=` sort argument.)
    subs = sorted(
        response['subscriptions'],
        cmp=lambda a, b: int(a['skuId'] == ResellerSKU.GoogleApps) - 1)
    batch = BatchHttpRequest(callback=delete_sub_callback)
    logging.info("Purging %d subs" % len(subs))
    for s in subs:
        if s['status'] in [ResellerDeletionType.Cancel,
                           ResellerDeletionType.Suspend,
                           ResellerDeletionType.Downgrade]:
            logging.info("Skipping subscription, in deleted state")
            continue
        # Google-Drive-storage / Google-Vault must be cancelled.
        deletionType = ResellerDeletionType.Cancel
        # GAfB cannot be 'cancelled', and must be 'suspended'
        if s['skuId'] == ResellerSKU.GoogleApps:
            deletionType = ResellerDeletionType.Suspend
        request = service.subscriptions().delete(
            customerId=domain,
            subscriptionId=s['subscriptionId'],
            deletionType=deletionType)
        batch.add(request)
    batch.execute(http=http)
def unread_subjects(self):
    """Return the subjects of all unread messages, collected via
    self.subject_callback into self.subjects."""
    self.subjects = []
    unread = self.unread_ids()
    if unread:
        batch = BatchHttpRequest()
        for item in unread:
            request = self.service.users().messages().get(userId='me',
                                                          id=item['id'])
            batch.add(request, callback=self.subject_callback)
        batch.execute()
    return self.subjects
def batch_remove(objects, service, http): def cb(req_id, response, exception): if exception: print req_id, exception batch = BatchHttpRequest() for obj in objects: batch.add(service.objects().delete(bucket=BUCKET, object=obj), callback=cb) return batch.execute(http=http)
def main(argv):
    """Query Search Analytics for every URL in the input file, batching five
    queries per request to stay under the QPS limit.

    Responses are handled asynchronously by HandleRequest.
    """
    lService, lFlags = sample_tools.init(
        argv, 'webmasters', 'v3', __doc__, __file__, parents=[argparser],
        scope='https://www.googleapis.com/auth/webmasters.readonly')
    lLines = [line.rstrip('\n') for line in open(lFlags.url_file)]
    wipe_data()
    lPos = 0
    lBatch = BatchHttpRequest()
    for lURL in lLines:
        # TODO: Is it possible to minimize the request count (send one get two)?
        lRequest = {
            'startDate': lFlags.start_date,
            'endDate': lFlags.end_date,
            'dimensions': ['page'],
            'dimensionFilterGroups': [{
                'filters': [{
                    'dimension': 'page',
                    'expression': lURL
                }]
            }],
        }
        # TODO: Test with arg (maybe split '?')
        #lURL.split("//")[-1].split("/")[0]
        # Derive "scheme://host" from the full page URL.
        theSiteURL = (lURL.split("//")[0] + "//" +
                      (lURL.split("//")[-1].split("/")[0]))
        #theSiteURL = lURL
        print "Adding " + lURL
        lBatch.add(
            lService.searchanalytics().query(siteUrl=theSiteURL,
                                             body=lRequest),
            HandleRequest)
        lPos += 1
        # Try 10 QPS and 20 QPS -- 10 should work... Analytics is 5? Webmasters is 10? Search Analytics is 3?
        if lPos == 5:  # 5 queries per second is a Google imposed limit
            lBatch.execute()
            time.sleep(1)  # If it runs too fast Google will deny the request.
            lBatch = BatchHttpRequest()
            lPos = 0
    # Flush any queries left over from the last partial batch.
    if lPos:
        lBatch.execute()
def post(self):
    """Insert the module-level `events` into the posted calendar and render
    an embed widget for it.

    NOTE(review): `events` and `service` are not defined in this handler --
    presumably module globals; confirm they are initialized before POST.
    """
    urlfetch.set_default_fetch_deadline(60)
    http = decorator.http()
    calendar_id = self.request.POST.get("calendar_id")
    # Embeddable calendar iframe; '@' must be percent-encoded in the src.
    widget = '<iframe src="https://www.google.com/calendar/embed?height=600&wkst=1&bgcolor=%23FFFFFF&src='+calendar_id.replace("@","%40")+'&color=%23B1365F&ctz=Asia%2FCalcutta" style=" border-width:0 " width="800" height="600" frameborder="0" scrolling="no"></iframe>'
    batch = BatchHttpRequest()
    for event in events:
        batch.add(service.events().insert(calendarId=calendar_id, body=event))
    batch.execute(http=http)
    self.response.write("Updated calendar: <br/>\n"+widget)
def getGmailInbox(self):
    """Page through every message (including spam/trash) and hand each page's
    full messages to self.saveEmails via a batch request."""
    #request = self.client.users().messages().list(userId='me')
    request = self.client.users().messages().list(userId='me', includeSpamTrash='true')
    # `is not None`, not `!= None` (PEP 8 identity comparison).
    while request is not None:
        messages_doc = request.execute()
        batch = BatchHttpRequest(callback = self.saveEmails)
        # .get() guards against a page with no 'messages' key (empty
        # mailbox page), which previously raised KeyError.
        for msg_id in messages_doc.get('messages', []):
            batch.add(self.client.users().messages().get(userId = 'me', id = msg_id['id']))
        batch.execute()
        request = self.client.users().messages().list_next(request, messages_doc)
def executeMultipleRequests(responseOrder, makeRequest):
    """Execute len(responseOrder) requests as one batch, returning responses
    ordered to match responseOrder.

    Args:
        responseOrder: sequence of request ids; position determines where
            each response lands in the returned list.
        makeRequest: callable mapping an index to an HttpRequest.

    Returns:
        List of responses (None where a request failed).
    """
    responses = [None] * len(responseOrder)
    # Precompute id -> position once: the previous responseOrder.index()
    # lookup inside the callback was O(n) per response (O(n^2) overall).
    # setdefault keeps the first occurrence, matching list.index semantics.
    index_of = {}
    for position, request_id in enumerate(responseOrder):
        index_of.setdefault(request_id, position)

    def batchCallback(request_id, response, exception):
        # Failed requests leave their slot as None.
        if exception:
            return
        responses[index_of[request_id]] = response

    batch = BatchHttpRequest()
    for position, request_id in enumerate(responseOrder):
        batch.add(makeRequest(position), request_id=request_id,
                  callback=batchCallback)
    batch.execute()
    return responses
def test_execute_global_callback(self):
    """The batch-level callback receives every sub-response."""
    recorder = Callbacks()
    # One multipart HTTP response carries both sub-responses.
    mock_http = HttpMockSequence([
        ({"status": "200",
          "content-type": 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_RESPONSE),
    ])
    batch = BatchHttpRequest(callback=recorder.f)
    for request in (self.request1, self.request2):
        batch.add(request)
    batch.execute(mock_http)
    self.assertEqual({"foo": 42}, recorder.responses["1"])
    self.assertEqual({"baz": "qux"}, recorder.responses["2"])
def send_multiple_requests(requests):
    """Execute `requests` in batches of at most MAX_BATCH_SIZE each."""
    batch = BatchHttpRequest()
    pending = 0
    for request in list(requests):
        batch.add(request)
        pending += 1
        if pending >= MAX_BATCH_SIZE:
            # Flush the full batch and start a fresh one.
            batch.execute()
            batch = BatchHttpRequest()
            pending = 0
    # Send whatever is left over from the last partial batch.
    if pending:
        batch.execute()
def test_new_id(self):
    """_new_id yields sequential string ids and skips ids already taken."""
    batch = BatchHttpRequest()
    # assertEqual throughout: assertEquals is a deprecated unittest alias
    # (and the sibling copies of this test already use assertEqual).
    id_ = batch._new_id()
    self.assertEqual(id_, '1')
    id_ = batch._new_id()
    self.assertEqual(id_, '2')
    # Explicitly reserving '3' makes the generator skip past it.
    batch.add(self.request1, request_id='3')
    id_ = batch._new_id()
    self.assertEqual(id_, '4')
def test_new_id(self):
    """Auto-generated request ids are sequential and skip explicit ones."""
    batch = BatchHttpRequest()
    self.assertEqual('1', batch._new_id())
    self.assertEqual('2', batch._new_id())
    # An explicitly supplied id ('3') is consumed by the generator.
    batch.add(self.request1, request_id='3')
    self.assertEqual('4', batch._new_id())
def test_new_id(self):
    # _new_id hands out monotonically increasing string ids.
    batch = BatchHttpRequest()
    id_ = batch._new_id()
    self.assertEqual("1", id_)
    id_ = batch._new_id()
    self.assertEqual("2", id_)
    # Adding a request with an explicit id ("3") advances the counter past it.
    batch.add(self.request1, request_id="3")
    id_ = batch._new_id()
    self.assertEqual("4", id_)
def getGmailInbox(self):
    """Page through every message (including spam/trash) and hand each page's
    full messages to self.saveEmails via a batch request."""
    #request = self.client.users().messages().list(userId='me')
    request = self.client.users().messages().list(userId='me',
                                                  includeSpamTrash='true')
    # `is not None`, not `!= None` (PEP 8 identity comparison).
    while request is not None:
        messages_doc = request.execute()
        batch = BatchHttpRequest(callback=self.saveEmails)
        # .get() guards against a page with no 'messages' key (empty
        # mailbox page), which previously raised KeyError.
        for msg_id in messages_doc.get('messages', []):
            batch.add(self.client.users().messages().get(userId='me',
                                                         id=msg_id['id']))
        batch.execute()
        request = self.client.users().messages().list_next(
            request, messages_doc)
def main(argv):
    """Query Search Analytics (query dimension) for every URL in the input
    file, batching five queries per request; responses go to HandleRequest."""
    # Module-level state shared with the response handler, presumably.
    global lLines
    global lFlags
    lService, lFlags = sample_tools.init(
        argv, 'webmasters', 'v3', __doc__, __file__, parents=[argparser],
        scope='https://www.googleapis.com/auth/webmasters.readonly')
    lLines = [line.rstrip('\n') for line in open(lFlags.url_file)]
    lPos = 0
    lBatch = BatchHttpRequest()
    for lURL in lLines:
        lRequest = {
            'startDate': lFlags.start_date,
            'endDate': lFlags.end_date,
            'dimensions': ['query'],
            'dimensionFilterGroups': [{
                'filters': [{
                    'dimension': 'page',
                    'expression': lURL
                }]
            }]
        }
        # Derive "scheme://host" from the full page URL.
        theSiteURL = (lURL.split("//")[0] + "//" +
                      (lURL.split("//")[-1].split("/")[0]))
        lBatch.add(
            lService.searchanalytics().query(siteUrl=theSiteURL,
                                             body=lRequest),
            HandleRequest)
        lPos += 1
        # Flush every five queries to stay under the rate limit.
        if lPos == 5:
            lBatch.execute()
            time.sleep(
                0.5)  # If it runs too fast Google will deny the request.
            lBatch = BatchHttpRequest()
            lPos = 0
    # Flush any queries left over from the last partial batch.
    if lPos:
        lBatch.execute()
def infoUpdate(vSources, forceUpdate=False):
    """Batch the info-refresh requests of every source that needs one and
    execute them in a single round trip (skipping the call entirely when
    nothing is queued)."""
    batch = BatchHttpRequest()
    queued = False
    for source in vSources:
        if not (forceUpdate or source.needsInfoUpdate()):
            continue
        request, callback = source.fetchInfoBatchRequest()
        batch.add(request, callback=callback)
        queued = True
    if queued:
        batch.execute()
def runWorker(self, service=None, userId=None):
    """Apply self.payload to up to self.batch_request_limit queued threads.

    Args:
        service: API service to use; falls back to self.service. (The
            previous implementation unconditionally overwrote this argument
            with self.service, silently ignoring callers' values.)
        userId: user to act on; falls back to self.userId. (Previously the
            argument was accepted but never used.)
    """
    if service is None:
        service = self.service
    if userId is None:
        userId = self.userId
    if not self.threads:
        return
    batch = BatchHttpRequest()
    count = 0
    # Drain from the front of the shared work list up to the batch limit.
    while self.threads and count < self.batch_request_limit:
        count += 1
        thread = self.threads.pop(0)
        batch.add(callback=self.cb,
                  request_id=thread,
                  request=service.users().threads().modify(
                      userId=userId, id=thread, body=self.payload))
    batch.execute()
def test_execute_global_callback(self):
    # A callback passed to the BatchHttpRequest constructor fires for every
    # request in the batch, keyed by the auto-assigned request id.
    callbacks = Callbacks()
    batch = BatchHttpRequest(callback=callbacks.f)
    batch.add(self.request1)
    batch.add(self.request2)
    # One multipart HTTP response carries both sub-responses.
    http = HttpMockSequence([
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_RESPONSE),
    ])
    batch.execute(http=http)
    self.assertEqual({'foo': 42}, callbacks.responses['1'])
    self.assertEqual({'baz': 'qux'}, callbacks.responses['2'])
def batchUpdateInventory(self, gproduct_qset): GoogleProduct = get_model('gmerchant', 'GoogleProduct') def product_updated(request_id, unused_response, exception): if exception is not None: # Do something with the exception. print 'There was an error: ' + str(exception) else: gp = GoogleProduct.objects.get(google_shopping_id=offer_id) gp.google_shopping_updated = datetime.now() gp.save() print 'Request ID: %s - Product was updated.' % ( str(request_id), ) merchant_id = self.merchant_id batch = BatchHttpRequest(callback=product_updated) for prod in gproduct_qset: product = prod.product new_status = { #Update the price of the item 'price': { 'value': str(product.stockrecords.first().price_incl_tax), 'currency': 'GBP' }, 'description': len(product.google_shopping_description) > 0 and bleach.clean( smart_text(product.google_shopping_description), strip=True) or bleach.clean(smart_text( product.parent.google_shopping_description), strip=True), 'link': SITE_ROOT + product.get_absolute_url(), 'imageLink': product.get_first_image_url(), #Is it in stock? 'availability': resolve_google_availability(product), } # Add product update to the batch. batch.add(self.service.inventory().set( merchantId=merchant_id, productId=prod.google_shopping_id, body=new_status)) try: batch.execute() except client.AccessTokenRefreshError: warn_exp_token()
def main(argv):
    """Create BATCH_SIZE product datafeeds (weekly fetch) in one batch call."""
    # Authenticate and construct service.
    service, flags = sample_tools.init(argv, 'content', 'v2', __doc__,
                                       __file__, parents=[argparser])
    merchant_id = flags.merchant_id
    batch = BatchHttpRequest(callback=datafeed_inserted)
    for _ in range(BATCH_SIZE):
        name = 'feed%s' % shopping_common.get_unique_id()
        datafeed = {
            # The file name must be unique per account. We only use unique
            # names in these examples, so it's not an issue here.
            'name': name,
            'fileName': name,
            'contentType': 'products',
            'attributeLanguage': 'en',
            'contentLanguage': 'en',
            'intendedDestinations': ['Shopping'],
            'targetCountry': 'US',
            # You can schedule monthly, weekly or daily.
            #
            # Monthly - set day of month ('dayOfMonth') and hour ('hour')
            # Weekly - set day of week ('weekday') and hour ('hour')
            # Daily - set just the hour ('hour')
            'fetchSchedule': {
                'weekday': 'monday',
                'hour': 6,
                'timeZone': 'America/Los_Angeles',
                'fetchUrl': 'https://feeds.myshop.com/' + name
            },
            'format': {
                'fileEncoding': 'utf-8',
                'columnDelimiter': 'tab',
                'quotingMode': 'value quoting'
            }
        }
        # Add datafeed to the batch.
        batch.add(service.datafeeds().insert(merchantId=merchant_id,
                                             body=datafeed))
    try:
        batch.execute()
    except client.AccessTokenRefreshError:
        print(
            'The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
def share(self, file_ids, emails, role='reader', callback=None):
    """ Share a list of files to a list of e-mails.

    Inserts one Drive permission per (file, email) pair in a single batch
    and returns an OrderedDict keyed by file id. NOTE(review): the `callback`
    parameter and the local `http` are never used, and each file's response
    entry is re-initialized per email, so only the last email's
    'insert_kwargs' survives -- confirm whether that is intended.
    """
    if not isinstance(file_ids, (list, tuple)):
        raise ValueError(
            "We are expecting a list of file_ids, not %s" % file_ids
        )
    if not isinstance(emails, (list, tuple)):
        raise ValueError(
            "We are expecting a list of emails, not %s" % emails
        )
    self._pydrive.auth.Authorize()
    perms = self._pydrive.auth.service.permissions()
    http = self._pydrive.auth.http
    batch_response = OrderedDict()

    def batch_callback(request_id, response, exception):
        # request_id has the form 'share__<file_id>__<uuid>'.
        file_id = request_id.split('__', 2)[1]
        if exception:
            logger.error("Error on drive batch operation for %s: %s",
                         request_id, exception)
            batch_response[file_id].update({'exception': exception})
        else:
            batch_response[file_id].update(response)

    batch_request = BatchHttpRequest(callback=batch_callback)
    # Deduplicate both inputs before fanning out.
    for file_id in list(set(file_ids)):
        for email in list(set(emails)):
            kwargs = {
                'fileId': file_id,
                'body': {
                    'value': email,
                    'type': 'user',
                    'role': role
                }
            }
            batch_id = 'share__%s__%s' % (file_id, uuid4())
            batch_request.add(perms.insert(**kwargs), request_id=batch_id)
            logger.info(
                "Batch share request added with ID %s and data %s",
                batch_id, kwargs
            )
            batch_response[file_id] = {'insert_kwargs': kwargs}
    batch_request.execute()
    return batch_response
def execute(self):
    """Drain up to self.quota queued requests into one batch and run it.

    Pops (request, request_id) pairs off self.queue — at most self.quota of
    them, stopping early if the queue empties — adds each to a
    BatchHttpRequest, then executes the batch over a fresh HTTP object.
    """
    batch = BatchHttpRequest(callback=self.call_back)
    pulled = 0
    while pulled < self.quota and self.queue.qsize() != 0:
        request, request_id = self.queue.get()
        batch.add(request, request_id=request_id)
        pulled += 1
    batch.execute(http=httplib2.Http())
def get_list(wf, http, service):
    """Batch-fetch up to 100 INBOX threads, then return EMAIL_LIST.

    Each thread is fetched with a restricted field mask; list_threads is the
    per-thread batch callback, which is what populates EMAIL_LIST.
    """
    # Retrieve a page of thread references.
    listing = service.users().threads().list(
        userId='me', labelIds=['INBOX'], maxResults=100).execute()
    thread_refs = listing.get('threads', [])
    if thread_refs:
        # Only the fields the list view needs, to keep responses small.
        wanted = ('messages/id,messages/threadId,messages/labelIds,'
                  'messages/snippet,messages/payload/headers')
        batch = BatchHttpRequest()
        for ref in thread_refs:
            batch.add(service.users().threads().get(userId='me', id=ref['id'],
                                                    fields=wanted),
                      callback=list_threads)
        batch.execute(http=http)
    return EMAIL_LIST
def GetMessages(historyItems):
  """Batch-fetch the full content of every message referenced in historyItems.

  Args:
    historyItems: iterable of Gmail history records, each carrying a
      'messages' list of dicts with at least an 'id' key.

  Uses the module-level gmail_service and http objects; PrintMessage is
  invoked once per fetched message as the batch callback. Returns None.
  """
  # Flatten the nested history structure into a plain list of message ids
  # (was two manual append loops).
  messageIds = [message['id']
                for historyItem in historyItems
                for message in historyItem['messages']]
  if not messageIds:
    # Nothing to fetch; avoid building and executing an empty batch.
    return
  batch = BatchHttpRequest()
  for msgId in messageIds:
    batch.add(gmail_service.users().messages().get(id=msgId, userId='me',
                                                   format='full'),
              callback=PrintMessage)
  batch.execute(http=http)
def add_users(users, permissions):
  """Adds users to every view (profile) with the given permissions.

  Args:
    users: A list of user email addresses.
    permissions: A list of user permissions.

  Note: this code assumes you have MANAGE_USERS level permissions to each
  profile and an authorized Google Analytics service object.
  """
  # Fetch the full set of account summaries once up front.
  summaries = analytics.management().accountSummaries().list().execute()

  for account in summaries.get('items', []):
    account_id = account.get('id')

    for email in users:
      # One batch per (account, user) pair.
      batch = BatchHttpRequest(callback=call_back)

      for prop in account.get('webProperties', []):
        prop_id = prop.get('id')

        for profile in prop.get('profiles', []):
          # Queue the Profile User Link insertion for this view.
          batch.add(analytics.management().profileUserLinks().insert(
              accountId=account_id,
              webPropertyId=prop_id,
              profileId=profile.get('id'),
              body={
                  'permissions': {
                      'local': permissions
                  },
                  'userRef': {
                      'email': email
                  }
              }))

      # Execute the batch request for each user.
      batch.execute()
def videoUpdate(vSources, pages=1, forceUpdate=False, fetchVideoStats=True):
    """Refresh stale channel info, then batch-update video pages and, optionally, their stats."""
    # Channels whose cached info (including upload playlist) is stale.
    stale_channels = [src for src in vSources
                      if src.isChannel()
                      and src.needsInfoUpdate(checkUploadPlaylist=True)]
    infoUpdate(stale_channels, forceUpdate=True)

    # Collect every (videos, page-number) pair that needs refreshing.
    pages_to_update = []
    for src in vSources:
        vids = src.videos
        pages_to_update.extend(
            (vids, num) for num in range(1, pages + 1)
            if forceUpdate or vids.pageNeedsUpdate(num))
    if not pages_to_update:
        return

    page_batch = BatchHttpRequest()
    for vids, num in pages_to_update:
        req, cb = vids.updatePageBatchRequest(num)
        page_batch.add(req, callback=cb)
    page_batch.execute()

    if fetchVideoStats:
        stats_batch = BatchHttpRequest()
        for vids, num in pages_to_update:
            req, cb = vids.fetchVideoStatsBatchRequest(num)
            stats_batch.add(req, callback=cb)
        stats_batch.execute()
def test_execute_batch_http_error(self):
    """A per-item HTTP error in the batch payload surfaces as an HttpError in the callback."""
    recorder = Callbacks()
    batch = BatchHttpRequest(callback=recorder.f)
    batch.add(self.request1)
    batch.add(self.request2)

    headers = {'status': '200',
               'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'}
    mock_http = HttpMockSequence([(headers, BATCH_ERROR_RESPONSE)])
    batch.execute(http=mock_http)

    # First request succeeded normally.
    self.assertEqual({'foo': 42}, recorder.responses['1'])
    # Second request failed with a 403 rendered as an HttpError.
    expected = ('<HttpError 403 when requesting '
                'https://www.googleapis.com/someapi/v1/collection/?foo=bar returned '
                '"Access Not Configured">')
    self.assertEqual(expected, str(recorder.exceptions['2']))
def main(argv):
  """Delete the datafeeds named on the command line in one batch request."""
  # Authenticate and construct service.
  service, config, flags = shopping_common.init(
      argv, __doc__, parents=[argparser])
  merchant_id = config['merchantId']

  batch = BatchHttpRequest(callback=datafeed_deleted)
  for feed_id in flags.datafeed_ids:
    # Queue one deletion per datafeed id; datafeed_deleted handles responses.
    batch.add(service.datafeeds().delete(merchantId=merchant_id,
                                         datafeedId=feed_id))

  try:
    batch.execute()
  except client.AccessTokenRefreshError:
    print('The credentials have been revoked or expired, please re-run the '
          'application to re-authorize')
def get_data_from_google_analytics(self, business):
    """Run every configured GA query for *business* in one batch request.

    Args:
      business: passed through to self.get_service to build the authorized
        Analytics service and HTTP object.

    Returns:
      Whatever self.prepare_data_for_dashboard produces from the per-section
      responses (keyed by section name; a failed section's value is None,
      since the callback ignores exceptions).
    """
    service, http = self.get_service(business)
    data = {}

    def get_data_callback(request_id, response, exception):
        # NOTE(review): exceptions are silently dropped; a failed section
        # simply stores None under its request_id.
        data[request_id] = response

    batch = BatchHttpRequest(callback=get_data_callback)
    # Iterate items() once instead of .keys() plus six redundant
    # DATA_CONFIGS.get(section, {}) lookups per section.
    for section, config in self.DATA_CONFIGS.items():
        batch.add(
            service.data().ga().get(
                ids='ga:' + settings.GOOGLE_ANALYTICS_PROFILE_ID,
                start_date=config.get('start_date'),
                end_date=config.get('end_date'),
                metrics=config.get('metrics'),
                dimensions=config.get('dimensions'),
                filters=config.get('filters')
            ),
            request_id=section)
    batch.execute(http=http)
    return self.prepare_data_for_dashboard(data)
def test_execute_refresh_and_retry_on_401(self):
    """A 401 inside the batch payload refreshes only that request's credential and retries just that request."""
    batch = BatchHttpRequest()
    recorder = Callbacks()

    first_cred = MockCredentials('Foo')
    second_cred = MockCredentials('Bar')

    # First round returns a 401 for one item; second round retries it alone.
    batch_http = HttpMockSequence([
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_RESPONSE_WITH_401),
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_SINGLE_RESPONSE),
    ])

    first_http = HttpMockSequence([])
    first_cred.authorize(first_http)
    second_http = HttpMockSequence([])
    second_cred.authorize(second_http)
    self.request1.http = first_http
    self.request2.http = second_http

    batch.add(self.request1, callback=recorder.f)
    batch.add(self.request2, callback=recorder.f)
    batch.execute(http=batch_http)

    # Both requests ultimately succeed with no surfaced exception.
    self.assertEqual({'foo': 42}, recorder.responses['1'])
    self.assertEqual(None, recorder.exceptions['1'])
    self.assertEqual({'baz': 'qux'}, recorder.responses['2'])
    self.assertEqual(None, recorder.exceptions['2'])

    # Only the 401'd request's credential was refreshed and re-applied.
    self.assertEqual(1, first_cred._refreshed)
    self.assertEqual(0, second_cred._refreshed)
    self.assertEqual(1, first_cred._authorized)
    self.assertEqual(1, second_cred._authorized)
    self.assertEqual(1, second_cred._applied)
    self.assertEqual(2, first_cred._applied)
def main(argv):
  """Insert BATCH_SIZE generated sample datafeeds in a single batch request."""
  # Authenticate and construct service.
  service, config, _ = shopping_common.init(argv, __doc__)
  merchant_id = config['merchantId']

  batch = BatchHttpRequest(callback=datafeed_inserted)
  for _ in range(BATCH_SIZE):
    feed_name = 'feed%s' % shopping_common.get_unique_id()
    sample_body = datafeed_sample.create_datafeed_sample(config, feed_name)
    # Queue the insertion; datafeed_inserted is invoked per response.
    batch.add(service.datafeeds().insert(merchantId=merchant_id,
                                         body=sample_body))

  try:
    batch.execute()
  except client.AccessTokenRefreshError:
    print('The credentials have been revoked or expired, please re-run the '
          'application to re-authorize')
def main(argv):
  """Delete every product listed in the TSV at products_path, in batches.

  Reads product records (one per line, product id in column 0) and issues
  batched deletions of at most MAX_PAGE_SIZE products each.

  Fixes vs. the previous version:
    * The record that filled the buffer to MAX_PAGE_SIZE was never appended
      and was silently skipped — one product per page was left undeleted.
    * `e.message` raises AttributeError on Python 3; use the exception itself.
    * The bare `except:` on the final page now catches Exception explicitly,
      keeping the best-effort back-off behavior.
  """
  # Authenticate and construct service.
  service, config, _ = shopping_common.init(argv, __doc__)
  merchant_id = config['merchantId']

  def _delete_batch(product_rows):
    """Issue one batched delete for product_rows; best-effort error handling."""
    batch = BatchHttpRequest(callback=product_deleted)
    for row in product_rows:
      # Column 0 of each TSV record is the product id.
      batch.add(
          service.products().delete(merchantId=merchant_id,
                                    productId=row[0]))
    try:
      batch.execute()
    except client.AccessTokenRefreshError:
      print('The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
    except Exception as e:
      # Best-effort: report and back off before the next page.
      print(e)
      time.sleep(180)

  with open(products_path, 'r') as products_handle:
    products_buf = []
    for product_record in csv.reader(products_handle, delimiter="\t"):
      products_buf.append(product_record)
      if len(products_buf) >= MAX_PAGE_SIZE:
        _delete_batch(products_buf)
        products_buf = []
    # Flush the final, partially-filled page.
    if products_buf:
      _delete_batch(products_buf)