Example 1
    def loadMsgs(self,labels):
        # For each label, return a list of message Ids
        self.labels = labels
        ts = (parser.parse(self.db.getTimestamp()) + datetime.timedelta(days=-1))
        for item in self.labels:
            print "Loading " + item['name'] + "..."
            idList = self.getMsgIds(item['id'], 'after: ' + ts.strftime('%Y/%m/%d'))

            # For each id, add 'get' to batch
            n = 0
            self.curLab = item['name']
            self.batch = BatchHttpRequest(callback=self.insertMsg)
            for num in idList:
                self.batch.add(self.messages.get(userId='me',id=num),request_id=num)
                n = n + 1
                               
                # After 1000 or at the end of the list, execute batch and commit
                if (n % 1000) == 0 or n == len(idList):
                    print str(n) + " / " + str(len(idList))
                    try:
                        self.batch.execute(http=self.http)
                    except:
                        print "Unexpected error:", sys.exc_info()[0]
                        raise
                    self.db.commit()
                    self.batch = BatchHttpRequest(callback=self.insertMsg)
                    
            print "Finished " + item['name'] + "!"
Example 2
def main(argv):
  # Authenticate and construct service.
  service, flags = sample_tools.init(
      argv, 'content', 'v2', __doc__, __file__, parents=[argparser])
  merchant_id = flags.merchant_id
  product_ids = flags.product_ids

  batch = BatchHttpRequest(callback=product_updated)

  for product_id in product_ids:
    new_status = {
        'availability': 'out of stock',
        'price': {'value': 3.14, 'currency': 'USD'}}

    # Add product update to the batch.
    batch.add(service.inventory().set(
        merchantId=merchant_id,
        storeCode=product_id.split(':')[0],
        productId=product_id,
        body=new_status))
  try:
    batch.execute()

  except client.AccessTokenRefreshError:
    print ('The credentials have been revoked or expired, please re-run the '
           'application to re-authorize')
Example 3
  def test_http_errors_passed_to_callback(self):
    batch = BatchHttpRequest()
    callbacks = Callbacks()
    cred_1 = MockCredentials('Foo')
    cred_2 = MockCredentials('Bar')

    http = HttpMockSequence([
      ({'status': '200',
        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
       BATCH_RESPONSE_WITH_401),
      ({'status': '200',
        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
       BATCH_RESPONSE_WITH_401),
      ])

    creds_http_1 = HttpMockSequence([])
    cred_1.authorize(creds_http_1)

    creds_http_2 = HttpMockSequence([])
    cred_2.authorize(creds_http_2)

    self.request1.http = creds_http_1
    self.request2.http = creds_http_2

    batch.add(self.request1, callback=callbacks.f)
    batch.add(self.request2, callback=callbacks.f)
    batch.execute(http=http)

    self.assertEqual(None, callbacks.responses['1'])
    self.assertEqual(401, callbacks.exceptions['1'].resp.status)
    self.assertEqual(
        'Authorization Required', callbacks.exceptions['1'].resp.reason)
    self.assertEqual({u'baz': u'qux'}, callbacks.responses['2'])
    self.assertEqual(None, callbacks.exceptions['2'])
Example 4
  def _insert_item_all_users(self):
    """Insert a timeline item to all authorized users."""
    logging.info('Inserting timeline item to all users')
    users = Credentials.all()
    total_users = users.count()

    if total_users > 10:
      return 'Total user count is %d. Aborting broadcast to save your quota' % (
          total_users)
    body = {
        'text': 'Hello Everyone!',
        'notification': {'level': 'DEFAULT'}
    }

    batch_responses = _BatchCallback()
    batch = BatchHttpRequest(callback=batch_responses.callback)
    for user in users:
      creds = StorageByKeyName(
          Credentials, user.key().name(), 'credentials').get()
      mirror_service = util.create_service('mirror', 'v1', creds)
      batch.add(
          mirror_service.timeline().insert(body=body),
          request_id=user.key().name())

    batch.execute(httplib2.Http())
    return 'Successfully sent cards to %d users (%d failed).' % (
        batch_responses.success, batch_responses.failure)
Example 5
def main():
  """Main entrypoint."""
  client = create_client()
  files = fetch_all_metadata(client)
  dupes = find_dupes(files)
  print '{} duplicates found. '.format(len(dupes))
  if len(dupes) == 0:
    print 'We are done.'
    return
  print 'Please check them.'
  total = 0
  for dupeset in dupes:
    print '--'
    for dupe in dupeset:
      print dupe['alternateLink'], dupe['title']
    for dupe in dupeset[1:]:
      total += int(dupe['quotaBytesUsed'])
  print '--'
  print '{} Gigabytes wasted.'.format(total / ONE_GIG)
  conf = raw_input('Great. Now trash the extras? (y/n) ')
  if conf.strip() == 'y':
    print 'Trashing.'
    batch = BatchHttpRequest()
    for dupeset in dupes:
      for dupe in dupeset[1:]:
        batch.add(client.files().trash(fileId=dupe['id']))
    batch.execute()
    print 'We are done. Check the trash for your files.'
  else:
    print 'Not touching anything.'
Example 6
    def get_messages(self, max_results=10, request_format=None,
                     label_ids=[], page_token=None):
        response = self._get_message_ids(max_results, label_ids, page_token)
        if not response:
            return []

        if not request_format:
            request_format = 'metadata'

        messages = []

        def on_get_message(request_id, response, exception):
            if exception is not None:
                return

            messages.append(response)

        batch = BatchHttpRequest(callback=on_get_message)
        try:
            for message in response['messages']:
                # message_ids.append(message['id'])
                batch.add(self._users.messages().get(id=message['id'],
                                                     userId='me',
                                                     format=request_format))
            batch.execute(http=self._http)
        except KeyError:
            return messages

        return messages
Example 7
  def get(self):
    """Render the main page."""
    video_url = self.request.get("url")

    logging.info('Inserting timeline item to all users')
    users = Credentials.all()
    total_users = users.count()

    if total_users > 10:
      return 'Total user count is %d. Aborting broadcast to save your quota' % (
          total_users)

    body = {
        'notification': {'level': 'DEFAULT'}, 
        'text': video_url,
    }
    if 'youtube' in video_url:
        body['menuItems'] = [{'action' : 'PLAY_VIDEO', 'payload' : video_url}]

    batch_responses = _BatchCallback()
    batch = BatchHttpRequest(callback=batch_responses.callback)
    for user in users:
      creds = StorageByKeyName(
          Credentials, user.key().name(), 'credentials').get()
      mirror_service = util.create_service('mirror', 'v1', creds)
      timeline = retrieve_all_timeline_items(mirror_service)
      batch.add(
          mirror_service.timeline().insert(body=body),
          request_id=user.key().name())


    batch.execute(httplib2.Http())

    self._render_template('')
Example 8
def main():
  """Main entrypoint."""
  client = create_client()
  files = fetch_all_metadata(client)
  dupes = find_dupes(files)
  print '{} duplicates found. '.format(len(dupes))
  if len(dupes) == 0:
    print 'We are done.'
    return
  print 'Please check them.'
  total = 0
  for dupeset in dupes:
    print '--'
    for dupe in dupeset:
      print dupe['alternateLink'], dupe['title']
    for dupe in dupeset[1:]:
      total += int(dupe['quotaBytesUsed'])
  print '--'
  print '{} Gigabytes wasted.'.format(total / ONE_GIG)
  conf = raw_input('Great. Now trash the extras? (y/n) ')
  if conf.strip() == 'y':
    print 'Trashing.'
    batch = BatchHttpRequest()
    for dupeset in dupes:
      for dupe in dupeset[1:]:
        batch.add(client.files().trash(fileId=dupe['id']))
        if len(batch._order) == 1000: # batch maxes out at 1k
          batch.execute()
          batch = BatchHttpRequest()
    batch.execute()
    print 'We are done. Check the trash for your files.'
  else:
    print 'Not touching anything.'
Example 9
def delete_playlists(youtube, args):
	playlists_req = youtube.playlists().list(
		part = "id,snippet",
		mine = True,
		maxResults = MAX_RESULTS,
	)

	def delete_playlist(request_id, response, exception):
		playlist_id = request_id
		if exception:
			raise exception
		else:
			sys.stderr.write(u"Deleted {id}\n".format(id = playlist_id))

	delete = False
	batch_req = BatchHttpRequest(callback = delete_playlist)
	while playlists_req:
		playlists = playlists_req.execute()

		for playlist in playlists["items"]:
			if re.match(args.pattern, playlist["snippet"]["title"]):
				if args.pretend:
					sys.stderr.write(u"Deleting {}\n".format(playlist["snippet"]["title"]))
				else:
					delete_req = youtube.playlists().delete(id = playlist["id"])
					batch_req.add(delete_req, request_id = playlist["id"])
					delete = True

		playlists_req = youtube.playlists().list_next(playlists_req, playlists)

	if delete:
		batch_req.execute()
Example 10
    def batchInsertProducts(self,product_qset):
        GoogleProduct = get_model('gmerchant','GoogleProduct')

        def product_inserted(unused_request_id, response, exception):
          if exception is not None:
            # Do something with the exception.
            print 'There was an error: ' + str(exception)
          else:
            offer_id = smart_text(response['offerId'].encode('ascii', 'ignore'))

            gp = GoogleProduct.objects.get(google_shopping_id=offer_id)
            if not gp.google_shopping_created:
                gp.google_shopping_created = datetime.now()
            else:
                gp.google_shopping_updated = datetime.now()
            gp.save()

            print ('Product with offerId "%s" and title "%s" was created.' %
                   (offer_id, smart_text(response['title'].encode('ascii', 'ignore'))))


        for block in chunks(product_qset,BATCH_SIZE):
            #Build a new batch request for this block of products
            batch = BatchHttpRequest(callback=product_inserted)
            for i in block:
                product = self.buildProduct(i)
                # Add product to the batch.
                batch.add(self.service.products().insert(merchantId=self.merchant_id,
                                                    body=product))
            try:
                #Let's send this batch off to the Goog.
                batch.execute()
            except client.AccessTokenRefreshError:
                warn_exp_token()
Example 11
        def share(self, users, share_type='writer', send_notifications=False, email_message=None):
            """
            Share a document with a given list of users.
            """
            if type(users) is str:
                users = [users]
            def batch_callback(request_id, response, exception):
                print("Response for request_id (%s):" % request_id)
                print(response)

                # Potentially log or re-raise exceptions
                if exception:
                    raise exception

            batch_request = BatchHttpRequest(callback=batch_callback)
            for count, user in enumerate(users):
                batch_entry = self.drive.service.permissions().insert(fileId=self._id, sendNotificationEmails=send_notifications, emailMessage=email_message,
                                                             body={
                                                                 'value': user,
                                                                 'type': 'user',
                                                                 'role': share_type
                                                             })
                batch_request.add(batch_entry, request_id="batch"+str(count))

            batch_request.execute()
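A hypothetical usage sketch for the share() method above; the wrapper object and the e-mail addresses are placeholders, not part of the original example.

# Assuming `document` is an instance of the class that defines share() above:
# document.share(['alice@example.com', 'bob@example.com'],
#                share_type='reader',
#                send_notifications=True,
#                email_message='Sharing this file with you.')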
Example 12
    def test_deserialize_response(self):
        batch = BatchHttpRequest()
        resp, content = batch._deserialize_response(RESPONSE)

        self.assertEquals(resp.status, 200)
        self.assertEquals(resp.reason, 'OK')
        self.assertEquals(resp.version, 11)
        self.assertEquals(content, '{"answer": 42}')
Example 13
  def test_deserialize_response(self):
    batch = BatchHttpRequest()
    resp, content = batch._deserialize_response(RESPONSE)

    self.assertEqual(200, resp.status)
    self.assertEqual('OK', resp.reason)
    self.assertEqual(11, resp.version)
    self.assertEqual('{"answer": 42}', content)
Example 14
  def test_deserialize_response(self):
    batch = BatchHttpRequest()
    resp, content = batch._deserialize_response(RESPONSE)

    self.assertEquals(resp.status, 200)
    self.assertEquals(resp.reason, 'OK')
    self.assertEquals(resp.version, 11)
    self.assertEquals(content, '{"answer": 42}')
Example 15
    def test_deserialize_response(self):
        batch = BatchHttpRequest()
        resp, content = batch._deserialize_response(RESPONSE)

        self.assertEqual(200, resp.status)
        self.assertEqual('OK', resp.reason)
        self.assertEqual(11, resp.version)
        self.assertEqual('{"answer": 42}', content)
Example 16
    def post(self):
        domain = self.request.get("domain")
        logging.info("Execing cleanup task for domain (%s)" % domain)

        http = httplib2.Http()
        httplib2.debuglevel = 4
        credentials = get_credentials(settings.RESELLER_ADMIN)
        credentials.authorize(http)

        service = build("reseller", settings.RESELLER_API_VERSION, http=http)

        response = service.customers().get(
            customerId=domain).execute(num_retries=5)

        def delete_sub_callback(request_id, response, exception):
            # just log the exception.
            logging.exception(exception)
            pass

        if not response.get("alternateEmail"):
            logging.info("Skipping cleanup, customer not resold..")
            exit()

        response = service.subscriptions().list(
            customerId=domain,
            maxResults=100).execute(num_retries=5)

        # resort the subscriptions and bump GAFB subs to the bottom
        subs = sorted(
            response['subscriptions'],
            cmp=lambda a, b: int(a['skuId'] == ResellerSKU.GoogleApps) - 1)

        batch = BatchHttpRequest(callback=delete_sub_callback)

        logging.info("Purging %d subs" % len(subs))

        for s in subs:
            if s['status'] in [ResellerDeletionType.Cancel,
                               ResellerDeletionType.Suspend,
                               ResellerDeletionType.Downgrade]:
                logging.info("Skipping subscription, in deleted state")
                continue

            # Google-Drive-storage / Google-Vault must be cancelled.
            deletionType = ResellerDeletionType.Cancel

            # GAfB cannot be 'cancelled', and must be 'suspended'
            if s['skuId'] == ResellerSKU.GoogleApps:
                deletionType = ResellerDeletionType.Suspend

            request = service.subscriptions().delete(
                customerId=domain,
                subscriptionId=s['subscriptionId'],
                deletionType=deletionType)

            batch.add(request)

        batch.execute(http=http)
Example 17
def lambda_handler(event, context):
    ORG_LIST = [ 1935, 1940, 1936, 1944, 1937, 1938, 1943, 2103, 1941, 1939, 1945, 1942 ]
    calendarService = getGoogleService()
    print("Retrieving list of Google Calendars")
    googleCalendarList = getCalendarList(calendarService)

    print("Processing RSS events for Org. IDs: %s" % ', '.join([str(o) for o in ORG_LIST]))
    for org in ORG_LIST:
        print("Retrieving RSS feed at \"%s\"" % createRssEventUrl(org))
        rssEvents = getRssEvents(org)
        print("Found %s RSS feed events" % len(rssEvents['events']))

        # Find last-dated item in RSS feed
        minEventDatetime, maxEventDatetime = findMinMaxRssDatetime(rssEvents['events'])
        
        # Retrieve existing calendars. Create if not found.
        targetCalendar = None
        for googleCalendar in googleCalendarList.itervalues():
            if rssEvents['summary'] == googleCalendar['summary']:
                print("Found existing google calendar \"%s\" - \"%s\"" % (googleCalendar['id'], googleCalendar['summary']))
                targetCalendar = googleCalendar
                break

        if targetCalendar is None:
            print("Creating new google calendar \"%s\"" % rssEvents['summary'])
            targetCalendar = createCalendar(calendarService, rssEvents['summary'])
            print("Created new calendar \"%s\" - \"%s\"" % (targetCalendar['id'], targetCalendar['summary']))

        # Read and cache google events up to last-dated RSS item
        print("Retrieving all events in calendar \"%s\" from %s to %s" % (targetCalendar['summary'], minEventDatetime.isoformat('T'), maxEventDatetime.isoformat('T')))
        calEvents = getCalendarEvents(calendarService, targetCalendar['id'], minEventDatetime, maxEventDatetime)
        print("Found %s Google Calendar events" % len(calEvents))

        # Compare RSS item to google cached events: if missing, create
        batchCount = 0
        createCount = 0
        batchRequest = BatchHttpRequest()
        for rssEvent in rssEvents['events'].itervalues():
            if isValidEvent(rssEvent):
                if compareOrCreateEvent(calendarService, targetCalendar['id'], batchRequest, rssEvent, calEvents):
                    batchCount += 1
            if batchCount == 100:
                createCount += batchCount
                print("Calling batch request to create %s events" % batchCount)
                googleApiCall(lambda: batchRequest.execute())
                batchCount = 0
                batchRequest = BatchHttpRequest()
        if batchCount:
            createCount += batchCount
            print("Calling batch request to create %s events" % batchCount)
            googleApiCall(lambda: batchRequest.execute())

        if createCount + len(calEvents) != len(rssEvents['events']):
            print("WARNING: Mismatched event counts: %s RSS events, %s existing Google Calendar Events, %s created Google Calendar Events" % (
                len(rssEvents['events']), len(calEvents), createCount))

    print("Done processesing all RSS feeds and Google Calendar events")
    print("Exiting")
Example 18
def batch_remove(objects, service, http):
  def cb(req_id, response, exception):
    if exception:
      print req_id, exception

  batch = BatchHttpRequest()
  for obj in objects:
    batch.add(service.objects().delete(bucket=BUCKET, object=obj), callback=cb)
  return batch.execute(http=http)
Example 19
 def unread_subjects(self):
     batch = BatchHttpRequest()
     messages = self.unread_ids()
     self.subjects = []
     if messages:
         for msg in messages:
             batch.add(self.service.users().messages().get(userId='me', id=msg['id']), callback=self.subject_callback)
         batch.execute()
     return self.subjects
Example 20
def main(argv):

    lService, lFlags = sample_tools.init(
        argv,
        'webmasters',
        'v3',
        __doc__,
        __file__,
        parents=[argparser],
        scope='https://www.googleapis.com/auth/webmasters.readonly')

    lLines = [line.rstrip('\n') for line in open(lFlags.url_file)]

    wipe_data()

    lPos = 0
    lBatch = BatchHttpRequest()

    for lURL in lLines:

        # TODO: Is it possible to minimize the request count (send one get two)?
        lRequest = {
            'startDate':
            lFlags.start_date,
            'endDate':
            lFlags.end_date,
            'dimensions': ['page'],
            'dimensionFilterGroups': [{
                'filters': [{
                    'dimension': 'page',
                    'expression': lURL
                }]
            }],
        }

        # TODO: Test with arg (maybe split '?')
        #lURL.split("//")[-1].split("/")[0]
        theSiteURL = (lURL.split("//")[0] + "//" +
                      (lURL.split("//")[-1].split("/")[0]))
        #theSiteURL = lURL
        print "Adding " + lURL
        lBatch.add(
            lService.searchanalytics().query(siteUrl=theSiteURL,
                                             body=lRequest), HandleRequest)
        lPos += 1

        # Try 10 QPS and 20 QPS -- 10 should work... Analytics is 5? Webmasters is 10? Search Analytics is 3?

        if lPos == 5:  # 5 queries per second is a Google imposed limit
            lBatch.execute()
            time.sleep(1)  # If it runs too fast Google will deny the request.
            lBatch = BatchHttpRequest()
            lPos = 0

    if lPos:
        lBatch.execute()
Example 21
	def post(self):
		urlfetch.set_default_fetch_deadline(60)
		http = decorator.http()	
		calendar_id = self.request.POST.get("calendar_id")
		widget = '<iframe src="https://www.google.com/calendar/embed?height=600&amp;wkst=1&amp;bgcolor=%23FFFFFF&amp;src='+calendar_id.replace("@","%40")+'&amp;color=%23B1365F&amp;ctz=Asia%2FCalcutta" style=" border-width:0 " width="800" height="600" frameborder="0" scrolling="no"></iframe>'
		batch = BatchHttpRequest()
		for event in events:
			batch.add(service.events().insert(calendarId=calendar_id, body=event))
		batch.execute(http=http)
		self.response.write("Updated calendar: <br/>\n"+widget)
Example 22
 def getGmailInbox(self):
     #request = self.client.users().messages().list(userId='me')
     request = self.client.users().messages().list(userId='me', includeSpamTrash='true')
     while request != None:
         messages_doc= request.execute()
         batch = BatchHttpRequest(callback = self.saveEmails)
         for msg_id in messages_doc['messages']:
             batch.add(self.client.users().messages().get(userId = 'me', id = msg_id['id']))
         batch.execute()
         request = self.client.users().messages().list_next(request, messages_doc)
Example 23
def _process_batch(updates):
    if not updates:
        return

    def callback(request_id, response, exception):
        if exception is not None:
            raise exception

    batch = BatchHttpRequest(callback=callback)
    [batch.add(update) for update in updates]
    batch.execute()
Example 24
def _process_batch(updates):
    if not updates:
        return

    def callback(request_id, response, exception):
        if exception is not None:
            raise exception

    batch = BatchHttpRequest(callback=callback)
    [batch.add(update) for update in updates]
    batch.execute()
Example 25
    def executeMultipleRequests(responseOrder, makeRequest):
        responses = [None for i in xrange(len(responseOrder))]
        def batchCallback(request_id, response, exception):
            if exception:
                return
            responses[responseOrder.index(request_id)] = response

        batch = BatchHttpRequest()
        for i in xrange(len(responseOrder)):
            batch.add(makeRequest(i), request_id = responseOrder[i], callback = batchCallback)
        batch.execute()
        return responses
Example 26
    def test_execute_global_callback(self):
        callbacks = Callbacks()
        batch = BatchHttpRequest(callback=callbacks.f)

        batch.add(self.request1)
        batch.add(self.request2)
        http = HttpMockSequence(
            [({"status": "200", "content-type": 'multipart/mixed; boundary="batch_foobarbaz"'}, BATCH_RESPONSE)]
        )
        batch.execute(http)
        self.assertEqual({"foo": 42}, callbacks.responses["1"])
        self.assertEqual({"baz": "qux"}, callbacks.responses["2"])
Example 27
 def test_serialize_request_no_body(self):
   batch = BatchHttpRequest()
   request = HttpRequest(
       None,
       None,
       'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
       method='POST',
       body='',
       headers={'content-type': 'application/json'},
       methodId=None,
       resumable=None)
   s = batch._serialize_request(request).splitlines()
   self.assertEquals(s, NO_BODY_EXPECTED.splitlines())
Example 28
 def test_serialize_request_no_body(self):
     batch = BatchHttpRequest()
     request = HttpRequest(
         None,
         None,
         'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
         method='POST',
         body='',
         headers={'content-type': 'application/json'},
         methodId=None,
         resumable=None)
     s = batch._serialize_request(request).splitlines()
     self.assertEqual(NO_BODY_EXPECTED.splitlines(), s)
Example 29
def main(argv):

    global lLines
    global lFlags

    lService, lFlags = sample_tools.init(
        argv,
        'webmasters',
        'v3',
        __doc__,
        __file__,
        parents=[argparser],
        scope='https://www.googleapis.com/auth/webmasters.readonly')

    lLines = [line.rstrip('\n') for line in open(lFlags.url_file)]

    lPos = 0
    lBatch = BatchHttpRequest()

    for lURL in lLines:

        lRequest = {
            'startDate':
            lFlags.start_date,
            'endDate':
            lFlags.end_date,
            'dimensions': ['query'],
            'dimensionFilterGroups': [{
                'filters': [{
                    'dimension': 'page',
                    'expression': lURL
                }]
            }]
        }

        theSiteURL = (lURL.split("//")[0] + "//" +
                      (lURL.split("//")[-1].split("/")[0]))
        lBatch.add(
            lService.searchanalytics().query(siteUrl=theSiteURL,
                                             body=lRequest), HandleRequest)
        lPos += 1

        if lPos == 5:
            lBatch.execute()
            time.sleep(
                0.5)  # If it runs too fast Google will deny the request.
            lBatch = BatchHttpRequest()
            lPos = 0

    if lPos:
        lBatch.execute()
Example 30
  def test_execute_global_callback(self):
    callbacks = Callbacks()
    batch = BatchHttpRequest(callback=callbacks.f)

    batch.add(self.request1)
    batch.add(self.request2)
    http = HttpMockSequence([
      ({'status': '200',
        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
       BATCH_RESPONSE),
      ])
    batch.execute(http=http)
    self.assertEqual({'foo': 42}, callbacks.responses['1'])
    self.assertEqual({'baz': 'qux'}, callbacks.responses['2'])
Example 31
 def test_serialize_request_no_body(self):
     batch = BatchHttpRequest()
     request = HttpRequest(
         None,
         None,
         "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
         method="POST",
         body="",
         headers={"content-type": "application/json"},
         methodId=None,
         resumable=None,
     )
     s = batch._serialize_request(request).splitlines()
     self.assertEqual(NO_BODY_EXPECTED.splitlines(), s)
Example 32
    def runWorker(self, service=None, userId=None):
        service = self.service
        if not self.threads:
            return

        batch = BatchHttpRequest()
        count = 0
        while self.threads and count < self.batch_request_limit:
            count += 1
            thread = self.threads.pop(0)
            batch.add(callback=self.cb, request_id=thread,
                      request=service.users().threads().modify(
                          userId=self.userId, id=thread, body=self.payload))
        batch.execute()
Example 33
def GetMessages(historyItems):
    messageIds = list()

    for historyItem in historyItems:
        for message in historyItem['messages']:
            messageIds.append(message['id'])

    if len(messageIds) > 0:

        batch = BatchHttpRequest()

        for msgId in messageIds:
            batch.add(gmail_service.users().messages().get(id=msgId, userId='me', format='full'), callback=PrintMessage)

        batch.execute(http=http)
Example 34
def get_list(wf, http, service):
    # Retrieve a page of threads
    threads = service.users().threads().list(
        userId='me', labelIds=['INBOX'], maxResults=100).execute()

    batch = BatchHttpRequest()
    if 'threads' in threads and len(threads['threads']) > 0:
        fields = 'messages/id,messages/threadId,messages/labelIds,messages/snippet,messages/payload/headers'
        for thread in threads['threads']:
            batch.add(service.users().threads().get(
                userId='me', id=thread['id'], fields=fields), callback=list_threads)

        batch.execute(http=http)

    return EMAIL_LIST
Example 35
  def test_add_fail_for_resumable(self):
    batch = BatchHttpRequest()

    upload = MediaFileUpload(
        datafile('small.png'), chunksize=500, resumable=True)
    self.request1.resumable = upload
    self.assertRaises(BatchError, batch.add, self.request1, request_id='1')
Example 36
    def test_serialize_request_media_body(self):
        batch = BatchHttpRequest()
        f = open(datafile('small.png'))
        body = f.read()
        f.close()

        request = HttpRequest(
            None,
            None,
            'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
            method='POST',
            body=body,
            headers={'content-type': 'application/json'},
            methodId=None,
            resumable=None)
        # Just testing it shouldn't raise an exception.
        s = batch._serialize_request(request).splitlines()
Example 37
def delete_playlists(youtube, args):
    playlists_req = youtube.playlists().list(
        part="id,snippet",
        mine=True,
        maxResults=MAX_RESULTS,
    )

    def delete_playlist(request_id, response, exception):
        playlist_id = request_id
        if exception:
            raise exception
        else:
            sys.stderr.write(u"Deleted {id}\n".format(id=playlist_id))

    delete = False
    batch_req = BatchHttpRequest(callback=delete_playlist)
    while playlists_req:
        playlists = playlists_req.execute()

        for playlist in playlists["items"]:
            if re.match(args.pattern, playlist["snippet"]["title"]):
                if args.pretend:
                    sys.stderr.write(u"Deleting {}\n".format(
                        playlist["snippet"]["title"]))
                else:
                    delete_req = youtube.playlists().delete(id=playlist["id"])
                    batch_req.add(delete_req, request_id=playlist["id"])
                    delete = True

        playlists_req = youtube.playlists().list_next(playlists_req, playlists)

    if delete:
        batch_req.execute()
Example 38
def task_():
  if request.method == "POST":
    env = os.getenv('SERVER_SOFTWARE')
    if (env and env.startswith('Google App Engine/')):
      db = MySQLdb.connect(
      unix_socket='/cloudsql/peppy-linker-102423:daniel-george',
      user='******',
      db='sheepdog')  
    cursor = db.cursor()
    tokens = ["","CDIQAA","CGQQAA","CJYBEAA","CMgBEAA","CPoBEAA","CKwCEAA","CN4CEAA","CJADEAA","CMIDEAA","CPQDEAA","CKYEEAA", "CNgEEAA", "CIoFEAA", "CLwFEAA", "CO4FEAA", "CKAGEAA", "CNIGEAA", "CIQHEAA", "CLYHEAA", "COgHEAA", "CJoIEAA", "CMwIEAA", "CP4IEAA", "CLAJEAA", "COIJEAA", "CJQKEAA", "CMYKEAA", "CPgKEAA", "CKoLEAA", "CNwLEAA", "CI4MEAA", "CMAMEAA", "CPIMEAA", "CKQNEAA", "CNYNEAA", "CIgOEAA", "CLoOEAA", "COwOEAA", "CJ4PEAA", "CNAPEAA", "CIIQEAA", "CLQQEAA", "COYQEAA", "CJgREAA", "CMoREAA", "CPwREAA", "CK4SEAA", "COASEAA", "CJITEAA", "CMQTEAA", "CPYTEAA", "CKgUEAA", "CNoUEAA", "CIwVEAA", "CL4VEAA", "CPAVEAA", "CKIWEAA", "CNQWEAA", "CIYXEAA", "CLgXEAA", "COoXEAA", "CJwYEAA", "CM4YEAA", "CIAZEAA", "CLIZEAA", "COQZEAA", "CJYaEAA", "CMgaEAA", "CPoaEAA", "CKwbEAA", "CN4bEAA", "CJAcEAA", "CMIcEAA", "CPQcEAA", "CKYdEAA", "CNgdEAA", "CIoeEAA", "CLweEAA", "CO4eEAA", "CKAfEAA", "CNIfEAA", "CIQgEAA", "CLYgEAA", "COggEAA", "CJohEAA", "CMwhEAA", "CP4hEAA", "CLAiEAA", "COIiEAA", "CJQjEAA", "CMYjEAA", "CPgjEAA", "CKokEAA", "CNwkEAA", "CI4lEAA", "CMAlEAA", "CPIlEAA", "CKQmEAA", "CNYmEAA", ]
    batch = BatchHttpRequest()
    user = request.form.get('user')
    
    for token in tokens[:20]:
      try:
        playlistitems_list_request = youtube.playlistItems().list(
          playlistId=user,
          part="snippet",
          pageToken=token,
          maxResults=50
          )
      except NameError:
        pass
        
      def list1(request_id,response,exception):
        for playlist_item in response["items"]:
          video_id = playlist_item["snippet"]["resourceId"]["videoId"]
          cursor.execute("""INSERT INTO sheepdog.videoIds (videoId) VALUES (%s);""", [video_id])
          db.commit()
      batch.add(playlistitems_list_request, callback=list1)
      
    batch.execute(http=http)
    
  return 'string'
Example 39
def print_popular_videos_with_analytics():
  for table in unique_link_table:
    num_batch = 0
    batch = BatchHttpRequest()

    for video_id in sorted(unique_link_table[table], key=unique_link_table[table].get, reverse=True):
      if num_batch >= 1000:
        break

      if len(list_popular_music_videos) >= 20:
        break

      batch.add(youtube.videos().list(id=video_id, part="snippet"), callback=process_result)
      num_batch += 1

    try:
      print "not missing http"
      batch.execute()
    except ValueError:
      print "missing http"

    if len(list_popular_music_videos) >= 20:
      for video in list_popular_music_videos[:50]:
        if len(video["items"]) > 0:
          list_popular_music_ids.append((video["items"][0]["id"], video["items"][0]["snippet"]["title"]))

      write_popular_to_database(table, list_popular_music_ids)
      del list_popular_music_ids[:]
Example 40
        def share(self,
                  users,
                  share_type='writer',
                  send_notifications=False,
                  email_message=None):
            """
            Share a document with a given list of users.
            """
            if type(users) is str:
                users = [users]

            def batch_callback(request_id, response, exception):
                print("Response for request_id (%s):" % request_id)
                print(response)

                # Potentially log or re-raise exceptions
                if exception:
                    raise exception

            batch_request = BatchHttpRequest(callback=batch_callback)
            for count, user in enumerate(users):
                batch_entry = self.drive.service.permissions().insert(
                    fileId=self._id,
                    sendNotificationEmails=send_notifications,
                    emailMessage=email_message,
                    body={
                        'value': user,
                        'type': 'user',
                        'role': share_type
                    })
                batch_request.add(batch_entry, request_id="batch" + str(count))

            batch_request.execute()
Example 41
def main(argv):
    # Authenticate and construct service.
    service, config, flags = shopping_common.init(argv,
                                                  __doc__,
                                                  parents=[argparser])
    merchant_id = config['merchantId']
    product_ids = flags.product_ids

    batch = BatchHttpRequest(callback=product_updated)

    for product_id in product_ids:
        new_status = {
            'availability': 'out of stock',
            'price': {
                'value': 3.14,
                'currency': 'USD'
            }
        }

        # Add product update to the batch.
        batch.add(service.inventory().set(merchantId=merchant_id,
                                          storeCode=product_id.split(':')[0],
                                          productId=product_id,
                                          body=new_status))
    try:
        batch.execute()

    except client.AccessTokenRefreshError:
        print(
            'The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
Example 42
def main(argv):
    # Authenticate and construct service.
    service, flags = sample_tools.init(argv,
                                       'content',
                                       'v2',
                                       __doc__,
                                       __file__,
                                       parents=[argparser])
    merchant_id = flags.merchant_id

    batch = BatchHttpRequest(callback=account_inserted)

    for _ in range(BATCH_SIZE):
        name = 'account%s' % shopping_common.get_unique_id()
        account = {
            'name': name,
            'websiteUrl': 'https://%s.example.com/' % (name, )
        }
        # Add account to the batch.
        batch.add(service.accounts().insert(merchantId=merchant_id,
                                            body=account))
    try:
        batch.execute()
    except client.AccessTokenRefreshError:
        print(
            'The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
Example 43
def del_users(service, service2, delete):

    account_summaries = service.management().accountSummaries().list().execute(
    )

    for account in account_summaries.get('items', []):
        account_id = account.get('id')
        account_links = service2.management().accountUserLinks().list(
            accountId=account_id).execute()
        batch = BatchHttpRequest(callback=call_back)

        for user in account_links.get('items', []):
            users_ref = user.get('userRef')
            user_mail = users_ref.get('email')
            users_id = user.get('id')

            for x in delete:
                if x == user_mail:
                    print x
                    print account_id

                    delete_account = service2.management().accountUserLinks(
                    ).delete(accountId=account_id, linkId=users_id)
                    batch.add(delete_account)
        batch.execute()
        time.sleep(1)
Example 44
    def _insert_item_all_users(self):
        """Insert a timeline item to all authorized users."""
        logging.info('Inserting timeline item to all users')
        users = Credentials.all()
        total_users = users.count()

        if total_users > 10:
            return 'Total user count is %d. Aborting broadcast to save your quota' % (
                total_users)
        body = {
            'text': 'Hello Everyone!',
            'notification': {
                'level': 'DEFAULT'
            }
        }

        batch_responses = _BatchCallback()
        batch = BatchHttpRequest(callback=batch_responses.callback)
        for user in users:
            creds = StorageByKeyName(Credentials,
                                     user.key().name(), 'credentials').get()
            mirror_service = util.create_service('mirror', 'v1', creds)
            batch.add(mirror_service.timeline().insert(body=body),
                      request_id=user.key().name())

        batch.execute(httplib2.Http())
        return 'Successfully sent cards to %d users (%d failed).' % (
            batch_responses.success, batch_responses.failure)
Example 45
  def test_http_errors_passed_to_callback(self):
    batch = BatchHttpRequest()
    callbacks = Callbacks()
    cred_1 = MockCredentials('Foo')
    cred_2 = MockCredentials('Bar')

    http = HttpMockSequence([
      ({'status': '200',
        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
       BATCH_RESPONSE_WITH_401),
      ({'status': '200',
        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
       BATCH_RESPONSE_WITH_401),
      ])

    creds_http_1 = HttpMockSequence([])
    cred_1.authorize(creds_http_1)

    creds_http_2 = HttpMockSequence([])
    cred_2.authorize(creds_http_2)

    self.request1.http = creds_http_1
    self.request2.http = creds_http_2

    batch.add(self.request1, callback=callbacks.f)
    batch.add(self.request2, callback=callbacks.f)
    batch.execute(http=http)

    self.assertEqual(None, callbacks.responses['1'])
    self.assertEqual(401, callbacks.exceptions['1'].resp.status)
    self.assertEqual(
        'Authorization Required', callbacks.exceptions['1'].resp.reason)
    self.assertEqual({u'baz': u'qux'}, callbacks.responses['2'])
    self.assertEqual(None, callbacks.exceptions['2'])
Example 46
    def getNewestVideos(self):
        # Temporary fix to overcome oauth expiries, should only call once oauth
        # is expired (to be fixed)
        self.records = {}

        # When subscription count is large it's important to batch all the
        # HTTP requests together as 1 http request. This will break if
        # Channel list is > 1000 (to be fixed)
        batch = BatchHttpRequest(callback=self.getChannelNewestVideosCallback)

        # Add each playlist to the batch request
        for channel_id in self.channel_titles:

            # We should be getting videos directly off the playlist items
            # But YouTube API takes 15 - 60 mins to update this list
            # So instead search.list is used at great quota cost
            # Also since moving to batch we only get the last 50 results from
            # a channel, TO DO: collate nextPageTokens if require more than 50
            check_after = (datetime.utcnow() -
                           timedelta(days=self.set.days_uploaded_after))
            check_after = check_after.isoformat("T") + "Z"
            batch.add(
                self.youtube.search().list(
                    part='snippet', maxResults=50, channelId=channel_id,
                    type='video', safeSearch='none', publishedAfter=check_after
                    )
                )

        for _ in range(500):
            with ytLoginManager(self.login_timer) as request:
                if request.relogin:
                    self.youtube = self.initilize_youtube(self.set)

                batch.execute()

            if request.success:
                break

        while not self.descq.empty():
            try:
                [YTid, cid, desc_contain, record] = self.descq.get()
                ful_desc = self.getVideoDescription(YTid)
                check_ful_desc = re.sub('[\W_]+', '', ful_desc).lower()
                if desc_contain in check_ful_desc:
                    self.recq.put([YTid, cid, record])
            except Exception:
                continue

        counter = 0
        while not self.recq.empty():
            try:
                [YTid, cid, record] = self.recq.get()
                self.records[YTid] = record
                self.channel_videos[cid].append(YTid)
                counter += 1
            except Exception:
                continue

        return counter
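The comment in getNewestVideos above notes that the single-batch approach breaks once the channel list exceeds the batch limit of 1000 requests. A minimal sketch, assuming the same youtube service object and callback, of how the requests could be split across several BatchHttpRequest objects; the helper name and parameters are illustrative, not part of the original class.

def execute_search_in_chunks(youtube, channel_ids, callback, chunk_size=1000):
    # Build and execute one BatchHttpRequest per group of at most chunk_size channels.
    for start in range(0, len(channel_ids), chunk_size):
        batch = BatchHttpRequest(callback=callback)
        for channel_id in channel_ids[start:start + chunk_size]:
            batch.add(youtube.search().list(
                part='snippet', maxResults=50, channelId=channel_id,
                type='video', safeSearch='none'))
        batch.execute()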
Example 47
    def test_new_id(self):
        batch = BatchHttpRequest()

        id_ = batch._new_id()
        self.assertEquals(id_, '1')

        id_ = batch._new_id()
        self.assertEquals(id_, '2')

        batch.add(self.request1, request_id='3')

        id_ = batch._new_id()
        self.assertEquals(id_, '4')
Example 48
    def test_new_id(self):
        batch = BatchHttpRequest()

        id_ = batch._new_id()
        self.assertEqual('1', id_)

        id_ = batch._new_id()
        self.assertEqual('2', id_)

        batch.add(self.request1, request_id='3')

        id_ = batch._new_id()
        self.assertEqual('4', id_)
Example 49
            def new_batch_http_request(callback=None):
                """Create a BatchHttpRequest object based on the discovery document.

        Args:
          callback: callable, A callback to be called for each response, of the
            form callback(id, response, exception). The first parameter is the
            request id, and the second is the deserialized response object. The
            third is an apiclient.errors.HttpError exception object if an HTTP
            error occurred while processing the request, or None if no error
            occurred.

        Returns:
          A BatchHttpRequest object based on the discovery document.
        """
                return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
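A small sketch of a callback matching the signature described in the docstring above, plus a hypothetical usage; the service and request shown in the commented lines are placeholders.

def example_callback(request_id, response, exception):
    # request_id: the id assigned when the request was added to the batch.
    # response: the deserialized response object (None if the request failed).
    # exception: an apiclient.errors.HttpError, or None if no error occurred.
    if exception is not None:
        print('Request %s failed: %s' % (request_id, exception))
    else:
        print('Request %s succeeded: %s' % (request_id, response))

# Hypothetical usage against a built service object:
# batch = service.new_batch_http_request(callback=example_callback)
# batch.add(service.files().get(fileId='FILE_ID'), request_id='get-file')
# batch.execute()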
Example 50
 def getGmailInbox(self):
     #request = self.client.users().messages().list(userId='me')
     request = self.client.users().messages().list(userId='me',
                                                   includeSpamTrash='true')
     while request != None:
         messages_doc = request.execute()
         batch = BatchHttpRequest(callback=self.saveEmails)
         for msg_id in messages_doc['messages']:
             batch.add(self.client.users().messages().get(userId='me',
                                                          id=msg_id['id']))
         batch.execute()
         request = self.client.users().messages().list_next(
             request, messages_doc)
Example 51
  def test_execute_global_callback(self):
    callbacks = Callbacks()
    batch = BatchHttpRequest(callback=callbacks.f)

    batch.add(self.request1)
    batch.add(self.request2)
    http = HttpMockSequence([
      ({'status': '200',
        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
       BATCH_RESPONSE),
      ])
    batch.execute(http=http)
    self.assertEqual({'foo': 42}, callbacks.responses['1'])
    self.assertEqual({'baz': 'qux'}, callbacks.responses['2'])
Example 52
    def batchUpdateInventory(self, gproduct_qset):
        GoogleProduct = get_model('gmerchant', 'GoogleProduct')

        def product_updated(request_id, unused_response, exception):
            if exception is not None:
                # Do something with the exception.
                print 'There was an error: ' + str(exception)
            else:
                # request_id carries the product's google_shopping_id (see batch.add below)
                gp = GoogleProduct.objects.get(google_shopping_id=request_id)
                gp.google_shopping_updated = datetime.now()
                gp.save()
                print 'Request ID: %s - Product was updated.' % (
                    str(request_id), )

        merchant_id = self.merchant_id

        batch = BatchHttpRequest(callback=product_updated)

        for prod in gproduct_qset:
            product = prod.product
            new_status = {
                #Update the price of the item
                'price': {
                    'value': str(product.stockrecords.first().price_incl_tax),
                    'currency': 'GBP'
                },
                'description':
                len(product.google_shopping_description) > 0 and bleach.clean(
                    smart_text(product.google_shopping_description),
                    strip=True) or bleach.clean(smart_text(
                        product.parent.google_shopping_description),
                                                strip=True),
                'link':
                SITE_ROOT + product.get_absolute_url(),
                'imageLink':
                product.get_first_image_url(),
                #Is it in stock?
                'availability':
                resolve_google_availability(product),
            }
            # Add product update to the batch.
            batch.add(
                self.service.inventory().set(
                    merchantId=merchant_id,
                    productId=prod.google_shopping_id,
                    body=new_status),
                request_id=prod.google_shopping_id)
        try:
            batch.execute()

        except client.AccessTokenRefreshError:
            warn_exp_token()
Example 53
def main(argv):
    # Authenticate and construct service.
    service, flags = sample_tools.init(argv,
                                       'content',
                                       'v2',
                                       __doc__,
                                       __file__,
                                       parents=[argparser])
    merchant_id = flags.merchant_id

    batch = BatchHttpRequest(callback=datafeed_inserted)

    for _ in range(BATCH_SIZE):
        name = 'feed%s' % shopping_common.get_unique_id()
        datafeed = {
            'name': name,
            'contentType': 'products',
            'attributeLanguage': 'en',
            'contentLanguage': 'en',
            'intendedDestinations': ['Shopping'],
            # The file name must be unique per account. We only use unique names in
            # these examples, so it's not an issue here.
            'fileName': name,
            'targetCountry': 'US',
            # You can schedule monthly, weekly or daily.
            #
            # Monthly - set day of month ('dayOfMonth') and hour ('hour')
            # Weekly - set day of week ('weekday') and hour ('hour')
            # Daily - set just the hour ('hour')
            'fetchSchedule': {
                'weekday': 'monday',
                'hour': 6,
                'timeZone': 'America/Los_Angeles',
                'fetchUrl': 'https://feeds.myshop.com/' + name
            },
            'format': {
                'fileEncoding': 'utf-8',
                'columnDelimiter': 'tab',
                'quotingMode': 'value quoting'
            }
        }
        # Add datafeed to the batch.
        batch.add(service.datafeeds().insert(merchantId=merchant_id,
                                             body=datafeed))
    try:
        batch.execute()
    except client.AccessTokenRefreshError:
        print(
            'The credentials have been revoked or expired, please re-run the '
            'application to re-authorize')
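The comments in the datafeed body above describe three scheduling options; the dicts below sketch what each fetchSchedule could look like. The concrete values (hour, weekday, day of month, fetchUrl) are placeholders taken loosely from the example, not a definitive reference.

# Daily: just the hour.
daily_schedule = {'hour': 6,
                  'timeZone': 'America/Los_Angeles',
                  'fetchUrl': 'https://feeds.myshop.com/examplefeed'}

# Weekly: day of week plus hour.
weekly_schedule = {'weekday': 'monday',
                   'hour': 6,
                   'timeZone': 'America/Los_Angeles',
                   'fetchUrl': 'https://feeds.myshop.com/examplefeed'}

# Monthly: day of month plus hour.
monthly_schedule = {'dayOfMonth': 1,
                    'hour': 6,
                    'timeZone': 'America/Los_Angeles',
                    'fetchUrl': 'https://feeds.myshop.com/examplefeed'}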
Example 54
def infoUpdate(vSources, forceUpdate=False):
    batch = BatchHttpRequest()
    sourcesToUpdate = False
    
    for vSource in vSources:
        if forceUpdate or vSource.needsInfoUpdate():
            request, callback = vSource.fetchInfoBatchRequest()
            batch.add(request, callback=callback)
            
            sourcesToUpdate = True
        
        
    if sourcesToUpdate:
        batch.execute()
Example 55
    def share(self, file_ids, emails, role='reader', callback=None):
        """
        Share a list of files to a list of e-mails.
        """
        if not isinstance(file_ids, (list, tuple)):
            raise ValueError(
                "We are expecting a list of file_ids, not %s" % file_ids
            )

        if not isinstance(emails, (list, tuple)):
            raise ValueError(
                "We are expecting a list of emails, not %s" % emails
            )

        self._pydrive.auth.Authorize()
        perms = self._pydrive.auth.service.permissions()
        http = self._pydrive.auth.http
        batch_response = OrderedDict()

        def batch_callback(request_id, response, exception):
            file_id = request_id.split('__', 2)[1]
            if exception:
                logger.error("Error on drive batch operation for %s: %s",
                             request_id, exception)
                batch_response[file_id].update({'exception': exception})
            else:
                batch_response[file_id].update(response)

        batch_request = BatchHttpRequest(callback=batch_callback)

        for file_id in list(set(file_ids)):
            for email in list(set(emails)):
                kwargs = {
                    'fileId': file_id,
                    'body': {
                        'value': email,
                        'type': 'user',
                        'role': role
                    }
                }
                batch_id = 'share__%s__%s' % (file_id, uuid4())
                batch_request.add(perms.insert(**kwargs), request_id=batch_id)
                logger.info(
                    "Batch share request added with ID %s and data %s",
                    batch_id, kwargs
                )
                batch_response[file_id] = {'insert_kwargs': kwargs}

        batch_request.execute()
        return batch_response
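A hedged usage sketch for the share() helper above; `drive` stands in for whatever object defines the method, and the file IDs and addresses are placeholders.

# responses = drive.share(
#     file_ids=['FILE_ID_1', 'FILE_ID_2'],
#     emails=['alice@example.com', 'bob@example.com'],
#     role='writer')
# for file_id, result in responses.items():
#     print(file_id, result.get('exception') or 'shared')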
Example 56
  def execute(self):
    """Executes requests in the queue.

    Removes items from the queue (up to the set quota), adds them to a
    BatchHttpRequest object, and then calls the BatchHttpRequest object's
    execute method.
    """
    batch = BatchHttpRequest(callback=self.call_back)
    for _ in range(self.quota):
      if self.queue.qsize() == 0:
        break
      request, request_id = self.queue.get()
      batch.add(request, request_id=request_id)

    batch.execute(http=httplib2.Http())
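A rough usage sketch, assuming self.queue holds (request, request_id) tuples as the get() call above implies and that quota and call_back are configured elsewhere; the worker object and message id are placeholders.

# worker.queue.put(
#     (service.users().messages().get(userId='me', id='MSG_ID'), 'MSG_ID'))
# worker.quota = 100    # drain at most 100 queued requests per call
# worker.execute()      # builds one BatchHttpRequest and sends it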
Example 57
def add_users(users, permissions):
    """Adds users to every view (profile) with the given permissions.

  Args:
    users: A list of user email addresses.
    permissions: A list of user permissions.
  Note: this code assumes you have MANAGE_USERS level permissions
  to each profile and an authorized Google Analytics service object.
  """

    # Get the a full set of account summaries.
    account_summaries = analytics.management().accountSummaries().list(
    ).execute()

    # Loop through each account.
    for account in account_summaries.get('items', []):
        account_id = account.get('id')

        # Loop through each user.
        for user in users:
            # Create the BatchHttpRequest object.
            batch = BatchHttpRequest(callback=call_back)

            # Loop through each property.
            for property_summary in account.get('webProperties', []):
                property_id = property_summary.get('id')

                # Loop through each view (profile).
                for view in property_summary.get('profiles', []):
                    view_id = view.get('id')

                    # Construct the Profile User Link.
                    link = analytics.management().profileUserLinks().insert(
                        accountId=account_id,
                        webPropertyId=property_id,
                        profileId=view_id,
                        body={
                            'permissions': {
                                'local': permissions
                            },
                            'userRef': {
                                'email': user
                            }
                        })
                    batch.add(link)

            # Execute the batch request for each user.
            batch.execute()
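A hypothetical call to add_users above; the address and permission name are placeholders, and call_back plus the authorized analytics service object are assumed to exist as the docstring notes.

# Grant one user read access to every view (profile) in every account:
# add_users(['new.analyst@example.com'], ['READ_AND_ANALYZE'])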