def remove(self, path, recursive=True):
    (bucket, obj) = self._path_to_bucket_and_key(path)
    if self._is_root(obj):
        raise InvalidDeleteException(
            'Cannot delete root of bucket at path {}'.format(path))

    if self._obj_exists(bucket, obj):
        # Single object: delete it directly and wait until the deletion is visible.
        self.client.objects().delete(bucket=bucket, object=obj).execute()
        _wait_for_consistency(lambda: not self._obj_exists(bucket, obj))
        return True

    if self.isdir(path):
        if not recursive:
            raise InvalidDeleteException(
                'Path {} is a directory. Must use recursive delete'.format(path))

        # Directory prefix: issue all the per-object deletes as one batched request.
        req = http.BatchHttpRequest()
        for it in self._list_iter(bucket, self._add_path_delimiter(obj)):
            req.add(self.client.objects().delete(bucket=bucket, object=it['name']))
        req.execute()

        _wait_for_consistency(lambda: not self.isdir(path))
        return True

    return False
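
# A minimal usage sketch of remove() above. The client argument is assumed to be an
# instance of the class that defines remove(), and the gs://bucket/key path layout is
# inferred from _path_to_bucket_and_key; both are assumptions, not part of the snippet.
def remove_tree(client, path):
    # Recursively delete an object or a whole "directory" prefix; returns False
    # if nothing existed at the path.
    return client.remove(path, recursive=True)
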
def send_all(self, messages, dry_run=False):
    """Sends the given messages to FCM via the batch API."""
    if not isinstance(messages, list):
        raise ValueError('Messages must be a list of messaging.Message instances.')
    if len(messages) > 100:
        raise ValueError('send_all messages must not contain more than 100 messages.')

    responses = []

    def batch_callback(_, response, error):
        # Collect each sub-response (or its parsed error) as the batch completes.
        exception = None
        if error:
            exception = self._parse_batch_error(error)
        send_response = SendResponse(response, exception)
        responses.append(send_response)

    batch = http.BatchHttpRequest(batch_callback, _MessagingService.FCM_BATCH_URL)
    for message in messages:
        body = json.dumps(self._message_data(message, dry_run))
        req = http.HttpRequest(
            http=self._transport,
            postproc=self._postproc,
            uri=self._fcm_url,
            method='POST',
            body=body,
            headers=self._fcm_headers
        )
        batch.add(req)

    try:
        batch.execute()
    except googleapiclient.http.HttpError as error:
        raise self._parse_batch_error(error)
    else:
        return BatchResponse(responses)
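
# A minimal usage sketch. In firebase_admin, the public messaging.send_all() wrapper
# delegates to a service method like the one above; the device_tokens list and the
# notification contents here are hypothetical, and the Firebase app is assumed to be
# initialised already.
def send_to_tokens(device_tokens, dry_run=False):
    from firebase_admin import messaging

    messages = [
        messaging.Message(
            token=token,
            notification=messaging.Notification(title='Hello', body='Batch send test'),
        )
        for token in device_tokens
    ]
    batch_response = messaging.send_all(messages, dry_run=dry_run)
    # BatchResponse exposes per-message results plus aggregate counts.
    return batch_response.success_count, batch_response.failure_count
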
def _ExecuteBatchRequest(self, requests):
    """Executes the given |requests| in batches.

    Documentation:
      * https://developers.google.com/api-client-library/python/guide/batch

    Args:
      requests: list of (request_id, request) pairs
    """
    def _BatchHttpRequestCallback(request_id, response, exception):
        if exception is not None:
            print('Error: %s' % exception)
        else:
            self._PrintJsonResponse(request_id, response)

    batch = None
    num_batch_requests = 0
    MAX_BATCH_SIZE = 1000  # documented at the URL above

    for request_id, request in requests:
        if batch is None:
            batch = http.BatchHttpRequest(callback=_BatchHttpRequestCallback)
        batch.add(request, request_id=request_id)
        num_batch_requests += 1
        # Flush a full batch and start a new one.
        if num_batch_requests == MAX_BATCH_SIZE:
            batch.execute(http=self.__auth_http)
            batch = None
            num_batch_requests = 0

    # Execute the left-over requests in the batch if there are any.
    if batch is not None:
        batch.execute(http=self.__auth_http)
        batch = None
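
# A minimal usage sketch of _ExecuteBatchRequest above. The storage service and bucket
# names are hypothetical; the only contract taken from the method is that it expects a
# list of (request_id, request) pairs built from a googleapiclient service.
def _ListBucketsInBatch(self, storage_service, bucket_names):
    requests = [
        (name, storage_service.objects().list(bucket=name))
        for name in bucket_names
    ]
    self._ExecuteBatchRequest(requests)
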
def annotate_text_batch(texts, *args, **kwargs):
    num_texts = len(texts)
    queue = multiprocessing.Queue(num_texts)
    responses = [None] * num_texts

    batch = http.BatchHttpRequest(
        callback=_batch_accumulate(queue),
        batch_uri='https://language.googleapis.com/batch')
    for i, text in enumerate(texts):
        batch.add(_annotate_text_request(text, *args, **kwargs), request_id=str(i))

    try:
        _with_retries(batch.execute, 'NL Batch')
    # This only happens for 400 errors
    except http.HttpError:
        return responses

    for _ in range(num_texts):
        reqid, resp, exc = queue.get()
        reqid = int(reqid)
        if exc:
            # Fall back to doing it serially
            logging.warning('Error sending batch request ({}).\n'
                            'Falling back to serial'.format(exc))
            responses[reqid] = annotate_text(texts[reqid], *args, **kwargs)
        else:
            responses[reqid] = resp
    return responses
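
# A sketch of the _batch_accumulate helper assumed by annotate_text_batch above: it
# returns a BatchHttpRequest callback whose (request_id, response, exception) arguments
# are pushed onto the shared queue that the result loop drains. This shape is inferred
# from how queue.get() is unpacked, not taken from the original helper.
def _batch_accumulate(queue):
    def _callback(request_id, response, exception):
        queue.put((request_id, response, exception))
    return _callback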