def Post(self):
  """Accepts a histogram-set upload and stages it for async processing.

  On dev_appserver the body is processed synchronously so developers see
  errors immediately and are not required to compress the payload; a fresh
  upload completion token is always created for such requests. In prod the
  (zlib-compressed) payload is written to cloud storage and a task on the
  'default' taskqueue processes it later, so the caller sees no errors.

  Returns:
    The upload completion token info dict when a token was created,
    otherwise None.

  Raises:
    api_request_handler.BadRequestError: if the payload is missing, or is
        neither zlib-compressed nor supplied via the 'data' form parameter.
  """
  if utils.IsDevAppserver():
    # Don't require developers to zip the body.
    # In prod, the data will be written to cloud storage and processed on the
    # taskqueue, so the caller will not see any errors. In dev_appserver,
    # process the data immediately so the caller will see errors.
    # Also always create upload completion token for such requests.
    token, token_info = self._CreateUploadCompletionToken()
    ProcessHistogramSet(
        _LoadHistogramList(StringIO.StringIO(self.request.body)), token)
    token.UpdateStateAsync(
        upload_completion_token.State.COMPLETED).wait()
    return token_info

  with timing.WallTimeLogger('decompress'):
    try:
      data_str = self.request.body
      # Try to decompress at most 100 bytes from the data, only to determine
      # if we've been given compressed payload.
      zlib.decompressobj().decompress(data_str, 100)
      logging.info('Received compressed data.')
    except zlib.error:
      # Not compressed: fall back to the 'data' form parameter and compress
      # it ourselves so the staged GCS object is always compressed.
      data_str = self.request.get('data')
      if not data_str:
        raise api_request_handler.BadRequestError(
            'Missing or uncompressed data.')
      data_str = zlib.compress(data_str)
      logging.info('Received uncompressed data.')

  if not data_str:
    raise api_request_handler.BadRequestError('Missing "data" parameter')

  filename = uuid.uuid4()
  params = {'gcs_file_path': '/add-histograms-cache/%s' % filename}

  gcs_file = cloudstorage.open(
      params['gcs_file_path'],
      'w',
      content_type='application/octet-stream',
      retry_params=_RETRY_PARAMS)
  # try/finally so the GCS handle is closed even if the write raises;
  # the previous code leaked the handle on a failed write.
  try:
    gcs_file.write(data_str)
  finally:
    gcs_file.close()

  token_info = None
  if utils.ShouldTurnOnUploadCompletionTokenExperiment():
    _, token_info = self._CreateUploadCompletionToken(
        params['gcs_file_path'])
    params['upload_completion_token'] = token_info['token']

  retry_options = taskqueue.TaskRetryOptions(
      task_retry_limit=_TASK_RETRY_LIMIT)
  queue = taskqueue.Queue('default')
  queue.add(
      taskqueue.Task(
          url='/add_histograms/process',
          payload=json.dumps(params),
          retry_options=retry_options))

  return token_info
def Post(self):
  """Accepts a histogram-set upload and stages it for async processing.

  On dev_appserver the JSON body is processed synchronously so developers
  see errors immediately and need not compress the payload. In prod the
  (zlib-compressed) payload is written to cloud storage and a task on the
  'default' taskqueue processes it later, so the caller sees no errors.

  Raises:
    api_request_handler.BadRequestError: if the payload is missing, or is
        neither zlib-compressed nor supplied via the 'data' form parameter.
  """
  if utils.IsDevAppserver():
    # Don't require developers to zip the body.
    # In prod, the data will be written to cloud storage and processed on the
    # taskqueue, so the caller will not see any errors. In dev_appserver,
    # process the data immediately so the caller will see errors.
    ProcessHistogramSet(json.loads(self.request.body))
    return

  with timing.WallTimeLogger('decompress'):
    try:
      data_str = self.request.body
      # Try to decompress at most 100 bytes from the data, only to determine
      # if we've been given compressed payload.
      zlib.decompressobj().decompress(data_str, 100)
      logging.info('Received compressed data.')
    except zlib.error:
      # Not compressed: fall back to the 'data' form parameter and compress
      # it ourselves so the staged GCS object is always compressed.
      data_str = self.request.get('data')
      if not data_str:
        raise api_request_handler.BadRequestError(
            'Missing or uncompressed data.')
      data_str = zlib.compress(data_str)
      logging.info('Received uncompressed data.')

  if not data_str:
    raise api_request_handler.BadRequestError('Missing "data" parameter')

  filename = uuid.uuid4()
  params = {'gcs_file_path': '/add-histograms-cache/%s' % filename}

  gcs_file = cloudstorage.open(
      params['gcs_file_path'],
      'w',
      content_type='application/octet-stream',
      retry_params=_RETRY_PARAMS)
  # try/finally so the GCS handle is closed even if the write raises;
  # the previous code leaked the handle on a failed write.
  try:
    gcs_file.write(data_str)
  finally:
    gcs_file.close()

  retry_options = taskqueue.TaskRetryOptions(
      task_retry_limit=_TASK_RETRY_LIMIT)
  queue = taskqueue.Queue('default')
  queue.add(
      taskqueue.Task(
          url='/add_histograms/process',
          payload=json.dumps(params),
          retry_options=retry_options))
def _CheckIsLoggedIn(self):
  """Ensures the request is authenticated, except on dev_appserver.

  Delegates to api_auth.Authorize() outside of dev_appserver; the check
  is skipped entirely when running locally.
  """
  if not utils.IsDevAppserver():
    api_auth.Authorize()
def _CheckIsInternalUser(self):
  """Ensures the requester is an authenticated internal user.

  Skipped entirely on dev_appserver. Otherwise verifies login first,
  then raises ForbiddenError if the user is not internal.
  """
  if not utils.IsDevAppserver():
    self._CheckIsLoggedIn()
    if not utils.IsInternalUser():
      raise ForbiddenError()