def get_uploads(self):
    """Return every upload sent to this controller.

    Returns:
        A dict mapping field names to lists of BlobInfo objects. Each
        BlobInfo carries an extra ``cloud_storage`` attribute when the
        upload went to cloud storage; be aware that this attribute is
        not persisted.
    """
    if self.__uploads is not None:
        return self.__uploads
    collected = {}
    for field_name, field in self.controller.request.params.items():
        if not isinstance(field, cgi.FieldStorage):
            continue
        if 'blob-key' not in field.type_options:
            continue
        parsed_blob = blobstore.parse_blob_info(field)
        cloud_details = blobstore.parse_file_info(field)
        # Re-fetch by key to work around mangled names.
        parsed_blob = blobstore.BlobInfo.get(parsed_blob.key())
        # Attach the cloud storage data (not persisted).
        setattr(parsed_blob, 'cloud_storage', cloud_details)
        collected.setdefault(field_name, []).append(parsed_blob)
    self.__uploads = collected
    return self.__uploads
def get_file_infos(self, field_name=None):
    """Get the file infos associated to the uploads sent to this handler.

    Args:
        field_name: Only select uploads that were sent as a specific field.
            Specify None to select all the uploads.

    Returns:
        A list of FileInfo records corresponding to each upload. Empty list
        if there are no FileInfo records for field_name.
    """
    if self.__file_infos is None:
        # Lazily build the cache on first access.
        self.__file_infos = collections.defaultdict(list)
        for key, value in self.request.params.items():
            if isinstance(value, cgi.FieldStorage):
                # Only multipart parts carrying a blob-key are uploads.
                if 'blob-key' in value.type_options:
                    self.__file_infos[key].append(
                        blobstore.parse_file_info(value))
    if field_name:
        return list(self.__file_infos.get(field_name, []))
    # BUGFIX: dict.itervalues() is Python 2 only; .values() works on both
    # Python 2 and 3 (matches the already-ported sibling implementation).
    results = []
    for uploads in self.__file_infos.values():
        results.extend(uploads)
    return results
def get_file_infos(self, field_name=None):
    """Get the file infos associated to the uploads sent to this handler.

    Args:
        field_name: Only select uploads that were sent as a specific field.
            Specify None to select all the uploads.

    Returns:
        A list of FileInfo records corresponding to each upload. Empty list
        if there are no FileInfo records for field_name.
    """
    if self.__file_infos is None:
        # Build the cache lazily the first time anything asks for it.
        infos = collections.defaultdict(list)
        for param_name, param in list(self.request.params.items()):
            is_upload = (isinstance(param, cgi.FieldStorage)
                         and 'blob-key' in param.type_options)
            if is_upload:
                infos[param_name].append(blobstore.parse_file_info(param))
        self.__file_infos = infos
    if field_name:
        return list(self.__file_infos.get(field_name, []))
    results = []
    for uploads in six.itervalues(self.__file_infos):
        results.extend(uploads)
    return results
def get_uploads(self, request, field_name=None, populate_post=False):
    """Get uploads sent to this handler.

    Modified to support GCS from: https://gist.github.com/harperreed/305322

    Args:
        field_name: Only select uploads that were sent as a specific field.
        populate_post: Add the non blob fields to request.POST

    Returns:
        A list of BlobInfo or FileInfo records corresponding to each upload.
        Empty list if there are no blob-info records for field_name.
    """
    # NOTE(review): when this lives inside a class, `request.__uploads` is
    # name-mangled to `_Cls__uploads`, so this hasattr('__uploads') check
    # never matches and the cache is rebuilt on every call -- consider a
    # non-dunder attribute name.
    # BUGFIX: `hasattr(...) == False` -> `not hasattr(...)`.
    if not hasattr(request, '__uploads'):
        fields = request.POST.mixed()
        request.__uploads = {}
        for key, field in fields.items():
            if (isinstance(field, cgi.FieldStorage)
                    and 'blob-key' in field.type_options):
                logging.warning(field)
                logging.warning(field.type_options)
                logging.warning(field.type_options['blob-key'])
                logging.warning(type(field.type_options['blob-key']))
                # Every blob-keyed field is treated as a Cloud Storage
                # upload (the blobstore branch was already disabled).
                file_info = blobstore.parse_file_info(field)
                logging.warning(file_info)
                request.__uploads.setdefault(key, []).append(file_info)
            if populate_post:
                # NOTE(review): the original flattened source is ambiguous
                # about whether this applied only to blob fields -- confirm.
                request.POST[key] = field.value
    if field_name:
        try:
            return list(request.__uploads[field_name])
        except KeyError:
            return []
    # BUGFIX: dict.itervalues() is Python 2 only; .values() works on both.
    results = []
    for uploads in request.__uploads.values():
        results += uploads
    return results
def get_uploads(request, field_name=None, populate_post=False):
    """Get uploads sent to this handler.

    Args:
        field_name: Only select uploads that were sent as a specific field.
        populate_post: Add the non blob fields to request.POST

    Returns:
        A list of BlobInfo records corresponding to each upload. Empty list
        if there are no blob-info records for field_name.

    http://pastebin.com/9haziPhd
    """
    # BUGFIX: the docstring must be the first statement in the body; in the
    # original it sat below these imports and was silently discarded.
    import cgi
    import logging

    from google.appengine.ext import blobstore

    # BUGFIX: `hasattr(...) == False` -> `not hasattr(...)`.
    if not hasattr(request, '__uploads'):
        request.META['wsgi.input'].seek(0)
        fields = cgi.FieldStorage(request.META['wsgi.input'],
                                  environ=request.META)
        request.__uploads = {}
        if populate_post:
            request.POST = {}
        for key in fields.keys():
            field = fields[key]
            logging.warning(field.type_options)
            if (isinstance(field, cgi.FieldStorage)
                    and 'blob-key' in field.type_options):
                # TODO: Differentiate between cloudstorage and blobstore
                # uploads ('blob-key': 'encoded_gs_file:....').
                fileinfo_info = blobstore.parse_file_info(field)
                logging.warning(fileinfo_info)
                request.__uploads.setdefault(key, []).append(fileinfo_info)
                if populate_post:
                    request.POST[key] = [str(fileinfo_info.gs_object_name)]
            elif populate_post:
                # Non-blob fields: collect the raw values.
                if isinstance(field, list):
                    request.POST[key] = []
                    for item in field:
                        request.POST[key].append(item.value)
                else:
                    request.POST[key] = [field.value]
    if field_name:
        try:
            return list(request.__uploads[field_name])
        except KeyError:
            return []
    # BUGFIX: dict.itervalues() is Python 2 only; .values() works on both.
    results = []
    for uploads in request.__uploads.values():
        results += uploads
    return results
def _convert_value(self, value, path=None):
    """Format ``value`` into image model instances (or structured dicts).

    Args:
        value: A single item or list of items; for upload properties each
            item must be a cgi.FieldStorage carrying a blob-key.
        path: Property path used in error formatting; defaults to
            ``self._code_name``.

    Returns:
        The formatted value (a list when repeated, a single instance
        otherwise), or ``tools.Nonexistent`` when optional and empty.

    Raises:
        FormatError: on invalid input, wrong image type, or a missing
            required value.
    """
    if path is None:
        path = self._code_name
    if not self._repeated:
        value = [value]
    out = []
    total = len(value) - 1
    for i, v in enumerate(value):
        if not self._upload:
            if not isinstance(v, dict) and not self._required:
                continue
            out.append(self._structured_property_format(v, path))
        else:
            if not isinstance(v, cgi.FieldStorage):
                if self._required:
                    raise FormatError('invalid_input')
                else:
                    continue
            # These will throw errors if 'v' is not cgi.FieldStorage and it
            # does not have a compatible blob-key.
            file_info = blobstore.parse_file_info(v)
            blob_info = blobstore.parse_blob_info(v)
            # We only accept jpg/png. This list can be and should be
            # customizable on the property option itself?
            if file_info.content_type not in ('image/jpeg', 'image/jpg',
                                              'image/png'):
                # First line of validation based on meta data from client.
                raise FormatError('invalid_image_type')
            new_image = self.get_modelclass()(**{
                'size': file_info.size,
                'content_type': file_info.content_type,
                'gs_object_name': file_info.gs_object_name,
                'image': blob_info.key(),
                '_sequence': total - i  # preserve original ordering
            })
            out.append(new_image)
    if not out:
        # If field is not required and there isn't anything processed,
        # return non existent.
        if not self._required:
            return tools.Nonexistent
        else:
            raise FormatError('required')  # otherwise required
    if self._upload:
        if self._process_config.get(
                'transform') or self._process_config.get('copy'):
            self.process(out)
        else:
            self.generate_serving_urls(out)
        # NOTE(review): reconstructed from flattened source -- the measure
        # step may have belonged inside the else-branch; confirm.
        if self._process_config.get('measure', True):
            self.generate_measurements(out)
        # BUGFIX: map() is lazy on Python 3, so the save_blobs_on_success
        # side effects would never run; use an explicit loop instead
        # (identical behavior on Python 2).
        for image in out:
            self.save_blobs_on_success(image.image)
    if not self._repeated:
        out = out[0]
    return out
def get_uploads(self, request, field_name=None, populate_post=False):
    """Get uploads sent to this handler.

    Modified to support GCS from: https://gist.github.com/harperreed/305322

    Args:
        field_name: Only select uploads that were sent as a specific field.
        populate_post: Add the non blob fields to request.POST

    Returns:
        A list of BlobInfo or FileInfo records corresponding to each upload.
        Empty list if there are no blob-info records for field_name.
    """
    # BUGFIX: `hasattr(...) == False` -> `not hasattr(...)`.
    if not hasattr(request, '__uploads'):
        request.META['wsgi.input'].seek(0)
        fields = cgi.FieldStorage(request.META['wsgi.input'],
                                  environ=request.META)
        request.__uploads = {}
        if populate_post:
            request.POST = {}
        for key in fields.keys():
            field = fields[key]
            if (isinstance(field, cgi.FieldStorage)
                    and 'blob-key' in field.type_options):
                logging.warning(field)
                logging.warning(field.type_options)
                logging.warning(field.type_options['blob-key'])
                logging.warning(type(field.type_options['blob-key']))
                # Every blob-keyed field is treated as a Cloud Storage
                # upload (the blobstore branch was already disabled).
                file_info = parse_file_info(field)
                logging.warning(file_info)
                request.__uploads.setdefault(key, []).append(file_info)
            if populate_post:
                # NOTE(review): the original flattened source is ambiguous
                # about whether this applied only to blob fields -- confirm.
                request.POST[key] = field.value
    if field_name:
        try:
            return list(request.__uploads[field_name])
        except KeyError:
            return []
    # BUGFIX: dict.itervalues() is Python 2 only; .values() works on both.
    results = []
    for uploads in request.__uploads.values():
        results += uploads
    return results
def post(self):
    """Upload callback: record the GCS path of each uploaded picture.

    NOTE(review): the File entity is created but never stored (the put()
    call is commented out in the original) -- confirm whether persisting
    was intentionally disabled.
    """
    base_handler = BaseHandler()
    try:
        upload = self.get_uploads()[0]
        file_info = None
        for name, field_storage in self.request.POST.items():
            # Plain text fields arrive as unicode strings; skip those.
            if isinstance(field_storage, unicode):
                continue
            file_info = blobstore.parse_file_info(field_storage)
            photo = model.File(created_by=base_handler.user.key,
                               file_typ='picture')
            # Strip the leading '/gs' prefix from the GCS object name.
            file_info.file_path = file_info.gs_object_name[3:]
            # user_photo.put()
    # BUGFIX: a bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # catch Exception instead so the best-effort 500 response is kept.
    except Exception:
        self.error(500)
def file_upload_receive(current, upload_request, client_id):
    """Record a completed GCS file upload against its flow.

    Decodes the FileUploadRequest, registers the received blob in the
    uploads table and links it to the flow via upload_files.
    """
    upload_request = location.FileUploadRequest.from_json(upload_request)
    db = current.db
    file_info = blobstore.parse_file_info(current.request.vars['file'])
    if not file_info:
        # Nothing was uploaded; nothing to record.
        return dict()
    blob_key = blobstore.create_gs_key(file_info.gs_object_name)
    new_upload_id = db.uploads.insert(blob_key=blob_key, state="received")
    db.upload_files.insert(
        file_information=upload_request.file_information,
        upload_id=new_upload_id,
        flow_id=upload_request.flow_id,
        client_id=client_id)
    return dict()
def post(self, ident):
    """Called internally by the system upon successfully writing file to GCS."""
    logging.info('Creating avatar...')
    # The uploaded file's path in GCS ("gs_object_name") comes from the
    # 'file' field of the POST body.
    uploaded = self.request.POST['file']
    gs_object_name = blobstore.parse_file_info(uploaded).gs_object_name
    # Persist the GCS path on the avatar entity, creating it if necessary.
    try:
        avatar = Avatar.get(ident)
        avatar.update(gs_object_name)
    except AvatarDoesNotExistError:
        avatar = Avatar.create(ident, gs_object_name)
    logging.info('Created avatar: {}'.format(avatar))
    # Redirect the user back to the homepage.
    self.redirect('/')
def value_format(self, value):
    """Format incoming upload value(s) into image model instances.

    Args:
        value: A cgi.FieldStorage (or list of them) for fresh uploads, or
            dict(s) for already-structured data; anything else is delegated
            to the parent property's value_format.

    Returns:
        A model instance (or list when repeated), or Nonexistent when the
        optional value is absent.

    Raises:
        orm.PropertyError: when the uploaded content type is not jpg/png.
    """
    if self._repeated and not isinstance(value, list):
        value = [value]
    # Delegate anything that is not an actual upload to the parent class.
    if ((self._repeated and (not len(value)
                             or not isinstance(value[0], cgi.FieldStorage)))
            or (not self._repeated
                and not isinstance(value, cgi.FieldStorage))):
        return super(_BaseImageProperty, self).value_format(value)
    value = self._property_value_format(value)
    if value is Nonexistent:
        return value
    if not self._repeated:
        value = [value]
    out = []
    for i, v in enumerate(value):
        if isinstance(v, dict):
            out.append(self._structured_property_format(v))
        else:
            if not isinstance(v, cgi.FieldStorage) and not self._required:
                # If the field is not required, and it's not an actual
                # upload, immediately return Nonexistent.
                return Nonexistent
            # These will throw errors if the 'v' is not cgi.FieldStorage
            # and it does not have compatible blob-key.
            file_info = blobstore.parse_file_info(v)
            blob_info = blobstore.parse_blob_info(v)
            # We only accept jpg/png. This list can be and should be
            # customizable on the property option itself?
            meta_required = ('image/jpeg', 'image/jpg', 'image/png')
            if file_info.content_type not in meta_required:
                # First line of validation based on meta data from client.
                raise orm.PropertyError('invalid_image_type')
            new_image = self.get_modelclass()(**{
                'size': file_info.size,
                'content_type': file_info.content_type,
                'gs_object_name': file_info.gs_object_name,
                'image': blob_info.key(),
                '_sequence': i})
            out.append(new_image)
    if self._process_config.get('transform') or self._process_config.get('copy'):
        self.process(out)
    else:
        self.generate_serving_urls(out)
    # NOTE(review): reconstructed from flattened source -- the measure step
    # may have belonged inside the else-branch; confirm.
    if self._process_config.get('measure', True):
        self.generate_measurements(out)
    # BUGFIX: map() is lazy on Python 3, so the save_blobs_on_success side
    # effects would never run; use an explicit loop instead (identical
    # behavior on Python 2).
    for img in out:
        self.save_blobs_on_success(img.image)
    if not self._repeated:
        out = out[0]
    return out
def upload_receive(current, type, collection_id, flow_id, client_id, part=0):
    """Handle GCS callback.

    The user uploads to GCS directly and once the upload is complete, GCS
    calls this handler with the file information. This API is not normally
    called directly.
    """
    database = current.db
    # Resolve the uploaded file's GCS object name and its blob key.
    uploaded = blobstore.parse_file_info(current.request.vars['file'])
    object_name = uploaded.gs_object_name
    key = blobstore.create_gs_key(object_name)
    # Upsert the collection part record keyed by collection_id.
    database.collections.update_or_insert(
        database.collections.collection_id == collection_id,
        flow_id=flow_id,
        client_id=client_id,
        collection_id=collection_id,
        part=part,
        blob_key=key,
        gs_object_name=object_name)
    return dict()
def handle_http_request(env, start_response, dict=dict, isinstance=isinstance,
                        urlunquote=urlunquote, unicode=unicode,
                        get_response_headers=lambda: None):
    """WSGI entry point: route ``env`` to a registered handler and respond.

    This is Python 2 code (``unicode``, ``except X, e`` syntax,
    ``iteritems``). The default arguments bind builtins/globals as locals
    for fast lookup; the ``get_response_headers`` default is shadowed by
    the nested def once request parsing succeeds.
    """
    reqlocal.template_error_traceback = None
    try:
        http_method = env['REQUEST_METHOD']
        ssl_mode = env['wsgi.url_scheme'] == 'https'
        # Redirect alternative hosts to the canonical host with a 301.
        if RUNNING_ON_GOOGLE_SERVERS and env['HTTP_HOST'] in ALTERNATIVE_HOSTS:
            if SSL_ONLY or ssl_mode:
                dest = 'https://' + CANONICAL_HOST
            else:
                dest = 'http://' + CANONICAL_HOST
            if http_method == 'GET':
                # Preserve path and query string for GET redirects.
                dest += env['PATH_INFO']
                query_string = env['QUERY_STRING']
                if query_string:
                    dest += '?' + query_string
            else:
                # NOTE(review): reconstructed indentation -- this `else` is
                # assumed to pair with the GET check (non-GET requests are
                # redirected to the root); confirm.
                dest += '/'
            start_response(STATUS_301, [('Location', dest)])
            return []
        if http_method == 'OPTIONS':
            start_response(*RESPONSE_OPTIONS)
            return []
        if http_method not in SUPPORTED_HTTP_METHODS:
            start_response(*RESPONSE_NOT_IMPLEMENTED)
            return []
        # Split the path into unicode segments; the first names the handler.
        _path_info = env['PATH_INFO']
        if isinstance(_path_info, unicode):
            _args = [arg for arg in _path_info.split(u'/') if arg]
        else:
            _args = [
                unicode(arg, 'utf-8', 'strict')
                for arg in _path_info.split('/')
                if arg
            ]
        if _args:
            name = _args[0]
            args = _args[1:]
        else:
            name = '/'
            args = ()
        routed = 0
        # Fall back to the pluggable router when no static handler matches.
        if name not in HANDLERS:
            router = handle_http_request.router
            if router:
                _info = router(env, _args)
                if not _info:
                    logging.error("No handler found for: %s" % _path_info)
                    raise NotFound
                name, args = _info
                routed = 1
            else:
                logging.error("Handler not found: %s" % name)
                raise NotFound
        handler, renderers, config = HANDLERS[name]
        kwargs = {}
        ctx = Context(name, env, ssl_mode)
        ctx.was_routed = routed
        # Decode the query string ('&' or ';' separated) into kwargs.
        for part in [
            sub_part for part in env['QUERY_STRING'].lstrip('?').split('&')
            for sub_part in part.split(';')
        ]:
            if not part:
                continue
            part = part.split('=', 1)
            if len(part) == 1:
                value = None
            else:
                value = part[1]
            key = urlunquote(part[0].replace('+', ' '))
            if value:
                value = unicode(urlunquote(value.replace('+', ' ')),
                                'utf-8', 'strict')
            else:
                # Empty values are normalised to None.
                value = None
            kwargs[key] = value
        # Parse the POST body if it exists and is of a known content type.
        if http_method == 'POST':
            content_type = env.get('CONTENT-TYPE', '')
            if not content_type:
                content_type = env.get('CONTENT_TYPE', '')
            if ';' in content_type:
                # Strip parameters such as "; charset=...".
                content_type = content_type.split(';', 1)[0]
            if content_type in VALID_REQUEST_CONTENT_TYPES:
                post_environ = env.copy()
                post_environ['QUERY_STRING'] = ''
                if config['post_encoding']:
                    # Buffer the raw body so it can be re-read later.
                    ctx.request_body = env['wsgi.input'].read()
                    env['wsgi.input'] = StringIO(ctx.request_body)
                    post_encoding = config['post_encoding']
                else:
                    post_encoding = 'utf-8'
                post_data = FieldStorage(environ=post_environ,
                                         fp=env['wsgi.input'],
                                         keep_blank_values=True).list or []
                for field in post_data:
                    key = field.name
                    if field.filename:
                        # File upload: parse blob info when configured.
                        if config['blob']:
                            value = parse_file_info(field)
                        else:
                            value = field
                    else:
                        value = unicode(field.value, post_encoding, 'strict')
                    kwargs[key] = value
            elif content_type == 'application/json':
                kwargs.update(json_decode(env['wsgi.input'].read()))

        def get_response_headers():
            # Figure out the HTTP headers for the response ``cookies``.
            cookie_output = SimpleCookie()
            for name, values in ctx._response_cookies.iteritems():
                name = str(name)
                cookie_output[name] = values.pop('value')
                cur = cookie_output[name]
                for key, value in values.items():
                    if key == 'max_age':
                        key = 'max-age'
                    if key not in COOKIE_KEY_NAMES:
                        continue
                    cur[key] = value
            if cookie_output:
                raw_headers = ctx._raw_headers + [
                    ('Set-Cookie', ck.split(' ', 1)[-1])
                    for ck in str(cookie_output).split('\r\n')
                ]
            else:
                raw_headers = ctx._raw_headers
            # WSGI headers must be byte strings.
            str_headers = []
            new_header = str_headers.append
            for k, v in raw_headers:
                if isinstance(k, unicode):
                    k = k.encode('utf-8')
                if isinstance(v, unicode):
                    v = v.encode('utf-8')
                new_header((k, v))
            return str_headers

        if 'submit' in kwargs:
            del kwargs['submit']
        # Authorise cron/task endpoints; enforce ssl-only handlers.
        if RUNNING_ON_GOOGLE_SERVERS:
            if config['cron']:
                if not env.get('HTTP_X_APPENGINE_CRON'):
                    ctx.check_task_auth(kwargs)
            elif config['task']:
                if not env.get('HTTP_X_APPENGINE_TASKNAME'):
                    ctx.check_task_auth(kwargs)
            elif config['ssl'] and not ssl_mode:
                raise NotFound
        # XSRF protection: require and verify the token.
        if config['xsrf']:
            if 'xsrf' not in kwargs:
                raise AuthorisationError("XSRF token not present.")
            provided_xsrf = kwargs.pop('xsrf')
            if not secure_string_comparison(provided_xsrf, ctx.xsrf_token):
                raise AuthorisationError("XSRF token does not match.")
        # Access control: admin-only and login-required handlers.
        if config['admin'] and not ctx.is_admin:
            if ctx.user_id:
                raise NotFound
            raise Redirect(ctx.get_login_url())
        if (not config['anon']) and (not ctx.user_id):
            raise Redirect(ctx.get_login_url())
        # Try and respond with the result of calling the handler.
        content = handler(ctx, *args, **kwargs)
        # Run the renderer pipeline over the handler's output.
        for renderer in renderers:
            if ctx.end_pipeline:
                break
            if content is None:
                content = {'content': ''}
            elif not isinstance(content, dict):
                content = {'content': content}
            if isinstance(renderer, str):
                content = ctx.render_mako_template(renderer, **content)
            else:
                content = renderer(ctx, **content)
        # Normalise the final content to a UTF-8 byte string.
        if content is None:
            content = ''
        elif isinstance(content, unicode):
            content = content.encode('utf-8')
        elif not isinstance(content, str):
            content = str(content)
        if ctx.after_runners:
            for func in ctx.after_runners:
                func(ctx)
        # Cache headers (never for POST).
        cache = config['cache']
        if cache and http_method != 'POST':
            cache, duration = cache
            etag = md5(content).hexdigest()
            if cache == 'public':
                ctx.cache_response(etag, duration)
            elif cache == 'private':
                ctx.cache_private_response(etag, duration)
        else:
            ctx.do_not_cache_response()
        raise HTTPContent(content)
    # Return the content.
    except HTTPContent, payload:
        content = payload.content
        if 'Content-Type' not in ctx.response_headers:
            ctx.response_headers['Content-Type'] = 'text/html; charset=utf-8'
        ctx.response_headers['Content-Length'] = str(len(content))
        start_response(('%d %s\r\n' % ctx._status), get_response_headers())
        if http_method == 'HEAD':
            return []
        return [content]
def handle_http_request(
    env, start_response, dict=dict, isinstance=isinstance,
    urlunquote=urlunquote, unicode=unicode,
    get_response_headers=lambda: None
):
    """WSGI entry point: route ``env`` to a registered handler and respond.

    Near-duplicate of the sibling implementation above; this variant raises
    ``AuthError`` for XSRF failures. Python 2 code (``unicode``,
    ``except X, e`` syntax, ``iteritems``).
    """
    reqlocal.template_error_traceback = None
    try:
        http_method = env['REQUEST_METHOD']
        ssl_mode = env['wsgi.url_scheme'] == 'https'
        # Redirect alternative hosts to the canonical host with a 301.
        if RUNNING_ON_GOOGLE_SERVERS and env['HTTP_HOST'] in ALTERNATIVE_HOSTS:
            if SSL_ONLY or ssl_mode:
                dest = 'https://' + CANONICAL_HOST
            else:
                dest = 'http://' + CANONICAL_HOST
            if http_method == 'GET':
                # Preserve path and query string for GET redirects.
                dest += env['PATH_INFO']
                query_string = env['QUERY_STRING']
                if query_string:
                    dest += '?' + query_string
            else:
                # NOTE(review): reconstructed indentation -- this `else` is
                # assumed to pair with the GET check; confirm.
                dest += '/'
            start_response(STATUS_301, [('Location', dest)])
            return []
        if http_method == 'OPTIONS':
            start_response(*RESPONSE_OPTIONS)
            return []
        if http_method not in SUPPORTED_HTTP_METHODS:
            start_response(*RESPONSE_NOT_IMPLEMENTED)
            return []
        # Split the path into unicode segments; the first names the handler.
        _path_info = env['PATH_INFO']
        if isinstance(_path_info, unicode):
            _args = [arg for arg in _path_info.split(u'/') if arg]
        else:
            _args = [
                unicode(arg, 'utf-8', 'strict')
                for arg in _path_info.split('/')
                if arg
            ]
        if _args:
            name = _args[0]
            args = _args[1:]
        else:
            name = '/'
            args = ()
        routed = 0
        # Fall back to the pluggable router when no static handler matches.
        if name not in HANDLERS:
            router = handle_http_request.router
            if router:
                _info = router(env, _args)
                if not _info:
                    logging.error("No handler found for: %s" % _path_info)
                    raise NotFound
                name, args = _info
                routed = 1
            else:
                logging.error("Handler not found: %s" % name)
                raise NotFound
        handler, renderers, config = HANDLERS[name]
        kwargs = {}
        ctx = Context(name, env, ssl_mode)
        ctx.was_routed = routed
        # Decode the query string ('&' or ';' separated) into kwargs.
        for part in [
            sub_part for part in env['QUERY_STRING'].lstrip('?').split('&')
            for sub_part in part.split(';')
        ]:
            if not part:
                continue
            part = part.split('=', 1)
            if len(part) == 1:
                value = None
            else:
                value = part[1]
            key = urlunquote(part[0].replace('+', ' '))
            if value:
                value = unicode(
                    urlunquote(value.replace('+', ' ')), 'utf-8', 'strict'
                )
            else:
                # Empty values are normalised to None.
                value = None
            kwargs[key] = value
        # Parse the POST body if it exists and is of a known content type.
        if http_method == 'POST':
            content_type = env.get('CONTENT-TYPE', '')
            if not content_type:
                content_type = env.get('CONTENT_TYPE', '')
            if ';' in content_type:
                # Strip parameters such as "; charset=...".
                content_type = content_type.split(';', 1)[0]
            if content_type in VALID_REQUEST_CONTENT_TYPES:
                post_environ = env.copy()
                post_environ['QUERY_STRING'] = ''
                if config['post_encoding']:
                    # Buffer the raw body so it can be re-read later.
                    ctx.request_body = env['wsgi.input'].read()
                    env['wsgi.input'] = StringIO(ctx.request_body)
                    post_encoding = config['post_encoding']
                else:
                    post_encoding = 'utf-8'
                post_data = FieldStorage(
                    environ=post_environ, fp=env['wsgi.input'],
                    keep_blank_values=True
                ).list or []
                for field in post_data:
                    key = field.name
                    if field.filename:
                        # File upload: parse blob info when configured.
                        if config['blob']:
                            value = parse_file_info(field)
                        else:
                            value = field
                    else:
                        value = unicode(field.value, post_encoding, 'strict')
                    kwargs[key] = value
            elif content_type == 'application/json':
                kwargs.update(json_decode(env['wsgi.input'].read()))

        def get_response_headers():
            # Figure out the HTTP headers for the response ``cookies``.
            cookie_output = SimpleCookie()
            for name, values in ctx._response_cookies.iteritems():
                name = str(name)
                cookie_output[name] = values.pop('value')
                cur = cookie_output[name]
                for key, value in values.items():
                    if key == 'max_age':
                        key = 'max-age'
                    if key not in COOKIE_KEY_NAMES:
                        continue
                    cur[key] = value
            if cookie_output:
                raw_headers = ctx._raw_headers + [
                    ('Set-Cookie', ck.split(' ', 1)[-1])
                    for ck in str(cookie_output).split('\r\n')
                ]
            else:
                raw_headers = ctx._raw_headers
            # WSGI headers must be byte strings.
            str_headers = []; new_header = str_headers.append
            for k, v in raw_headers:
                if isinstance(k, unicode):
                    k = k.encode('utf-8')
                if isinstance(v, unicode):
                    v = v.encode('utf-8')
                new_header((k, v))
            return str_headers

        if 'submit' in kwargs:
            del kwargs['submit']
        # Authorise cron/task endpoints; enforce ssl-only handlers.
        if RUNNING_ON_GOOGLE_SERVERS:
            if config['cron']:
                if not env.get('HTTP_X_APPENGINE_CRON'):
                    ctx.check_task_auth(kwargs)
            elif config['task']:
                if not env.get('HTTP_X_APPENGINE_TASKNAME'):
                    ctx.check_task_auth(kwargs)
            elif config['ssl'] and not ssl_mode:
                raise NotFound
        # XSRF protection: require and verify the token.
        if config['xsrf']:
            if 'xsrf' not in kwargs:
                raise AuthError("XSRF token not present.")
            provided_xsrf = kwargs.pop('xsrf')
            if not secure_string_comparison(provided_xsrf, ctx.xsrf_token):
                raise AuthError("XSRF token does not match.")
        # Access control: admin-only and login-required handlers.
        if config['admin'] and not ctx.is_admin:
            if ctx.user_id:
                raise NotFound
            raise Redirect(ctx.get_login_url())
        if (not config['anon']) and (not ctx.user_id):
            raise Redirect(ctx.get_login_url())
        # Try and respond with the result of calling the handler.
        content = handler(ctx, *args, **kwargs)
        # Run the renderer pipeline over the handler's output.
        for renderer in renderers:
            if ctx.end_pipeline:
                break
            if content is None:
                content = { 'content': '' }
            elif not isinstance(content, dict):
                content = { 'content': content }
            if isinstance(renderer, str):
                content = ctx.render_mako_template(renderer, **content)
            else:
                content = renderer(ctx, **content)
        # Normalise the final content to a UTF-8 byte string.
        if content is None:
            content = ''
        elif isinstance(content, unicode):
            content = content.encode('utf-8')
        elif not isinstance(content, str):
            content = str(content)
        if ctx.after_runners:
            for func in ctx.after_runners:
                func(ctx)
        # Cache headers (never for POST).
        cache = config['cache']
        if cache and http_method != 'POST':
            cache, duration = cache
            etag = md5(content).hexdigest()
            if cache == 'public':
                ctx.cache_response(etag, duration)
            elif cache == 'private':
                ctx.cache_private_response(etag, duration)
        else:
            ctx.do_not_cache_response()
        raise HTTPContent(content)
    # Return the content.
    except HTTPContent, payload:
        content = payload.content
        if 'Content-Type' not in ctx.response_headers:
            ctx.response_headers['Content-Type'] = 'text/html; charset=utf-8'
        ctx.response_headers['Content-Length'] = str(len(content))
        start_response(('%d %s\r\n' % ctx._status), get_response_headers())
        if http_method == 'HEAD':
            return []
        return [content]