def post(self, request, *args, **kwargs):
    email = request.POST['email']
    if self.send_email(email):
        nxt = request.POST.get('next', '/')
        return HttpResponseRedirect(nxt)
    else:
        return HttpResponseNotModified()
def get(self, request, zipped_filename, embedded_filepath):
    """
    Handles GET requests and serves a static file from within the zip file.
    """
    zipped_path = get_path_or_404(zipped_filename)

    # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
    if request.META.get("HTTP_IF_MODIFIED_SINCE"):
        return HttpResponseNotModified()

    with zipfile.ZipFile(zipped_path) as zf:
        # handle H5P files
        if zipped_path.endswith("h5p"):
            if not embedded_filepath or embedded_filepath.startswith("dist/"):
                response = get_h5p(zf, embedded_filepath)
            else:
                # Don't bother doing any hashi parsing of HTML content for h5p
                response = get_embedded_file(
                    request, zf, zipped_filename, embedded_filepath, skip_hashi=True
                )
        else:
            response = get_embedded_file(
                request, zf, zipped_filename, embedded_filepath
            )

        # ensure the browser knows not to try byte-range requests, as we don't support them here
        response["Accept-Ranges"] = "none"

        return response
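# A minimal sketch of what a helper like get_path_or_404 could do, assuming it maps the
# requested zip filename to a local path and raises Http404 when the file is missing.
# The storage lookup via get_content_storage_file_path mirrors the variants further below;
# treat this as an illustrative assumption, not the actual helper.
import os

from django.http import Http404


def get_path_or_404(zipped_filename):
    zipped_path = get_content_storage_file_path(zipped_filename)  # assumed storage lookup
    if not os.path.exists(zipped_path):
        raise Http404('"{}" does not exist locally'.format(zipped_filename))
    return zipped_path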
def check_modified_since(self, cache):
    modified_since_str = self.request.META.get("HTTP_IF_MODIFIED_SINCE", None)
    if modified_since_str:
        modified_since = time.mktime(parsedate(modified_since_str))
        file_time = time.mktime(cache.original_file_time())
        if modified_since >= file_time:
            return HttpResponseNotModified()
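# A minimal sketch of how check_modified_since could be consumed from a view, assuming the
# helper above is available on the class and that `cache` exposes original_file_time():
# the helper returns HttpResponseNotModified (a 304) when the client's copy is current, or
# None when the full response still needs to be built. CachedFileView, get_cache() and
# build_full_response() are illustrative names, not part of the original code.
from django.views import View


class CachedFileView(View):
    def get(self, request, *args, **kwargs):
        cache = self.get_cache()  # hypothetical accessor for the cached file's metadata
        not_modified = self.check_modified_since(cache)
        if not_modified is not None:
            return not_modified
        return self.build_full_response(cache)  # hypothetical full-response builder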
def register(request):
    try:
        new_user = User.objects.create_user(
            request.data['username'],
            email=request.data['email'],
            password=request.data['password'])
        return JsonResponse({"created_id": new_user.id}, status=201)
    except IntegrityError:
        return HttpResponseNotModified()
def post(self, request, *args, **kwargs):
    amount = request.POST['amount']
    path_id = request.POST['path_id']
    if self.create_users(int(amount), int(path_id)):
        nxt = request.POST.get('next', '/')
        return HttpResponseRedirect(nxt)
    else:
        return HttpResponseNotModified()
def wrapped_view(*args, **kwargs):
    try:
        return view_func(*args, **kwargs)
    except UnicornViewError as e:
        return JsonResponse({"error": str(e)})
    except RenderNotModified:
        return HttpResponseNotModified()
    except AssertionError as e:
        return JsonResponse({"error": str(e)})
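# A plausible enclosing decorator for wrapped_view above, assuming the usual functools.wraps
# pattern; the decorator name handle_unicorn_errors is illustrative, while the exception
# classes and response types come from the snippet itself.
import functools

from django.http import HttpResponseNotModified, JsonResponse


def handle_unicorn_errors(view_func):
    @functools.wraps(view_func)
    def wrapped_view(*args, **kwargs):
        try:
            return view_func(*args, **kwargs)
        except UnicornViewError as e:
            return JsonResponse({"error": str(e)})
        except RenderNotModified:
            return HttpResponseNotModified()
        except AssertionError as e:
            return JsonResponse({"error": str(e)})
    return wrapped_view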
def get(self, request, zipped_filename, embedded_filepath):
    """
    Handles GET requests and serves a static file from within the zip file.
    """
    zipped_path = get_path_or_404(zipped_filename)

    # Sometimes due to URL concatenation, we get URLs with double-slashes in them, like //path/to/file.html.
    # The zipped_filename and embedded_filepath are defined by the regex capturing groups in the URL defined
    # in urls.py in the same folder as this file:
    # r"^zipcontent/(?P<zipped_filename>[^/]+)/(?P<embedded_filepath>.*)"
    # If the embedded_filepath contains a leading slash because of an input URL like:
    # /zipcontent/filename.zip//file.html
    # then the embedded_filepath will have a value of "/file.html"
    # We detect this leading slash in embedded_filepath and remove it.
    if embedded_filepath.startswith("/"):
        embedded_filepath = embedded_filepath[1:]
    # Any double-slashes later in the URL will be present as double-slashes, such as:
    # /zipcontent/filename.zip/path//file.html
    # giving an embedded_filepath value of "path//file.html"
    # Normalize the path by converting double-slashes occurring later in the path to a single slash.
    # This would change our example embedded_filepath to "path/file.html" which will resolve properly.
    embedded_filepath = embedded_filepath.replace("//", "/")

    # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
    if request.META.get("HTTP_IF_MODIFIED_SINCE"):
        return HttpResponseNotModified()

    with zipfile.ZipFile(zipped_path) as zf:
        # handle H5P files
        if zipped_path.endswith("h5p"):
            if not embedded_filepath or embedded_filepath.startswith("dist/"):
                response = get_h5p(zf, embedded_filepath)
            else:
                # Don't bother doing any hashi parsing of HTML content for h5p
                response = get_embedded_file(
                    request, zf, zipped_filename, embedded_filepath, skip_hashi=True)
        else:
            response = get_embedded_file(
                request, zf, zipped_filename, embedded_filepath)

        # ensure the browser knows not to try byte-range requests, as we don't support them here
        response["Accept-Ranges"] = "none"

        return response
def get(self, request, zipped_filename, embedded_filepath):
    """
    Handles GET requests and serves a static file from within the zip file.
    """
    # calculate the local file path to the zip file
    zipped_path = get_content_storage_file_path(zipped_filename)

    # if the zipfile does not exist on disk, return a 404
    if not os.path.exists(zipped_path):
        raise Http404('"%(filename)s" does not exist locally' % {'filename': zipped_filename})

    # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
    if request.META.get('HTTP_IF_MODIFIED_SINCE'):
        return HttpResponseNotModified()

    with zipfile.ZipFile(zipped_path) as zf:

        # get the details about the embedded file, and ensure it exists
        try:
            info = zf.getinfo(embedded_filepath)
        except KeyError:
            raise Http404('"{}" does not exist inside "{}"'.format(embedded_filepath, zipped_filename))

        # try to guess the MIME type of the embedded file being referenced
        content_type = mimetypes.guess_type(embedded_filepath)[0] or 'application/octet-stream'

        # generate a streaming response object, pulling data from within the zip file
        response = FileResponse(zf.open(info), content_type=content_type)

        # set the last-modified header to the date marked on the embedded file
        if info.date_time:
            response["Last-Modified"] = http_date(
                float(datetime.datetime(*info.date_time).strftime("%s")))

        # cache these resources forever; this is safe due to the MD5-naming used on content files
        response["Expires"] = "Sun, 17-Jan-2038 19:14:07 GMT"

        # set the content-length header to the size of the embedded file
        if info.file_size:
            response["Content-Length"] = info.file_size

        # ensure the browser knows not to try byte-range requests, as we don't support them here
        response["Accept-Ranges"] = "none"

        return response
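# The strftime("%s") call above relies on a platform-specific (glibc) format code. A hedged,
# portable alternative for deriving the same Last-Modified value from a zip entry's timestamp,
# matching the formulation used in a later variant of this view, is:
import datetime
import time

from django.utils.http import http_date


def zip_entry_last_modified(date_time):
    # date_time is the 6-tuple exposed by zipfile.ZipInfo.date_time
    return http_date(time.mktime(datetime.datetime(*date_time).timetuple()))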
def decorator_list(self, request, *args, **kwargs):
    last_modified_header = request.META.get("HTTP_IF_MODIFIED_SINCE")
    if last_modified_header is not None:
        parsed_modified = datetime(*http_parse_date(last_modified_header)[:6])
        modified_since = pytz.UTC.localize(parsed_modified)
        modified_since_qs = cls.queryset.filter(
            Q(updated__gte=modified_since)
            & (Q(status=models.SynchronizedModel.STATUS_PUBLISHED)
               | Q(status=models.SynchronizedModel.STATUS_PUBLISHED_DRAFT)
               | Q(status=models.SynchronizedModel.STATUS_PUBLISHED_DELETED)))
        if modified_since_qs.count() == 0:
            return HttpResponseNotModified()
        else:
            self.queryset = modified_since_qs
    else:
        self.queryset = self.queryset.filter(
            status=models.SynchronizedModel.STATUS_PUBLISHED)
    return original_list(self, request, *args, **kwargs)
def get(self, request, name):
    db_file = get_object_or_404(DBFile.objects.defer('content'), name=name)
    mtime = time.mktime(db_file.updated_on.timetuple())
    modified = was_modified_since(
        header=self.request.META.get('HTTP_IF_MODIFIED_SINCE'),
        mtime=mtime,
        size=db_file.size)
    if not modified:
        return HttpResponseNotModified()
    content_type, encoding = mimetypes.guess_type(db_file.name)
    content_type = content_type or 'application/octet-stream'
    response = HttpResponse(db_file.content, content_type=content_type)
    response['Last-Modified'] = http_date(mtime)
    response['Content-Length'] = db_file.size
    if encoding:
        response['Content-Encoding'] = encoding
    return response
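# A minimal sketch of exercising the If-Modified-Since branch above with Django's test
# client, assuming a hypothetical route "/dbfiles/<name>" wired to this view: replaying the
# Last-Modified value from the first response should yield a 304 (HttpResponseNotModified).
from django.test import Client

client = Client()
first = client.get("/dbfiles/logo.png")  # hypothetical URL and file name
second = client.get("/dbfiles/logo.png", HTTP_IF_MODIFIED_SINCE=first["Last-Modified"])
assert second.status_code == 304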
def retrieve(self, request: Request, *args, **kwargs) -> _t.Union[FileResponse, HttpResponseNotModified]:
    instance = self.get_object()
    instance_edit_timestamp = None

    if self.instance_field_timestamp:
        instance_edit: datetime.datetime = getattr(instance, self.instance_field_timestamp, None)
        instance_edit_timestamp = instance_edit.timestamp() if instance_edit else None

    if instance_edit_timestamp and instance_edit_timestamp == float(request.META.get('HTTP_IF_NONE_MATCH', '0.0')):
        return HttpResponseNotModified()

    response: FileResponse = self.get_serializer(**self.get_file_response_kwargs(instance))  # type: ignore

    if self.instance_field_timestamp and instance_edit_timestamp:
        cache_control_header_data = getattr(self, 'cache_control_header_data', None)
        if cache_control_header_data is None:
            cache_control_header_data = default_cache_control_header_data
        response['Cache-Control'] = cache_control_header_data
        response['ETag'] = str(instance_edit_timestamp)

    return response
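# A minimal sketch of the conditional round trip retrieve() implements, assuming a
# hypothetical endpoint "/api/files/1/": the first response carries the timestamp-based
# ETag, and repeating the request with If-None-Match set to that value should return a 304.
from django.test import Client

client = Client()
first = client.get("/api/files/1/")  # hypothetical endpoint
second = client.get("/api/files/1/", HTTP_IF_NONE_MATCH=first["ETag"])
assert second.status_code == 304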
def process_request(self, request):
    """
    process_request: read cache_key from COOKIES, then check whether that cache_key is
    already in the cache; if it is, return HttpResponseNotModified, otherwise cache the
    cache_key with an expiry time.
    process_response: set COOKIE_NAME to cache_key.

    --2-- request-id-7a946fcf3c034f0182c8b18c4bf69a9f /core/blog/article/1/ GET
    --3-- request-id-7a946fcf3c034f0182c8b18c4bf69a9f /core/blog/article/1/ GET
    [28/Mar/2018 09:16:06] "GET /core/blog/article/1/ HTTP/1.1" 200 11044
    --1-- request-id-5e81825e84e64c828dcf338358b1e953 /core/blog/article/1/ POST
    ---------view article_modify-------
    --2-- request-id-5e81825e84e64c828dcf338358b1e953 /core/blog/article/1/ POST
    --3-- request-id-5e81825e84e64c828dcf338358b1e953 /core/blog/article/1/ POST
    [28/Mar/2018 09:16:34] "POST /core/blog/article/1/ HTTP/1.1" 302 0
    --2-- request-id-e8467f020d644675be7b2651ac34b003 /core/blog/article GET
    --3-- request-id-e8467f020d644675be7b2651ac34b003 /core/blog/article GET
    """
    # only guard state-changing, non-AJAX requests against duplicate submission
    if request.method.lower() not in ('post', 'put', 'delete', 'patch') or request.is_ajax():
        return None

    cache_key = request.COOKIES.get(COOKIE_NAME)
    if not cache_key:
        return None

    cache = caches[CACHE_NAME]
    print('--1--', cache_key, request.path, request.method)

    # a cache_key that has already been seen marks a duplicate submission
    if cache_key in cache:
        return HttpResponseNotModified()
    cache.set(cache_key, True, CACHE_TIMEOUT)
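# A hedged sketch of the matching process_response described in the docstring above: it
# issues a fresh cache_key cookie so that the next non-AJAX submission carries a new
# identifier. The uuid-based "request-id-..." format is an assumption inferred from the
# trace in the docstring; COOKIE_NAME comes from the snippet itself.
import uuid


class DuplicateSubmitMiddlewareSketch:  # hypothetical companion to process_request above
    def process_response(self, request, response):
        response.set_cookie(COOKIE_NAME, 'request-id-' + uuid.uuid4().hex)
        return response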
def get(self, request, zipped_filename, embedded_filepath):
    """
    Handles GET requests and serves a static file from within the zip file.
    """
    # calculate the local file path to the zip file
    zipped_path = get_content_storage_file_path(zipped_filename)

    # file size
    file_size = 0

    # if the zipfile does not exist on disk, return a 404
    if not os.path.exists(zipped_path):
        raise Http404('"%(filename)s" does not exist locally' % {'filename': zipped_filename})

    # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
    if request.META.get('HTTP_IF_MODIFIED_SINCE'):
        return HttpResponseNotModified()

    with zipfile.ZipFile(zipped_path) as zf:

        # if no path, or a directory, is being referenced, look for an index.html file
        if not embedded_filepath or embedded_filepath.endswith("/"):
            embedded_filepath += "index.html"

        # get the details about the embedded file, and ensure it exists
        try:
            info = zf.getinfo(embedded_filepath)
        except KeyError:
            raise Http404('"{}" does not exist inside "{}"'.format(embedded_filepath, zipped_filename))

        # try to guess the MIME type of the embedded file being referenced
        content_type = mimetypes.guess_type(embedded_filepath)[0] or 'application/octet-stream'

        if not os.path.splitext(embedded_filepath)[1] == '.json':
            # generate a streaming response object, pulling data from within the zip file
            response = FileResponse(zf.open(info), content_type=content_type)
            file_size = info.file_size
        else:
            # load the stream from the json file into memory, replacing the path placeholder.
            content = zf.open(info).read()
            str_to_be_replaced = ('$' + exercises.IMG_PLACEHOLDER).encode()
            zipcontent = ('/' + request.resolver_match.url_name + "/" + zipped_filename).encode()
            content_with_path = content.replace(str_to_be_replaced, zipcontent)
            response = HttpResponse(content_with_path, content_type=content_type)
            file_size = len(content_with_path)

        # cache these resources forever; this is safe due to the MD5-naming used on content files
        response["Expires"] = "Sun, 17-Jan-2038 19:14:07 GMT"

        # set the content-length header to the size of the embedded file
        if info.file_size:
            response["Content-Length"] = file_size

        # ensure the browser knows not to try byte-range requests, as we don't support them here
        response["Accept-Ranges"] = "none"

        # allow all origins so that content can be read from within zips within sandboxed iframes
        response["Access-Control-Allow-Origin"] = "*"

        # Newer versions of Chrome block iframes from loading; the solution is to override
        # X-Frame-Options with any string (https://stackoverflow.com/a/6767901)
        response['X-Frame-Options'] = "GOFORIT"

        return response
def post(self, request, *args, **kwargs):
    if self.create_users_for_paths():
        nxt = request.POST.get('next', '/')
        return HttpResponseRedirect(nxt)
    else:
        return HttpResponseNotModified()
def get(self, request, zipped_filename, embedded_filepath):
    """
    Handles GET requests and serves a static file from within the zip file.
    """
    assert VALID_STORAGE_FILENAME.match(zipped_filename), \
        "'{}' is not a valid content storage filename".format(zipped_filename)

    storage = default_storage

    # calculate the local file path to the zip file
    filename, ext = os.path.splitext(zipped_filename)
    zipped_path = generate_object_storage_name(filename, zipped_filename)

    # file size
    file_size = 0

    # if the zipfile does not exist on disk, return a 404
    if not storage.exists(zipped_path):
        return HttpResponseNotFound('"%(filename)s" does not exist in storage' % {'filename': zipped_path})

    # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
    if request.META.get('HTTP_IF_MODIFIED_SINCE'):
        return HttpResponseNotModified()

    zf_obj = storage.open(zipped_path)

    with zipfile.ZipFile(zf_obj) as zf:

        # if no path, or a directory, is being referenced, look for an index.html file
        if not embedded_filepath or embedded_filepath.endswith("/"):
            embedded_filepath += "index.html"

        # get the details about the embedded file, and ensure it exists
        try:
            info = zf.getinfo(embedded_filepath)
        except KeyError:
            return HttpResponseNotFound('"{}" does not exist inside "{}"'.format(embedded_filepath, zipped_filename))

        # try to guess the MIME type of the embedded file being referenced
        content_type = mimetypes.guess_type(embedded_filepath)[0] or 'application/octet-stream'

        if not os.path.splitext(embedded_filepath)[1] == '.json':
            # generate a streaming response object, pulling data from within the zip file
            response = FileResponse(zf.open(info), content_type=content_type)
            file_size = info.file_size
        else:
            # load the stream from the json file into memory, replacing the path placeholder.
            content = zf.open(info).read()
            str_to_be_replaced = ('$' + exercises.IMG_PLACEHOLDER).encode()
            zipcontent = ('/' + request.resolver_match.url_name + "/" + zipped_filename).encode()
            content_with_path = content.replace(str_to_be_replaced, zipcontent)
            response = HttpResponse(content_with_path, content_type=content_type)
            file_size = len(content_with_path)

        # set the last-modified header to the date marked on the embedded file
        if info.date_time:
            response["Last-Modified"] = http_date(
                time.mktime(datetime.datetime(*info.date_time).timetuple()))

        # cache these resources forever; this is safe due to the MD5-naming used on content files
        response["Expires"] = "Sun, 17-Jan-2038 19:14:07 GMT"

        # set the content-length header to the size of the embedded file
        if file_size:
            response["Content-Length"] = file_size

        # ensure the browser knows not to try byte-range requests, as we don't support them here
        response["Accept-Ranges"] = "none"

        _add_access_control_headers(request, response)

        # restrict CSP to only allow resources to be loaded from the Studio host, to prevent info leakage
        # (e.g. via passing user info out as GET parameters to an attacker's server), or inadvertent data usage
        host = request.build_absolute_uri('/').strip("/")
        response["Content-Security-Policy"] = "default-src 'self' 'unsafe-inline' 'unsafe-eval' data: " + host

        return response
def get(self, request, zipped_filename, embedded_filepath):
    """
    Handles GET requests and serves a static file from within the zip file.
    """
    # calculate the local file path to the zip file
    zipped_path = get_content_storage_file_path(zipped_filename)

    # file size
    file_size = 0

    # if the zipfile does not exist on disk, return a 404
    if not os.path.exists(zipped_path):
        raise Http404('"%(filename)s" does not exist locally' % {'filename': zipped_filename})

    # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
    if request.META.get('HTTP_IF_MODIFIED_SINCE'):
        return HttpResponseNotModified()

    with zipfile.ZipFile(zipped_path) as zf:

        # if no path, or a directory, is being referenced, look for an index.html file
        if not embedded_filepath or embedded_filepath.endswith("/"):
            embedded_filepath += "index.html"

        # get the details about the embedded file, and ensure it exists
        try:
            info = zf.getinfo(embedded_filepath)
        except KeyError:
            raise Http404('"{}" does not exist inside "{}"'.format(embedded_filepath, zipped_filename))

        # try to guess the MIME type of the embedded file being referenced
        content_type = mimetypes.guess_type(embedded_filepath)[0] or 'application/octet-stream'

        if not os.path.splitext(embedded_filepath)[1] == '.json':
            # generate a streaming response object, pulling data from within the zip file
            response = FileResponse(zf.open(info), content_type=content_type)
            file_size = info.file_size
        else:
            # load the stream from the json file into memory, replacing the path placeholder.
            content = zf.open(info).read()
            str_to_be_replaced = ('$' + exercises.IMG_PLACEHOLDER).encode()
            zipcontent = ('/' + request.resolver_match.url_name + "/" + zipped_filename).encode()
            content_with_path = content.replace(str_to_be_replaced, zipcontent)
            response = HttpResponse(content_with_path, content_type=content_type)
            file_size = len(content_with_path)

        # set the content-length header to the size of the embedded file
        if info.file_size:
            response["Content-Length"] = file_size

        # ensure the browser knows not to try byte-range requests, as we don't support them here
        response["Accept-Ranges"] = "none"

        # add headers to ensure AJAX requests will be permitted for these files, even from a null origin
        _add_access_control_headers(request, response)

        # restrict CSP to only allow resources to be loaded from the Kolibri host, to prevent info leakage
        # (e.g. via passing user info out as GET parameters to an attacker's server), or inadvertent data usage
        host = request.build_absolute_uri('/').strip("/")
        response["Content-Security-Policy"] = "default-src 'self' 'unsafe-inline' 'unsafe-eval' data: " + host

        return response
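# _add_access_control_headers is referenced but not shown in the two snippets above; a
# minimal sketch consistent with their comments (permitting AJAX requests for these files,
# even from a null origin) might look like the following. Treat it as an assumption, not
# the actual helper.
def _add_access_control_headers(request, response):
    response["Access-Control-Allow-Origin"] = "*"
    response["Access-Control-Allow-Methods"] = "GET, OPTIONS"
    requested_headers = request.META.get("HTTP_ACCESS_CONTROL_REQUEST_HEADERS", "")
    if requested_headers:
        response["Access-Control-Allow-Headers"] = requested_headers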
def locker_add_activity(request, locker_id: int, activity_type: str):
    """
    Concentrated endpoint for the locker base to query the webserver.
    """
    try:
        lb = get_object_or_404(LockerBase, pk=locker_id)
        if LockerBase.verify(request.POST["verification_code"]) != lb:
            return HttpResponseBadRequest()
        else:
            activity_type = activity_type.lower()
            if activity_type == "online":
                # report to webserver that the locker base is online.
                if lb.add_activity(activity_type=LockerActivity.ActivityType.ONLINE, locker_unit=None):
                    return JsonResponse({"success": True})
                else:
                    return HttpResponseNotModified()
            elif activity_type == "offline":
                # report to webserver that the locker base is offline.
                if lb.add_activity(activity_type=LockerActivity.ActivityType.OFFLINE, locker_unit=None):
                    return JsonResponse({"success": True})
                else:
                    return HttpResponseNotModified()
            elif activity_type == "register":
                # reports locker units connected to webserver.
                lu = get_object_or_404(LockerUnit, pk=request.POST["unit_id"])
                if lb.add_activity(activity_type=LockerActivity.ActivityType.REGISTER, locker_unit=lu):
                    return JsonResponse({
                        "success": True,
                        "length": lu.length,
                        "width": lu.width,
                        "height": lu.height,
                        "is_available": lu.is_available
                    })
                else:
                    return HttpResponseNotModified()
            elif activity_type == "change":
                # requests for a change in verification code.
                la = lb.change_v_code()
                if isinstance(la, LockerActivity):
                    return JsonResponse({
                        "success": True,
                        "verification_code": lb.verification_code
                    })
                else:
                    return HttpResponseNotModified()
            elif activity_type == "parcel":
                # queries the webserver if the parcel belongs to this lockerbase.
                parcel = get_object_or_404(Parcel, tracking_number=request.POST["tracking_number"])
                pa = parcel.add_activity(locker_base=lb, activity_type=ParcelActivity.ActivityType.QUERY)
                if pa and isinstance(pa, ParcelActivity):
                    return JsonResponse({"success": True})
                else:
                    return HttpResponseForbidden()
            elif activity_type == "scandim":
                # informs the webserver that the parcel just had its dimensions scanned.
                parcel = get_object_or_404(Parcel, tracking_number=request.POST["tracking_number"])
                pa = parcel.add_activity(locker_base=lb, activity_type=ParcelActivity.ActivityType.CHECKIN)
                if pa and isinstance(pa, ParcelActivity):
                    return JsonResponse({"success": True})
                else:
                    return HttpResponseForbidden()
            elif activity_type == "deposit":
                # is a set of two:
                # UNLOCK - DEPOSIT_REQ if complete=false
                # LOCK - DEPOSIT if complete=true
                parcel = get_object_or_404(Parcel, tracking_number=request.POST["tracking_number"])
                lu = get_object_or_404(LockerUnit, pk=request.POST["unit_id"])
                if "complete" in request.POST:
                    is_complete = eval(request.POST["complete"])
                    if isinstance(is_complete, bool) and is_complete:
                        pa = parcel.add_activity(
                            locker_base=lb,
                            locker_unit=lu,
                            activity_type=ParcelActivity.ActivityType.DEPOSIT)
                    else:
                        pa = parcel.add_activity(
                            locker_base=lb,
                            locker_unit=lu,
                            activity_type=ParcelActivity.ActivityType.DEPOSITREQ)
                    if pa and isinstance(pa, ParcelActivity):
                        return JsonResponse({"success": True})
                    else:
                        return JsonResponse({"success": False})
                else:
                    return HttpResponseForbidden()
            elif activity_type == "withdraw":
                # see above, set of two
                parcel = Parcel.verify_retrieval_code(qr_data=request.POST["qr_data"])
                lu = get_object_or_404(LockerUnit, pk=request.POST["unit_id"])
                if "complete" in request.POST:
                    is_complete = eval(request.POST["complete"])
                    if isinstance(is_complete, bool) and is_complete:
                        pa = parcel.add_activity(
                            locker_base=lb,
                            locker_unit=lu,
                            activity_type=ParcelActivity.ActivityType.WITHDRAW)
                    else:
                        pa = parcel.add_activity(
                            locker_base=lb,
                            locker_unit=lu,
                            activity_type=ParcelActivity.ActivityType.WITHDRAWREQ)
                    if pa and isinstance(pa, ParcelActivity):
                        return JsonResponse({"success": True})
                    else:
                        return JsonResponse({"success": False})
                else:
                    return HttpResponseForbidden()
            elif activity_type == "withdraw-qr":
                # when the recipient scans the qr code
                p = Parcel.verify_retrieval_code(qr_data=request.POST["qr_data"])
                if p is None:
                    return HttpResponseNotFound()
                elif p is False:
                    return JsonResponse({"success": False})
                elif isinstance(p, Parcel):
                    pa = p.add_activity(locker_base=lb, activity_type=ParcelActivity.ActivityType.WITHDRAWQR)
                    if pa and isinstance(pa, ParcelActivity):
                        return JsonResponse({
                            "success": True,
                            "unit_id": p.get_deposited_unit().id
                        })
                    else:
                        return JsonResponse({"success": False})
                else:
                    return HttpResponseNotFound()
    except ObjectDoesNotExist as e:
        logging.error(e)
        return HttpResponseBadRequest()
    except KeyError:
        return HttpResponseBadRequest()