Example #1
0
def return_file(
    request,
    path: str,
    storage_name: Optional[str] = None
) -> Union[HttpResponseNotFound, StreamingHttpResponse, HttpResponse]:
    """Serve a stored file over HTTP with byte-range support.

    GET returns the file contents (200 for the whole file, 206 for a single
    range, multipart/byteranges for several); any other method (e.g. HEAD)
    returns only the headers with the file size.

    :param request: incoming Django request; the ``Range`` header is honoured
    :param path: location of the ``ArrayDataItem`` to serve
    :param storage_name: optional name of the ``Storage`` backend holding it
    :return: streaming/plain response, 404 when item or storage is missing,
        or 416 when the requested range cannot be satisfied
    """
    try:
        if storage_name:
            storage = get_object_or_404(backends.Storage, name=storage_name)
        else:
            storage = None

        data_item = get_object_or_404(coverages.ArrayDataItem,
                                      location=path,
                                      storage=storage)
        ranges = request.headers.get("Range", None)
        _, ranges = parse_ranges(ranges)

        # verify_file raises RangeNotSatisfiable for out-of-bounds ranges
        size = verify_file(data_item, ranges)
        if request.method == "GET":
            response = StreamingHttpResponse(
                iter_file(data_item, ranges=ranges))
            if not ranges:
                response["Content-Type"] = "image/tiff"
            elif len(ranges) == 1:
                _r = ranges[0]
                response["Content-Type"] = "image/tiff"
                response["Content-Range"] = f"bytes {_r[0]}-{_r[1]}/{size}"
                response.status_code = 206
            else:
                # multiple ranges: re-wrap the stream as multipart/byteranges
                boundary = generate_boundary()
                response.streaming_content = wrap_streaming_content(
                    response.streaming_content, ranges, boundary, size)
                response[
                    "Content-Type"] = f"multipart/byteranges; boundary={boundary.decode()}"
                response.status_code = 206
        else:
            # HEAD-style request: headers only, no body
            response = HttpResponse("")
            response["Content-Type"] = "image/tiff"
            response["Content-Length"] = str(size)

        # CORS headers so browsers may issue ranged requests cross-origin
        response["Access-Control-Allow-Origin"] = "*"
        response["Accept-Ranges"] = "bytes"
        response["Access-Control-Allow-Methods"] = "POST, GET, OPTIONS, HEAD"
        response[
            "Access-Control-Allow-Headers"] = "X-PINGOTHER, Content-Type, Range"
        response["Access-Control-Max-Age"] = "86400"

        return response

    except Http404:
        return HttpResponseNotFound("<h1>404 file not found</h1>")

    except RangeNotSatisfiable:
        # BUG FIX: the closing </h1> tag was written as <h1>
        response = HttpResponse("<h1>416 requested range not satisfiable</h1>")
        response.status_code = 416
        return response
Example #2
0
def stream_song(request, song_key):
    """Stream a one-time song download identified by *song_key*.

    The ephemeral entry is consumed (deleted) on first access.  A single
    HTTP ``Range`` request is honoured with a 206 partial response; without
    a Range header the whole file is streamed.

    Raises Http404 when the key is unknown or the file has gone missing.
    """
    try:
        entry = EphemeralEntry.objects.get(key=song_key)
    except ObjectDoesNotExist:
        raise Http404()

    try:
        song = entry.song
        # one-shot link: remove the entry before serving
        entry.delete()

        # BUG FIX: Django exposes the Range header in META as HTTP_RANGE;
        # the plain 'Range' key never exists there, so ranged requests
        # were silently served the whole file
        range_header = request.META.get('HTTP_RANGE')
        if not range_header:
            response = StreamingHttpResponse(
                (line for line in open(song.path, 'rb')))
            response['Accept-Ranges'] = 'bytes'
            response['Content-Type'] = song.mime_type
            response['Content-Length'] = os.path.getsize(song.path)
            return response

        size = os.path.getsize(song.path)
        byte1, byte2 = 0, None

        # NOTE(review): assumes the header always contains a parseable
        # "start-end" pair; a malformed header makes m None (AttributeError)
        m = re.search(r'(\d+)-(\d*)', range_header)
        g = m.groups()

        if g[0]:
            byte1 = int(g[0])
        if g[1]:
            byte2 = int(g[1])

        # HTTP byte ranges are inclusive: "bytes=0-1" means two bytes.
        # BUG FIX: the original computed byte2 - byte1 (off by one).
        length = size - byte1
        if byte2 is not None:
            length = byte2 - byte1 + 1

        with open(song.path, 'rb') as f:
            f.seek(byte1)
            data = f.read(length)

        response = HttpResponse(data,
                                content_type=mimetypes.guess_type(
                                    song.path)[0])
        response['Accept-Ranges'] = 'bytes'
        response['Content-Range'] = 'bytes {0}-{1}/{2}'.format(
            byte1, byte1 + length - 1, size)
        # BUG FIX: status_code is an attribute, not a callable;
        # response.status_code(206) raised "int is not callable"
        response.status_code = 206
        return response
    except FileNotFoundError:
        raise Http404('file does not exist')
Example #3
0
def get_download_response(payload,
                          content_length,
                          content_format,
                          filename,
                          request=None):
    """
    Build a streaming download response, honouring a single HTTP Range.

    :param payload: File like object.
    :param content_length: Size of payload in bytes
    :param content_format: ``couchexport.models.Format`` instance
    :param filename: Name of the download
    :param request: The request. Used to determine if a range response should be given.
    :return: HTTP response
    """
    ranges = None
    if request and "HTTP_RANGE" in request.META:
        try:
            ranges = parse_range_header(request.META['HTTP_RANGE'],
                                        content_length)
        except ValueError:
            # malformed Range header: fall back to a full-content response
            pass

    # only simple single-range requests are honoured; multipart
    # byteranges fall back to serving the whole file
    if ranges and len(ranges.ranges) != 1:
        ranges = None

    response = StreamingHttpResponse(content_type=content_format.mimetype)
    if content_format.download:
        response['Content-Disposition'] = safe_filename_header(filename)

    response["Content-Length"] = content_length
    response["Accept-Ranges"] = "bytes"

    if ranges:
        # stop is treated as an exclusive end: Content-Range below reports
        # end - 1 and Content-Length is end - start
        start, stop = ranges.ranges[0]
        if stop is not None and stop > content_length:
            # requested range not satisfiable
            return HttpResponse(status=416)

        # NOTE(review): ``stop or float("inf")`` and ``stop or
        # content_length`` treat stop == 0 like None — confirm
        # parse_range_header can never yield a zero stop
        response.streaming_content = RangedFileWrapper(payload,
                                                       start=start,
                                                       stop=stop
                                                       or float("inf"))
        end = stop or content_length
        response["Content-Range"] = "bytes %d-%d/%d" % (start, end - 1,
                                                        content_length)
        response["Content-Length"] = end - start
        response.status_code = 206
    else:
        # no (usable) range requested: stream the entire payload
        response.streaming_content = FileWrapper(payload)

    return response
Example #4
0
    def post(self, request, *args, **kwargs):
        """
        Accept an uploaded csv of document ids and stream back a zip archive
        containing the pdf of every valid document.  Validation failures are
        reported inside the zip (errors.txt) when at least one document was
        valid, or as messages on a redirect back to the form otherwise.
        """
        formset = DocBulkFormSet(request.POST, request.FILES)
        formset.is_valid()  # side effect: populates cleaned_data per form

        def documents():
            # forms that failed validation have falsy cleaned_data
            return (f.cleaned_data["id"] for f in formset.forms if f.cleaned_data)

        errors = tuple(gather_errors(formset, values=True))

        if not any(documents()):
            # nothing valid at all: surface the errors and bounce back
            for err in errors:
                messages.error(request, err)
            return HttpResponseRedirect(reverse("bulk_documents"))

        # This will fail with a 500 if the file doesn't exist. We're not catching
        # that because if we got this far, we know the file SHOULD exist: we are
        # only working with Documents that have match=True. If this fails, we
        # have some kind of data integrity issue (or you're a dev without the files)
        def zip_gen():
            archive = zipstream.ZipFile(mode="w")
            for doc in documents():
                archive.write(doc.pdf_url())
            if errors:
                archive.writestr("errors.txt", str.encode("\n".join(errors)))
            yield from archive

        response = StreamingHttpResponse(zip_gen(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="datadocuments.zip"'
        response.status_code = 206 if errors else 200
        return response
Example #5
0
    def get(self, request, format=None):
        """ Returns a list of API features """
        # Prepare a CSV attachment; the body is attached below as a
        # streaming generator.
        response = StreamingHttpResponse(content_type='text/csv')
        response[
            'Content-Disposition'] = 'attachment; filename="numbers-formatted.csv"'
        response.status_code = status.HTTP_200_OK
        # NOTE(review): request.GET.get('number') may be None, which makes
        # '+' + phone below raise TypeError — confirm callers always send
        # ?number=.
        phone = request.GET.get('number')
        # Pick the most recently created CSV in the working directory.
        # NOTE(review): max() raises ValueError when no *.csv files exist.
        list_of_files = glob.glob('./*.csv')
        latest_file = max(list_of_files, key=os.path.getctime)

        result = services.locate_nearest_numbers('+' + phone, latest_file)
        # result is (queue, processes) — unpacked positionally below
        processes = result[1]
        queue = result[0]

        # Stream a header row followed by chunked results.
        response.streaming_content = chain(
            header_1(), response_iterator_1(processes, queue, chunk_size=100))

        return response
Example #6
0
def get_download_response(payload, content_length, content_format, filename, request=None):
    """
    Build a (possibly ranged) streaming download response.

    :param payload: File like object.
    :param content_length: Size of payload in bytes
    :param content_format: ``couchexport.models.Format`` instance
    :param filename: Name of the download
    :param request: The request. Used to determine if a range response should be given.
    :return: HTTP response
    """
    requested = None
    if request and "HTTP_RANGE" in request.META:
        try:
            requested = parse_range_header(request.META['HTTP_RANGE'], content_length)
        except ValueError:
            requested = None

    # only simple (single-range) requests are honoured
    single_range = None
    if requested and len(requested.ranges) == 1:
        single_range = requested.ranges[0]

    response = StreamingHttpResponse(content_type=content_format.mimetype)
    if content_format.download:
        response['Content-Disposition'] = safe_filename_header(filename)

    response["Content-Length"] = content_length
    response["Accept-Ranges"] = "bytes"

    if single_range is None:
        # serve the entire payload
        response.streaming_content = FileWrapper(payload)
        return response

    start, stop = single_range
    if stop is not None and stop > content_length:
        # requested range not satisfiable
        return HttpResponse(status=416)

    end = stop or content_length
    response.streaming_content = RangedFileWrapper(payload, start=start, stop=stop or float("inf"))
    response["Content-Range"] = "bytes %d-%d/%d" % (start, end - 1, content_length)
    response["Content-Length"] = end - start
    response.status_code = 206
    return response
Example #7
0
    def post(self, request):
        """Validate and persist an uploaded numbers file, then stream the
        reformatted numbers back as a CSV attachment; invalid input returns
        the serializer errors with HTTP 400."""
        file_serializer = FileSerializer(data=request.data)

        if not file_serializer.is_valid():
            return Response(file_serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

        file_serializer.save()
        result = services.reformat_numbers(file_serializer.data['numbers'])
        queue, processes = result[0], result[1]

        response = StreamingHttpResponse(content_type='text/csv')
        response[
            'Content-Disposition'] = 'attachment; filename="numbers-formatted.csv"'
        response.status_code = status.HTTP_200_OK
        # header row first, then chunked reformatted output
        response.streaming_content = chain(
            header_2(),
            response_iterator_2(processes, queue, chunk_size=1000))

        return response
Example #8
0
    def post(self, request, *args, **kwargs):
        """Export an annotated volume.

        Validates the export form, gathers the requested annotations,
        generates annotated TEI, and then either publishes a website to a
        GitHub repository or streams back a jekyll site as a zip download,
        depending on the form's ``github`` flag.  Errors re-render the form
        with an appropriate 4xx/5xx status.
        """
        vol = self.object = self.get_object()

        # don't do anything if user is not logged in
        if self.request.user.is_anonymous():
            response = render(request, self.template_name, self.get_context_data())
            response.status_code = 400  # bad request
            return response

        # get posted form data and use that to generate the export
        export_form = self.get_form()
        if export_form.is_valid():
            cleaned_data = export_form.cleaned_data

            # if github export is requested, make sure user has a
            # github account available to use for access
            if cleaned_data['github']:
                try:
                    github.GithubApi.github_account(self.request.user)
                except github.GithubAccountNotFound:
                    return self.render(request, error=self.github_account_msg)

                # check that oauth token has sufficient permission
                # to do needed export steps
                gh = github.GithubApi.connect_as_user(self.request.user)
                # note: repo would also work here, but currently asking for public_repo
                if 'public_repo' not in gh.oauth_scopes():
                    return self.render(request, error=self.github_scope_msg)
        else:
            return self.render(request)

        # determine which annotations should be loaded
        # NOTE(review): if 'annotations' is neither 'user' nor prefixed
        # 'group:', the local ``annotations`` is never bound and the
        # annotate call below raises NameError — confirm the form's
        # choices make that impossible.
        if cleaned_data['annotations'] == 'user':
            # annotations *by* this user
            # (NOT all annotations the user can view)
            annotations = vol.annotations().filter(user=request.user)
        elif cleaned_data['annotations'].startswith('group:'):
            # all annotations visible to a group this user belongs to
            group_id = cleaned_data['annotations'][len('group:'):]
            # NOTE: object not found error should not occur here,
            # because only valid group ids should be valid choices
            group = AnnotationGroup.objects.get(pk=group_id)
            annotations = vol.annotations().visible_to_group(group)

        # generate annotated tei
        tei = annotate.annotated_tei(vol.generate_volume_tei(),
                                     annotations)

        # check form data to see if github repo is requested
        if cleaned_data['github']:
            try:
                repo_url, ghpages_url = export.website_gitrepo(request.user,
                    cleaned_data['github_repo'], vol, tei,
                    page_one=cleaned_data['page_one'])

                logger.info('Exported %s to GitHub repo %s for user %s',
                    vol.pid, repo_url, request.user.username)

                # NOTE: maybe use a separate template here?
                return self.render(request, repo_url=repo_url,
                    ghpages_url=ghpages_url, github_export=True)
            except export.GithubExportException as err:
                response = self.render(request, error='Export failed: %s' % err)
                response.status_code = 400  # maybe?
                return response
        else:
            # non github export: download zipfile
            try:
                webzipfile = export.website_zip(vol, tei,
                    page_one=cleaned_data['page_one'])
                logger.info('Exported %s as jekyll zipfile for user %s',
                    vol.pid, request.user.username)
                response = StreamingHttpResponse(FileWrapper(webzipfile, 8192),
                    content_type='application/zip')
                # NOTE(review): "jeyll" in the download filename looks like
                # a typo for "jekyll" (user-visible filename only).
                response['Content-Disposition'] = 'attachment; filename="%s_annotated_jeyll_site.zip"' % \
                    (vol.noid)
                response['Content-Length'] = os.path.getsize(webzipfile.name)
            except export.ExportException as err:
                # display error to user and redisplay the form
                response = self.render(request, error='Export failed. %s' % err)
                response.status_code = 500

            # set a cookie to indicate download is complete, that can be
            # used by javascript to hide a 'generating' indicator
            completion_cookie_name = request.POST.get('completion-cookie',
                '%s-web-export' % vol.noid)
            response.set_cookie(completion_cookie_name, 'complete', max_age=10)
            return response
Example #9
0
def search(request):
    """Search the ``sip_capture`` table using the filters supplied in the
    query string and return the matching rows as a JSON document.

    Results are cached for 30 seconds keyed on the SHA-1 of the generated
    SQL.  The response status is 404 when no rows match and 500 on any
    other failure (a traceback is embedded in the JSON payload).
    """
    def genrgb(str, malformed):
        # Hash the src/dst pair into a stable palette index; red shades
        # flag malformed packets.  NOTE(review): the parameter shadows the
        # ``str`` builtin, and binascii.crc32 needs bytes on Python 3 —
        # this code presumably targets Python 2; confirm.
        intcol = binascii.crc32(str.strip())

        if not malformed:
            r = "%s" % colors[intcol % len(colors)]
        else:
            r = "%s" % colors_red[intcol % len(colors_red)]
        return r

    ret = {}
    ret["result"] = "unknown"
    ret["msg"] = "unknown msg"
    ret["rows"] = []

    response = StreamingHttpResponse(content_type='application/json')

    try:
        # WHERE clauses accumulated from the query-string filters.
        # NOTE(review): the SQL below is assembled by string interpolation;
        # MySQLdb.escape_string mitigates injection, but parameterized
        # queries would be safer.
        where = []

        QueryDict = request.GET

        limit = QueryDict.get('rows')
        if limit is None or limit == "":
            limit = 10

        # Palettes consumed by genrgb above (closed over from this scope).
        colors = list(
            Color("LightGreen").range_to(
                Color("MediumAquaMarine", luminance=0.9), 1000))
        colors_red = list(
            Color("Salmon",
                  luminance=0.5).range_to(Color("Tomato", luminance=0.9),
                                          1000))

        page = QueryDict.get('page')
        if page is None or page == "":
            page = 1

        # page/limit arrive as strings; clamp page to >= 1
        offset = int(max(int(page) - 1, 0)) * int(limit)

        grouping = QueryDict.get('grouping')
        if grouping is None:
            grouping = "1"

        malformed = QueryDict.get('malformed')
        if malformed == "1":
            where.append("malformed = 1")

        src_ip = QueryDict.get('src_ip')
        if src_ip is not None and src_ip != "":
            where.append("source_ip = INET_ATON('%s') " %
                         MySQLdb.escape_string(src_ip))

        dst_ip = QueryDict.get('dst_ip')
        if dst_ip is not None and dst_ip != "":
            where.append("destination_ip = INET_ATON('%s') " %
                         MySQLdb.escape_string(dst_ip))

        sip_callid = QueryDict.get('sip_callid')
        if sip_callid is not None and sip_callid != "":
            where.append(
                "(callid = '%(callid)s' OR callid_rc = '%(callid)s') " %
                {"callid": MySQLdb.escape_string(sip_callid)})

        sip_method = QueryDict.get('sip_method')
        if sip_method is not None and sip_method != "":
            where.append("request_method = '%s' " %
                         MySQLdb.escape_string(sip_method))

        # Phone numbers are matched with and without +/1/+1 prefixes.
        from_user = QueryDict.get('from_user')
        if from_user is not None and from_user != "":
            where.append("("
                         "`from` = '%(from)s' "
                         "OR `from` = '+%(from)s' "
                         "OR `from` = '+1%(from)s' "
                         "OR `from` = '1%(from)s'"
                         "OR `from` like '%(from)s'"
                         "OR `from` like '+%(from)s'"
                         "OR `from` like '+1%(from)s'"
                         "OR `from` like '1%(from)s'"
                         ")" % {"from": MySQLdb.escape_string(from_user)})

        to_user = QueryDict.get('to_user')
        if to_user is not None and to_user != "":
            where.append("("
                         "`to` = '%(to)s' "
                         "OR `to` = '+1%(to)s' "
                         "OR `to` = '+%(to)s' "
                         "OR `to` = '1%(to)s'"
                         "OR `to` like '%(to)s'"
                         "OR `to` like '+%(to)s'"
                         "OR `to` like '+1%(to)s'"
                         "OR `to` like '1%(to)s'"
                         ")" % {"to": MySQLdb.escape_string(to_user)})

        date_start = QueryDict.get('date_start')
        if date_start is not None and date_start != "":
            where.append("sc.datetime >= '%s' " %
                         MySQLdb.escape_string(date_start))

        date_end = QueryDict.get('date_end')
        if date_end is not None and date_end != "":
            where.append("sc.datetime <= '%s' " %
                         MySQLdb.escape_string(date_end))

        if not where:
            where.append("1=1")

        # SQL_CALC_FOUND_ROWS lets us read the unlimited row count below.
        sql = """
            SELECT SQL_CALC_FOUND_ROWS
                sc.id,
                CAST(UNIX_TIMESTAMP(sc.datetime) AS SIGNED) as datetime,
                CONCAT(INET_NTOA(sc.source_ip), ':', IFNULL(sc.source_port, '0')) as source_ip,
                CONCAT(INET_NTOA(sc.destination_ip), ':', IFNULL(sc.destination_port, '0') ) as destination_ip,
                sc.callid,
                CONCAT(COALESCE(sc.request_method, ''), COALESCE(sc.response_code, '')) as method,
                sc.from,
                sc.to,
                sc.malformed
            FROM
                sip_capture sc
            WHERE
                %s
            %s
            ORDER BY sc.datetime ASC, sc.id ASC LIMIT %u OFFSET %u
        """ % (" \n AND ".join(where), "" if grouping == "0" else
               "GROUP BY sc.callid", int(limit), int(offset))

        db = dbConn()
        cursor = db.cursor()

        try:
            # Cache keyed on the SQL text.  NOTE(review): sha1() requires
            # bytes on Python 3 — another hint this targets Python 2.
            cache_key = sha1(sql).hexdigest()
            #cache.delete(cache_key)
            c = cache.get(cache_key)

            if c is None:
                cursor.execute(sql)
                # _last_executed is a private MySQLdb attribute; debug only
                print(cursor._last_executed)

                if cursor.rowcount == 0:
                    raise ObjectDoesNotExist("Records not found")

                res = {}

                results = cursor.fetchall()
                res['results'] = results

                # total match count ignoring LIMIT (SQL_CALC_FOUND_ROWS)
                cursor.execute("SELECT FOUND_ROWS()")
                found_rows = cursor.fetchone()

                res['found_rows'] = found_rows
                cache.add(cache_key, res, 30)
            else:
                found_rows = c['found_rows']
                results = c['results']

            #print sql
            #cursor.execute(sql)
            #print cursor._last_executed

            for row in results:
                ret["rows"].append({
                    'id':
                    row[0],
                    'date':
                    row[1],
                    'source_ip':
                    row[2],
                    'destination_ip':
                    row[3],
                    'callid':
                    row[4],
                    'method':
                    row[5],
                    'from_user':
                    row[6],
                    'to_user':
                    row[7],
                    'color':
                    genrgb("%s %s" % (row[2], row[3]), row[8])
                })

            ret["result"] = "success"
            ret["msg"] = "success msg"
            ret["total"] = found_rows[0]

        except ObjectDoesNotExist as e:
            # empty result set: report as a warning with 404 status
            ret["result"] = "warning"
            ret["msg"] = str(e)

            response.status_code = 404
            pass

        except Exception as e:
            ret["result"] = "error"
            ret["msg"] = repr(e)
            ret['traceback'] = traceback.format_exc()

            response.status_code = 500
            pass

        finally:
            cursor.close()

    except Exception as e:
        ret["result"] = "error"
        ret['traceback'] = traceback.format_exc()
        ret["msg"] = repr(e)
        response.status_code = 500

    json_data = json.dumps(ret)

    # NOTE(review): assigning a plain string as streaming_content makes
    # Django iterate it; on Python 3 that yields one-character chunks —
    # confirm the intended runtime.
    response.streaming_content = json_data

    return response
Example #10
0
def serve(request, path, document_root=None, show_indexes=False):
    """
    Serve static files below a given point in the directory structure.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/path/to/my/files/'})

    in your URLconf. You must provide the ``document_root`` param. You may
    also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
    of the directory.  This index view will use the template hardcoded below,
    but if you'd like to override it, you can create a template called
    ``static/directory_index.html``.
    """
    # Normalise the requested path, dropping empty components and any
    # '.'/'..' segments so the result cannot escape document_root.
    path = posixpath.normpath(unquote(path))
    path = path.lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        newpath = os.path.join(newpath, part).replace('\\', '/')
    if newpath and path != newpath:
        # Redirect to the canonical form of the path.
        return HttpResponseRedirect(newpath)
    fullpath = os.path.join(document_root, newpath)
    if os.path.isdir(fullpath):
        if show_indexes:
            return directory_index(newpath, fullpath)
        raise Http404(_("Directory indexes are not allowed here."))
    if not os.path.exists(fullpath):
        raise Http404(_('"%(path)s" does not exist') % {'path': fullpath})
    # Respect the If-Modified-Since header.
    statobj = os.stat(fullpath)
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
                              statobj.st_mtime, statobj.st_size):
        return HttpResponseNotModified()
    content_type, encoding = mimetypes.guess_type(fullpath)
    content_type = content_type or 'application/octet-stream'
    ranged_file = RangedFileReader(open(fullpath, 'rb'))
    response = StreamingHttpResponse(ranged_file,
                                     content_type=content_type)
    response["Last-Modified"] = http_date(statobj.st_mtime)
    if stat.S_ISREG(statobj.st_mode):
        size = statobj.st_size
        response["Content-Length"] = size
        response["Accept-Ranges"] = "bytes"
        # Respect the Range header.
        if "HTTP_RANGE" in request.META:
            try:
                ranges = parse_range_header(request.META['HTTP_RANGE'], size)
            except ValueError:
                ranges = None
            # only handle syntactically valid headers, that are simple (no
            # multipart byteranges)
            if ranges is not None and len(ranges) == 1:
                # stop is treated as an exclusive end: Content-Range below
                # reports stop - 1 and Content-Length is stop - start
                start, stop = ranges[0]
                if stop > size:
                    # requested range not satisfiable
                    # NOTE(review): the file opened into ranged_file is not
                    # closed on this early return — confirm whether
                    # RangedFileReader cleans up on GC.
                    return HttpResponse(status=416)
                ranged_file.start = start
                ranged_file.stop = stop
                response["Content-Range"] = "bytes %d-%d/%d" % (start, stop - 1, size)
                response["Content-Length"] = stop - start
                response.status_code = 206
    if encoding:
        response["Content-Encoding"] = encoding
    return response
Example #11
0
    def do_request(self, request, url, method):
        """Proxy *request* to *url* using *method* and translate the result
        back into a Django ``StreamingHttpResponse``.

        Incoming headers and cookies are filtered and forwarded (session and
        CSRF cookies are stripped, Via / X-Forwarded-* headers maintained),
        proxy processors may rewrite both request and response, and
        connection failures map to 502/504 responses.
        """
        url = iri_to_uri(url)

        request_data = {
            "method": method,
            "url": url,
            "data": None,
            "headers": {},
            "cookies": Cookie.SimpleCookie(),
            "user": request.user,
            "original-request": request,
        }

        # Request creation
        proto, host, cgi, param, query = urlparse.urlparse(url)[:5]

        # Extract headers from META
        if 'HTTP_TRANSFER_ENCODING' in request.META:
            return build_error_response(request, 500, "Wirecloud doesn't support requests using Transfer-Encodings")

        for header in request.META.items():
            header_name = header[0].lower()
            if header_name == 'content_type' and header[1]:
                request_data['headers']["content-type"] = header[1]

            elif header_name == 'content_length' and header[1]:
                # Only take into account request body if the request has a
                # Content-Length header (we don't support chunked requests)
                request_data['data'] = request

                # It's better not propagate the Content-Length header as
                # request processors may change final data length. In addition
                # to this, the requests modules ignores the Content-Length
                # header and tries to obtain data length directly from the
                # data parameter. Therefore, providing this value in the len
                # attribute seems to be the best option
                request_data['data'].len = int(header[1])

            elif header_name == 'cookie' or header_name == 'http_cookie':

                cookie_parser = Cookie.SimpleCookie(str(header[1]))

                # BUG FIX: deleting an absent cookie raises KeyError; guard
                # the session-cookie deletion the same way the CSRF one
                # below already was
                if settings.SESSION_COOKIE_NAME in cookie_parser:
                    del cookie_parser[settings.SESSION_COOKIE_NAME]

                if settings.CSRF_COOKIE_NAME in cookie_parser:
                    del cookie_parser[settings.CSRF_COOKIE_NAME]

                request_data['cookies'].update(cookie_parser)

            elif self.http_headerRE.match(header_name) and not header_name in self.blacklisted_http_headers:

                fixed_name = header_name.replace("http_", "", 1).replace('_', '-')
                request_data['headers'][fixed_name] = header[1]

        # Build the Via header
        protocolVersion = self.protocolRE.match(request.META['SERVER_PROTOCOL'])
        if protocolVersion is not None:
            protocolVersion = protocolVersion.group(1)
        else:
            protocolVersion = '1.1'

        via_header = "%s %s (Wirecloud-python-Proxy/1.1)" % (protocolVersion, get_current_domain(request))
        if 'via' in request_data['headers']:
            request_data['headers']['via'] += ', ' + via_header
        else:
            request_data['headers']['via'] = via_header

        # XFF headers
        if 'x-forwarded-for' in request_data['headers']:
            request_data['headers']['x-forwarded-for'] += ', ' + request.META['REMOTE_ADDR']
        else:
            request_data['headers']['x-forwarded-for'] = request.META['REMOTE_ADDR']

        request_data['headers']['x-forwarded-host'] = host
        if 'x-forwarded-server' in request_data['headers']:
            del request_data['headers']['x-forwarded-server']

        # Pass proxy processors to the new request
        try:
            for processor in get_request_proxy_processors():
                processor.process_request(request_data)
        except ValidationError as e:
            return e.get_response()

        # Cookies
        # BUG FIX: build the Cookie header from request_data['cookies'] (the
        # accumulated, processor-visible jar) instead of the loop-local
        # cookie_parser, which is unbound when the incoming request carried
        # no Cookie header
        cookie_header_content = ', '.join([request_data['cookies'][key].OutputString() for key in request_data['cookies']])
        if cookie_header_content != '':
            request_data['headers']['Cookie'] = cookie_header_content

        # Open the request
        try:
            res = requests.request(request_data['method'], request_data['url'], headers=request_data['headers'], data=request_data['data'], stream=True)
        except requests.exceptions.HTTPError:
            return HttpResponse(status=504)
        except requests.exceptions.ConnectionError:
            return HttpResponse(status=502)

        # Build a Django response
        response = StreamingHttpResponse(res.raw.stream(4096, decode_content=False))

        # Set status code to the response
        response.status_code = res.status_code

        # Add all the headers received from the response
        for header in res.headers:

            header_lower = header.lower()
            if header_lower == 'set-cookie':

                for cookie in res.cookies:
                    response.set_cookie(cookie.name, value=cookie.value, expires=cookie.expires, path=cookie.path)

            elif header_lower == 'via':

                # fold the upstream Via into ours (emitted below)
                via_header = via_header + ', ' + res.headers[header]

            elif is_valid_response_header(header_lower):
                response[header] = res.headers[header]

        # Pass proxy processors to the response
        for processor in get_response_proxy_processors():
            response = processor.process_response(request_data, response)

        response['Via'] = via_header

        return response