def prepare_response(self, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        if headers.get('transfer-encoding', '') == 'chunked':
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode('utf8'))

        return HTTPResponse(body=body,
                            preload_content=False,
                            **cached["response"])
Example #2
    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_file = cached[u'response'].pop(u'body')
        body = open(os.path.join(self.cache.directory, body_file), 'rb')

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        if headers.get('transfer-encoding', '') == 'chunked':
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )
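This variant also receives the request, so it can refuse to reuse an entry whose recorded Vary headers no longer match. A small sketch of that comparison with made-up header values:

cached_vary = {"Accept-Encoding": "gzip", "Accept-Language": "en"}
request_headers = {"Accept-Encoding": "gzip", "Accept-Language": "fr"}

reusable = all(request_headers.get(header) == value
               for header, value in cached_vary.items())
# reusable is False: Accept-Language differs, so prepare_response would return None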
Example #3
    def prepare_response(self, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        if headers.get('transfer-encoding', '') == 'chunked':
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode('utf8'))

        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )
Example #4
    def process_extra_headers(self, headers):
        # Handle some special-case headers, and pass the remainder to set_extra_headers.
        # (Subclasses shouldn't need to override this.)
        headers = CaseInsensitiveDict(headers)  # email headers are case-insensitive per RFC-822 et seq

        reply_to = headers.pop('Reply-To', None)
        if reply_to:
            # message.extra_headers['Reply-To'] will override message.reply_to
            # (because the extra_headers attr is processed after reply_to).
            # This matches the behavior of Django's EmailMessage.message().
            self.set_reply_to(parse_address_list([reply_to], field="extra_headers['Reply-To']"))

        if 'From' in headers:
            # If message.extra_headers['From'] is supplied, it should override message.from_email,
            # but message.from_email should be used as the envelope_sender. See:
            #   - https://code.djangoproject.com/ticket/9214
            #   - https://github.com/django/django/blob/1.8/django/core/mail/message.py#L269
            #   - https://github.com/django/django/blob/1.8/django/core/mail/backends/smtp.py#L118
            header_from = parse_address_list(headers.pop('From'), field="extra_headers['From']")
            envelope_sender = parse_single_address(self.message.from_email, field="from_email")  # must be single
            self.set_from_email_list(header_from)
            self.set_envelope_sender(envelope_sender)

        if 'To' in headers:
            # If message.extra_headers['To'] is supplied, message.to is used only as the envelope
            # recipients (SMTP.sendmail to_addrs), and the header To is spoofed. See:
            #   - https://github.com/django/django/blob/1.8/django/core/mail/message.py#L270
            #   - https://github.com/django/django/blob/1.8/django/core/mail/backends/smtp.py#L119-L120
            # No current ESP supports this, so this code is mainly here to flag
            # the SMTP backend's behavior as an unsupported feature in Anymail:
            header_to = headers.pop('To')
            self.set_spoofed_to_header(header_to)

        if headers:
            self.set_extra_headers(headers)
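The pops above work no matter how the caller spelled the header names, because the mapping is rebuilt as a CaseInsensitiveDict first. A minimal illustration, assuming CaseInsensitiveDict comes from requests.structures (the snippets here omit their imports):

from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

headers = CaseInsensitiveDict({"reply-to": "support@example.invalid", "X-Custom": "1"})
reply_to = headers.pop("Reply-To", None)  # matches despite the lowercase key in the input
# reply_to == 'support@example.invalid'; only {'X-Custom': '1'} is left for set_extra_headers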
Example #5
 def set_extra_headers(self, headers):
     header_dict = CaseInsensitiveDict(headers)
     if 'Reply-To' in header_dict:
         self.data["ReplyTo"] = header_dict.pop('Reply-To')
     self.data["Headers"] = [
         {"Name": key, "Value": value}
         for key, value in header_dict.items()
     ]
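For a sample input, the two steps above leave a payload fragment like this (header values are invented; the data field names come from the snippet):

from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

data = {}
header_dict = CaseInsensitiveDict({"reply-to": "support@example.invalid", "X-Track-Opens": "false"})
if "Reply-To" in header_dict:                        # case-insensitive membership test
    data["ReplyTo"] = header_dict.pop("Reply-To")
data["Headers"] = [{"Name": key, "Value": value} for key, value in header_dict.items()]
# data == {'ReplyTo': 'support@example.invalid',
#          'Headers': [{'Name': 'X-Track-Opens', 'Value': 'false'}]}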
Example #7
    def retrieve(self, request):

        filename = self._fn(request.url)
        resp = Response()

        headers = utils.read('%s.metadata' % filename)
        if headers:
            try:
                headers = CaseInsensitiveDict(json.loads(headers))
            except ValueError:
                return None
            headers['x-cache'] = 'HIT from %s' % self.__class__.__name__
            resp.url = headers.pop('url', None)
            resp.status_code = headers.pop('status-code', None)
            resp.encoding = headers.pop('encoding', None)
            resp.headers = headers
            resp._content = utils.read(filename)
            return resp
        else:
            return None
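The metadata file read above holds the response headers plus a few bookkeeping fields that are popped back out into Response attributes. A hedged sketch of a payload this retrieve() could consume (layout inferred from the pops, not from the writer):

import json
from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

metadata = json.dumps({
    "url": "https://example.invalid/page",
    "status-code": 200,
    "encoding": "utf-8",
    "Content-Type": "text/html",
})

headers = CaseInsensitiveDict(json.loads(metadata))
url = headers.pop("url", None)                  # popped fields become Response attributes
status_code = headers.pop("status-code", None)
encoding = headers.pop("encoding", None)
# the remaining {'Content-Type': 'text/html'} stays behind as the response headers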
Example #8
    def sync_recipients_to_list(self,
                                list_id,
                                recipients,
                                delete_recipients=True):
        stats = {}
        stats['create_recipients'] = self._create_recipients(recipients)

        all_recipients_lookup = CaseInsensitiveDict(
            {r.email: r.id
             for r in self._all_recipients()})
        list_recipients = list(self._list_recipients(list_id))

        recipients_not_assigned_to_list_lookup = CaseInsensitiveDict({
            r['email']: all_recipients_lookup[r['email']]
            for r in recipients if r['email'] in all_recipients_lookup
        })
        for recipient in list_recipients:
            recipients_not_assigned_to_list_lookup.pop(recipient.email, None)

        recipient_ids_to_add = list(
            recipients_not_assigned_to_list_lookup.values())
        stats['add_recipients_to_list'] = self._add_recipients_to_list(
            list_id, recipient_ids_to_add)

        if delete_recipients:
            contact_to_delete_lookup = CaseInsensitiveDict(
                {c.email: c.id
                 for c in list_recipients})
            for recipient in recipients:
                contact_to_delete_lookup.pop(recipient['email'], None)

            recipients_ids_to_delete = list(contact_to_delete_lookup.values())

            stats[
                'delete_recipient_from_list'] = self._delete_recipient_from_list(
                    list_id, recipients_ids_to_delete)
            stats['delete_recipients'] = self._delete_recipients(
                recipients_ids_to_delete)

        return stats
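The lookups above are keyed by email address, so recipients match even when the stored casing differs from the incoming one. A tiny sketch of that behaviour, assuming CaseInsensitiveDict comes from requests.structures:

from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

to_add = CaseInsensitiveDict({"Alice@Example.com": "r-1", "bob@example.com": "r-2"})
to_add.pop("alice@example.com", None)   # already on the list, removed despite the casing
# list(to_add.values()) == ['r-2']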
Example #9
    def verify_signature(self, query_parameters):
        """Verify the signature provided with the query parameters.

        http://docs.shopify.com/api/tutorials/oauth

        example usage::

            from shopify_trois import Credentials
            from shopify_trois.engines import Json as Shopify
            from urllib.parse import parse_qsl

            credentials = Credentials(
                api_key='your_api_key',
                scope=['read_orders'],
                secret='your_app_secret'
            )

            shopify = Shopify(shop_name="your_store_name", credentials=\
                    credentials)

            query_parameters = parse_qsl("code=238420989938cb70a609f6ece2e2586\
b&shop=yourstore.myshopify.com&timestamp=1373382939&\
signature=6fb122e33c21851c465345b8cb97245e")

            if not shopify.verify_signature(query_parameters):
                raise Exception("invalid signature")

            credentials.code = dict(query_parameters).get('code')

            shopify.setup_access_token()

        :returns: Returns True if the signature is valid.

        """
        params = CaseInsensitiveDict(query_parameters)
        signature = params.pop("signature", None)

        calculated = ["%s=%s" % (k, v) for k, v in params.items()]
        calculated.sort()
        calculated = "".join(calculated)

        calculated = "{secret}{calculated}".format(
            secret=self.credentials.secret,
            calculated=calculated
        )

        md5 = hashlib.md5()
        md5.update(calculated.encode('utf-8'))

        produced = md5.hexdigest()

        return produced == signature
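The check above recomputes an MD5 digest over the app secret followed by the sorted key=value pairs, after popping the signature itself out of the parameters. A standalone sketch of the same calculation with made-up values:

import hashlib
from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

secret = "your_app_secret"
params = CaseInsensitiveDict({"shop": "yourstore.myshopify.com", "timestamp": "1373382939"})
signature = params.pop("signature", None)   # None here because no signature was supplied

calculated = sorted("%s=%s" % (k, v) for k, v in params.items())
digest = hashlib.md5((secret + "".join(calculated)).encode("utf-8")).hexdigest()
# verify_signature would return digest == signature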
Example #10
    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        # This case is also handled in the controller code when creating
        # a cache entry, but is left here for backwards compatibility.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
        if headers.get("transfer-encoding", "") == "chunked":
            headers.pop("transfer-encoding")

        cached["response"]["headers"] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode("utf8"))

        return HTTPResponse(body=body,
                            preload_content=False,
                            **cached["response"])
Example #11
    def verify_signature(self, query_parameters):
        """Verify the signature provided with the query parameters.

        http://docs.shopify.com/api/tutorials/oauth

        example usage::

            from shopify_trois import Credentials
            from shopify_trois.engines import Json as Shopify
            from urllib.parse import parse_qsl

            credentials = Credentials(
                api_key='your_api_key',
                scope=['read_orders'],
                secret='your_app_secret'
            )

            shopify = Shopify(shop_name="your_store_name", credentials=\
                    credentials)

            query_parameters = parse_qsl("code=238420989938cb70a609f6ece2e2586\
b&shop=yourstore.myshopify.com&timestamp=1373382939&\
signature=6fb122e33c21851c465345b8cb97245e")

            if not shopify.verify_signature(query_parameters):
                raise Exception("invalid signature")

            credentials.code = dict(query_parameters).get('code')

            shopify.setup_access_token()

        :returns: Returns True if the signature is valid.

        """
        params = CaseInsensitiveDict(query_parameters)
        signature = params.pop("signature", None)

        calculated = ["%s=%s" % (k, v) for k, v in params.items()]
        calculated.sort()
        calculated = "".join(calculated)

        calculated = "{secret}{calculated}".format(
            secret=self.credentials.secret, calculated=calculated)

        md5 = hashlib.md5()
        md5.update(calculated.encode('utf-8'))

        produced = md5.hexdigest()

        return produced == signature
Example #12
    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        # This case is also handled in the controller code when creating
        # a cache entry, but is left here for backwards compatibility.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
        if headers.get("transfer-encoding", "") == "chunked":
            headers.pop("transfer-encoding")

        cached["response"]["headers"] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode("utf8"))

        return HTTPResponse(body=body, preload_content=False, **cached["response"])
Example #13
    def __request_callback(self, request, uri, response_headers):
        self.__readNextRequest(self.__cnx.verb, self.__cnx.url,
                               self.__cnx.input, self.__cnx.headers)

        status = int(readLine(self.__file))
        self.response_headers = CaseInsensitiveDict(eval(readLine(
            self.__file)))
        output = bytearray(readLine(self.__file), "utf-8")
        readLine(self.__file)

        # make a copy of the headers and remove the ones that interfere with the response handling
        adding_headers = CaseInsensitiveDict(self.response_headers)
        adding_headers.pop("content-length", None)
        adding_headers.pop("transfer-encoding", None)
        adding_headers.pop("content-encoding", None)

        response_headers.update(adding_headers)
        return [status, response_headers, output]
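Before replaying a recorded response, the callback copies the headers and drops the framing and encoding fields so they cannot contradict the already-decoded body. The same pattern in isolation:

from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

recorded = CaseInsensitiveDict({"Content-Length": "123", "Content-Encoding": "gzip",
                                "X-Request-Id": "abc"})

adding_headers = CaseInsensitiveDict(recorded)       # work on a copy, keep the original intact
for name in ("content-length", "transfer-encoding", "content-encoding"):
    adding_headers.pop(name, None)                   # case-insensitive, missing keys ignored
# only {'X-Request-Id': 'abc'} is merged into the mocked response headers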
Example #14
    def __request_callback(self, request, uri, response_headers):
        self.__readNextRequest(self.__cnx.verb, self.__cnx.url, self.__cnx.input, self.__cnx.headers)

        status = int(readLine(self.__file))
        self.response_headers = CaseInsensitiveDict(eval(readLine(self.__file)))
        output = readLine(self.__file)
        readLine(self.__file)

        if atLeastPython3:
            output = bytes(output, 'utf-8')

        # make a copy of the headers and remove the ones that interfere with the response handling
        adding_headers = CaseInsensitiveDict(self.response_headers)
        adding_headers.pop('content-length', None)
        adding_headers.pop('transfer-encoding', None)
        adding_headers.pop('content-encoding', None)

        response_headers.update(adding_headers)
        return [status, response_headers, output]
Example #15
    def reload_config(self, config):
        self._superuser = config['authentication'].get('superuser', {})
        server_parameters = self.get_server_parameters(config)

        conf_changed = hba_changed = ident_changed = local_connection_address_changed = pending_restart = False
        if self._postgresql.state == 'running':
            changes = CaseInsensitiveDict({p: v for p, v in server_parameters.items() if '.' not in p})
            changes.update({p: None for p in self._server_parameters.keys() if not ('.' in p or p in changes)})
            if changes:
                # XXX: query can raise an exception
                for r in self._postgresql.query(('SELECT name, setting, unit, vartype, context '
                                                 + 'FROM pg_catalog.pg_settings ' +
                                                 ' WHERE pg_catalog.lower(name) IN ('
                                                 + ', '.join(['%s'] * len(changes)) +
                                                 ')'), *(k.lower() for k in changes.keys())):
                    if r[4] != 'internal' and r[0] in changes:
                        new_value = changes.pop(r[0])
                        if new_value is None or not compare_values(r[3], r[2], r[1], new_value):
                            if r[4] == 'postmaster':
                                pending_restart = True
                                logger.info('Changed %s from %s to %s (restart required)', r[0], r[1], new_value)
                                if config.get('use_unix_socket') and r[0] == 'unix_socket_directories'\
                                        or r[0] in ('listen_addresses', 'port'):
                                    local_connection_address_changed = True
                            else:
                                logger.info('Changed %s from %s to %s', r[0], r[1], new_value)
                                conf_changed = True
                for param in changes:
                    if param in server_parameters:
                        logger.warning('Removing invalid parameter `%s` from postgresql.parameters', param)
                        server_parameters.pop(param)

            # Check whether user-defined parameters have changed (parameters with a period in the name)
            if not conf_changed:
                for p, v in server_parameters.items():
                    if '.' in p and (p not in self._server_parameters or str(v) != str(self._server_parameters[p])):
                        logger.info('Changed %s from %s to %s', p, self._server_parameters.get(p), v)
                        conf_changed = True
                        break
                if not conf_changed:
                    for p, v in self._server_parameters.items():
                        if '.' in p and (p not in server_parameters or str(v) != str(server_parameters[p])):
                            logger.info('Changed %s from %s to %s', p, v, server_parameters.get(p))
                            conf_changed = True
                            break

            if not server_parameters.get('hba_file') and config.get('pg_hba'):
                hba_changed = self._config.get('pg_hba', []) != config['pg_hba']

            if not server_parameters.get('ident_file') and config.get('pg_ident'):
                ident_changed = self._config.get('pg_ident', []) != config['pg_ident']

        self._config = config
        self._postgresql.set_pending_restart(pending_restart)
        self._server_parameters = server_parameters
        self._adjust_recovery_parameters()
        self._connect_address = config.get('connect_address')
        self._krbsrvname = config.get('krbsrvname')

        # for not-so-obvious connection attempts that may happen outside of psycopg2
        if self._krbsrvname:
            os.environ['PGKRBSRVNAME'] = self._krbsrvname

        if not local_connection_address_changed:
            self.resolve_connection_addresses()

        if conf_changed:
            self.write_postgresql_conf()

        if hba_changed:
            self.replace_pg_hba()

        if ident_changed:
            self.replace_pg_ident()

        if conf_changed or hba_changed or ident_changed:
            logger.info('PostgreSQL configuration items changed, reloading configuration.')
            self._postgresql.reload()
        elif not pending_restart:
            logger.info('No PostgreSQL configuration items changed, nothing to reload.')
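The diff above is built in two passes: current values for plain parameters, then a None marker for every parameter that disappeared from the new configuration. A reduced sketch with invented parameter names (the import is an assumption; the module above may provide its own CaseInsensitiveDict):

from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

new_parameters = {"max_connections": "200", "shared_buffers": "1GB", "pgaudit.log": "ddl"}
old_parameters = {"max_connections": "100", "work_mem": "8MB"}

# Parameters containing '.' are extension settings and are compared separately.
changes = CaseInsensitiveDict({p: v for p, v in new_parameters.items() if "." not in p})
changes.update({p: None for p in old_parameters if not ("." in p or p in changes)})
# changes == {'max_connections': '200', 'shared_buffers': '1GB', 'work_mem': None}
# a None value marks a removed parameter that should fall back to its default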
Example #16
    def reload_config(self, config, sighup=False):
        self._superuser = config['authentication'].get('superuser', {})
        server_parameters = self.get_server_parameters(config)

        conf_changed = hba_changed = ident_changed = local_connection_address_changed = pending_restart = False
        if self._postgresql.state == 'running':
            changes = CaseInsensitiveDict(
                {p: v
                 for p, v in server_parameters.items() if '.' not in p})
            changes.update({
                p: None
                for p in self._server_parameters.keys()
                if not ('.' in p or p in changes)
            })
            if changes:
                if 'wal_buffers' in changes:  # we need to calculate the default value of wal_buffers
                    undef = [
                        p for p in ('shared_buffers', 'wal_segment_size',
                                    'wal_block_size') if p not in changes
                    ]
                    changes.update({p: None for p in undef})
                # XXX: query can raise an exception
                old_values = {
                    r[0]: r
                    for r in self._postgresql.query((
                        'SELECT name, setting, unit, vartype, context ' +
                        'FROM pg_catalog.pg_settings ' +
                        ' WHERE pg_catalog.lower(name) = ANY(%s)'
                    ), [k.lower() for k in changes.keys()])
                }
                if 'wal_buffers' in changes:
                    self._handle_wal_buffers(old_values, changes)
                    for p in undef:
                        del changes[p]

                for r in old_values.values():
                    if r[4] != 'internal' and r[0] in changes:
                        new_value = changes.pop(r[0])
                        if new_value is None or not compare_values(
                                r[3], r[2], r[1], new_value):
                            conf_changed = True
                            if r[4] == 'postmaster':
                                pending_restart = True
                                logger.info(
                                    'Changed %s from %s to %s (restart might be required)',
                                    r[0], r[1], new_value)
                                if config.get('use_unix_socket') and r[0] == 'unix_socket_directories'\
                                        or r[0] in ('listen_addresses', 'port'):
                                    local_connection_address_changed = True
                            else:
                                logger.info('Changed %s from %s to %s', r[0],
                                            r[1], new_value)
                for param in changes:
                    if param in server_parameters:
                        logger.warning(
                            'Removing invalid parameter `%s` from postgresql.parameters',
                            param)
                        server_parameters.pop(param)

            # Check whether user-defined parameters have changed (parameters with a period in the name)
            for p, v in server_parameters.items():
                if '.' in p and (p not in self._server_parameters
                                 or str(v) != str(self._server_parameters[p])):
                    logger.info('Changed %s from %s to %s', p,
                                self._server_parameters.get(p), v)
                    conf_changed = True
            for p, v in self._server_parameters.items():
                if '.' in p and (p not in server_parameters
                                 or str(v) != str(server_parameters[p])):
                    logger.info('Changed %s from %s to %s', p, v,
                                server_parameters.get(p))
                    conf_changed = True

            if not server_parameters.get('hba_file') and config.get('pg_hba'):
                hba_changed = self._config.get('pg_hba',
                                               []) != config['pg_hba']

            if not server_parameters.get('ident_file') and config.get(
                    'pg_ident'):
                ident_changed = self._config.get('pg_ident',
                                                 []) != config['pg_ident']

        self._config = config
        self._postgresql.set_pending_restart(pending_restart)
        self._server_parameters = server_parameters
        self._adjust_recovery_parameters()
        self._krbsrvname = config.get('krbsrvname')

        # for not-so-obvious connection attempts that may happen outside of psycopg2
        if self._krbsrvname:
            os.environ['PGKRBSRVNAME'] = self._krbsrvname

        if not local_connection_address_changed:
            self.resolve_connection_addresses()

        if conf_changed:
            self.write_postgresql_conf()

        if hba_changed:
            self.replace_pg_hba()

        if ident_changed:
            self.replace_pg_ident()

        if sighup or conf_changed or hba_changed or ident_changed:
            logger.info('Reloading PostgreSQL configuration.')
            self._postgresql.reload()
            if self._postgresql.major_version >= 90500:
                time.sleep(1)
                try:
                    pending_restart = self._postgresql.query(
                        'SELECT COUNT(*) FROM pg_catalog.pg_settings'
                        ' WHERE pending_restart').fetchone()[0] > 0
                    self._postgresql.set_pending_restart(pending_restart)
                except Exception as e:
                    logger.warning('Exception %r when running query', e)
        else:
            logger.info(
                'No PostgreSQL configuration items changed, nothing to reload.'
            )
Example #17
def sniffit():
    """
    Perform an HTTP/HTTPS request to the address that the user specified
    :return:

    TODO Make the Google Verification a separate module with annotation
    """
    parsed_url = urlparse(request.json["url"])
    app.logger.info(request.remote_addr + " " + parsed_url.netloc)

    # Processing the headers to be sent to the URL that the user defined in the interface.
    # What we are doing here is making sure that the user can't override some headers that we want to force, such as
    # X-Forwarded-For.
    request_headers = CaseInsensitiveDict({header["key"]: header["value"] for header in request.json["headers"]})

    request_headers["X-Forwarded-For"] = request.remote_addr
    request_headers["X-Anti-Abuse"] = app.config.get("ABUSE_CONTACT")

    request_headers = {string.capwords(k, "-"): v for (k, v) in request_headers.items()}

    # Request Parameters
    if type(request.json["parameters"]) is list:
        request_parameters = "&".join([cgi.escape(header["key"])+"="+cgi.escape(header["value"]) for header in request.json["parameters"]])
    else:
        request_parameters = request.json["parameters"]

    # Base Response JSON
    response_json = {'success': False, 'sniffed': None, 'messages': []}

    try:
        if string.lower(request.json["method"]) in ["get", "head", "options"]:
            response = requests.request(request.json["method"], request.json["url"], verify=False,
                                        params=request_parameters, headers=request_headers)
        else:
            response = requests.request(request.json["method"], request.json["url"],
                                        verify=False, data=request_parameters, headers=request_headers)

        # I prefer to have the capitalized headers in the frontend
        # This will convert the headers from 'content-type' to 'Content-Type'
        response_headers = {string.capwords(k, "-"): v for (k, v) in response.headers.items()}

        # This is for the administrators only, so there is no need for the end-user to see this
        request_headers.pop("X-Anti-Abuse")
        request_headers.pop("X-Forwarded-For")

        # Create a history of redirects to inform the user
        redirections = [{"url": redirect.headers["location"]} for redirect in response.history]

        # Geo Location
        ipaddress = socket.gethostbyname(parsed_url.netloc)
        geolocation_response = requests.get("http://ip-api.com/json/" + ipaddress)

        response_json["success"] = True
        response_json["showRecaptcha"] = recaptcha_handler.is_token_invalid()
        response_json["sniffed"] = {
            'headers': {
                'response': response_headers,
                'request': request_headers
            },
            'ipaddress': ipaddress,
            'geolocation': geolocation_response.json(),
            'ssl': None,
            'redirect': redirections,
            'body': base64.b64encode(cgi.escape(response.text.encode("UTF-8"))),
            'size': response.headers.get("content-length", False),
            'ssize': len(response.text.encode("UTF-8")),
            'elapsed': response.elapsed.total_seconds(),
            'status': {
                "reason": response.reason,
                "code": str(response.status_code)
            }
        }
    except Exception as e:
        raise RequestFailedException(repr(e))

    return jsonify(response_json)
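The handler above targets Python 2 (string.lower and cgi.escape are gone from current Python 3), but the header-forcing trick is version-independent: assigning through the CaseInsensitiveDict overwrites any spoofed value regardless of its casing. A small Python 3 sketch with an invented address:

import string
from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

request_headers = CaseInsensitiveDict({"x-forwarded-for": "spoofed-by-client"})
request_headers["X-Forwarded-For"] = "203.0.113.7"   # forced value wins, whatever the casing

normalized = {string.capwords(k, "-"): v for k, v in request_headers.items()}
# normalized == {'X-Forwarded-For': '203.0.113.7'}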
Example #18
class HttpRequest(object):
    """Represents a HTTP request.

    URL can be given without query parameters, to be added later using "format_parameters".

    :param str method: HTTP method (GET, HEAD, etc.)
    :param str url: At least complete scheme/host/path
    :param dict[str,str] headers: HTTP headers
    :param files: Files list.
    :param data: Body to be sent.
    :type data: bytes or str.
    """
    def __init__(self, method, url, headers=None, files=None, data=None):
        # type: (str, str, Mapping[str, str], Any, Any) -> None
        self.method = method
        self.url = url
        self.headers = CaseInsensitiveDict(headers)
        self.files = files
        self.data = data

    def __repr__(self):
        return '<HttpRequest [%s]>' % (self.method)

    @property
    def query(self):
        """The query parameters of the request as a dict."""
        query = urlparse(self.url).query
        if query:
            return {
                p[0]: p[-1]
                for p in [p.partition('=') for p in query.split('&')]
            }
        return {}

    @property
    def body(self):
        """Alias to data."""
        return self.data

    @body.setter
    def body(self, value):
        self.data = value

    @staticmethod
    def _format_data(data):
        # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]]
        """Format field data according to whether it is a stream or
        a string for a form-data request.

        :param data: The request field data.
        :type data: str or file-like object.
        """
        if hasattr(data, 'read'):
            data = cast(IO, data)
            data_name = None
            try:
                if data.name[0] != '<' and data.name[-1] != '>':
                    data_name = os.path.basename(data.name)
            except (AttributeError, TypeError):
                pass
            return (data_name, data, "application/octet-stream")
        return (None, cast(str, data))

    def format_parameters(self, params):
        # type: (Dict[str, str]) -> None
        """Format parameters into a valid query string.
        It's assumed all parameters have already been quoted as
        valid URL strings.

        :param dict params: A dictionary of parameters.
        """
        query = urlparse(self.url).query
        if query:
            self.url = self.url.partition('?')[0]
            existing_params = {
                p[0]: p[-1]
                for p in [p.partition('=') for p in query.split('&')]
            }
            params.update(existing_params)
        query_params = ["{}={}".format(k, v) for k, v in params.items()]
        query = '?' + '&'.join(query_params)
        self.url = self.url + query

    def set_streamed_data_body(self, data):
        """Set a streamable data body.

        :param data: The request field data.
        """
        if not isinstance(data, binary_type) and \
                not any(hasattr(data, attr) for attr in ["read", "__iter__", "__aiter__"]):
            raise TypeError(
                "A streamable data source must be an open file-like object or iterable."
            )
        self.data = data
        self.files = None

    def set_xml_body(self, data):
        """Set an XML element tree as the body of the request.

        :param data: The request field data.
        """
        if data is None:
            self.data = None
        else:
            bytes_data = ET.tostring(data, encoding="utf8")
            self.data = bytes_data.replace(b"encoding='utf8'",
                                           b"encoding='utf-8'")
            self.headers['Content-Length'] = str(len(self.data))
        self.files = None

    def set_json_body(self, data):
        """Set a JSON-friendly object as the body of the request.

        :param data: The request field data.
        """
        if data is None:
            self.data = None
        else:
            self.data = json.dumps(data)
            self.headers['Content-Length'] = str(len(self.data))
        self.files = None

    def set_formdata_body(self, data=None):
        """Set form-encoded data as the body of the request.

        :param data: The request field data.
        """
        if data is None:
            data = {}
        content_type = self.headers.pop('Content-Type',
                                        None) if self.headers else None

        if content_type and content_type.lower(
        ) == 'application/x-www-form-urlencoded':
            self.data = {f: d for f, d in data.items() if d is not None}
            self.files = None
        else:  # Assume "multipart/form-data"
            self.files = {
                f: self._format_data(d)
                for f, d in data.items() if d is not None
            }
            self.data = None

    def set_bytes_body(self, data):
        """Set generic bytes as the body of the request.

        :param data: The request field data.
        """
        if data:
            self.headers['Content-Length'] = str(len(data))
        self.data = data
        self.files = None
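A hedged usage sketch of the class above (the URL and body values are invented; this assumes HttpRequest as defined here is in scope):

request = HttpRequest("POST", "https://example.invalid/items")
request.format_parameters({"api-version": "2020-01-01"})
request.set_json_body({"name": "widget"})

# request.url -> 'https://example.invalid/items?api-version=2020-01-01'
# header lookups go through CaseInsensitiveDict, so casing does not matter:
assert request.headers["content-length"] == str(len(request.data))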
Example #19
class ClientRequest(object):
    """Represents a HTTP request.

    URL can be given without query parameters, to be added later using "format_parameters".

    Instance can be created without data, to be added later using "add_content"

    Instance can be created without files, to be added later using "add_formdata"

    :param str method: HTTP method (GET, HEAD, etc.)
    :param str url: At least complete scheme/host/path
    :param dict[str,str] headers: HTTP headers
    :param files: Files list.
    :param data: Body to be sent.
    :type data: bytes or str.
    """
    def __init__(self, method, url, headers=None, files=None, data=None):
        # type: (str, str, Mapping[str, str], Any, Any) -> None
        self.method = method
        self.url = url
        self.headers = CaseInsensitiveDict(headers)
        self.files = files
        self.data = data

    def __repr__(self):
        return '<ClientRequest [%s]>' % (self.method)

    @property
    def body(self):
        """Alias to data."""
        return self.data

    @body.setter
    def body(self, value):
        self.data = value

    def format_parameters(self, params):
        # type: (Dict[str, str]) -> None
        """Format parameters into a valid query string.
        It's assumed all parameters have already been quoted as
        valid URL strings.

        :param dict params: A dictionary of parameters.
        """
        query = urlparse(self.url).query
        if query:
            self.url = self.url.partition('?')[0]
            existing_params = {
                p[0]: p[-1]
                for p in [p.partition('=') for p in query.split('&')]
            }
            params.update(existing_params)
        query_params = ["{}={}".format(k, v) for k, v in params.items()]
        query = '?' + '&'.join(query_params)
        self.url = self.url + query

    def add_content(self, data):
        # type: (Optional[Union[Dict[str, Any], ET.Element]]) -> None
        """Add a body to the request.

        :param data: Request body data, can be a json serializable
         object (e.g. dictionary) or a generator (e.g. file data).
        """
        if data is None:
            return

        if isinstance(data, ET.Element):
            bytes_data = ET.tostring(data, encoding="utf8")
            self.headers['Content-Length'] = str(len(bytes_data))
            self.data = bytes_data
            return

        # By default, assume JSON
        try:
            self.data = json.dumps(data)
            self.headers['Content-Length'] = str(len(self.data))
        except TypeError:
            self.data = data

    @staticmethod
    def _format_data(data):
        # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]]
        """Format field data according to whether it is a stream or
        a string for a form-data request.

        :param data: The request field data.
        :type data: str or file-like object.
        """
        if hasattr(data, 'read'):
            data = cast(IO, data)
            data_name = None
            try:
                if data.name[0] != '<' and data.name[-1] != '>':
                    data_name = os.path.basename(data.name)
            except (AttributeError, TypeError):
                pass
            return (data_name, data, "application/octet-stream")
        return (None, cast(str, data))

    def add_formdata(self, content=None):
        # type: (Optional[Dict[str, str]]) -> None
        """Add data as a multipart form-data request to the request.

        We only deal with file-like objects or strings at this point.
        The request is not yet streamed.

        :param dict headers: Any headers to add to the request.
        :param dict content: Dictionary of the fields of the formdata.
        """
        if content is None:
            content = {}
        content_type = self.headers.pop('Content-Type',
                                        None) if self.headers else None

        if content_type and content_type.lower(
        ) == 'application/x-www-form-urlencoded':
            # Do NOT use "add_content" that assumes input is JSON
            self.data = {f: d for f, d in content.items() if d is not None}
        else:  # Assume "multipart/form-data"
            self.files = {
                f: self._format_data(d)
                for f, d in content.items() if d is not None
            }
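add_formdata decides between urlencoded and multipart by popping Content-Type out of the headers, and the pop is case-insensitive. A hedged usage sketch, assuming the ClientRequest class above is in scope:

request = ClientRequest("POST", "https://example.invalid/upload")
request.headers["content-type"] = "application/x-www-form-urlencoded"
request.add_formdata({"field": "value", "skipped": None})

# The Content-Type header was popped (despite the lowercase spelling) and the None entry
# dropped: request.data == {'field': 'value'} and 'Content-Type' not in request.headers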
Example #20
    def proxy(self):
        pass

        # special case: process info request
        if self.path == '/headers':
            response = json.dumps(
                {key.lower(): value
                 for key, value in self.headers.items()}).encode('UTF8')
            self.send_response(200)
            self.send_header('content-type', 'application/json')
            self.send_header('content-length', str(len(response)))
            self.end_headers()
            self.wfile.write(response)
            return

        # process request urls
        c_mode, c_target = self.process_url(self.path)
        p_mode, p_target = self.process_url(self.headers['referer'])
        parsed = urlsplit(c_target)

        # handle invalid requests
        if c_mode is None or c_target is None:
            if p_mode is None or p_target is None:
                self.send_error(400)
            else:
                self.send_response(307)
                self.send_header(
                    'location',
                    '/' + p_mode + '/' + urljoin(p_target, self.path))
                self.send_header('vary', 'referer')
                self.send_header('content-length', '0')
                self.end_headers()
            return

        # process request body
        content = None
        if 'content-length' in self.headers:
            content = self.rfile.read(int(self.headers['content-length']))

        # process request headers
        headers = CaseInsensitiveDict(self.headers)
        headers.pop('user-agent', None)
        headers.pop('accept-encoding', None)
        headers.pop('te', None)
        headers.pop('connection', None)
        headers.pop('host', None)
        headers['origin'] = parsed.scheme + '://' + parsed.netloc
        headers['referer'] = p_target

        # process request cookies
        cookies = {
            key: value.value
            for key, value in SimpleCookie(headers.pop('cookie',
                                                       None)).items()
        }
        cookies.pop('__cfduid', None)
        cookies.pop('cf_clearance', None)

        # send upstream request
        resp = ChiselSession().request(
            method=self.command,
            url=c_target,
            data=content,
            headers=headers,
            cookies=cookies,
            allow_redirects=False,
        )
        if resp is None:
            self.send_error(502)
            return

        # send initial response
        self.send_response(resp.status_code)
        for keep in ('set-cookie', 'vary'):
            if keep in resp.headers:
                self.send_header(keep, resp.headers[keep])
        if 'location' in resp.headers:
            self.send_header(
                'location', '/' + c_mode + '/' +
                urljoin(c_target, resp.headers['location']))

        # end for HEAD requests
        if self.command == 'HEAD':
            self.end_headers()
            return
        body = resp.content

        # process response body
        if resp.headers['content-type'].startswith('text/html'):
            if c_mode == 'browser':
                soup = self.make_tasty_soup(resp, True)
                for tag in soup('script'):
                    if tag.string:
                        tag.string = self.expand_urls_in_text(
                            tag.string, parsed.scheme)
                with open('intercept.js', 'r') as fp:
                    tag = soup.new_tag('script')
                    tag.append(fp.read())
                    soup.insert(0, tag)
                body = soup.encode()
            else:
                body = self.make_tasty_soup(resp, False).encode()
            resp.headers['content-length'] = str(len(body))

        elif c_mode == 'browser' and resp.headers['content-type'].startswith(
                'text/') and (resp.encoding or resp.apparent_encoding):
            body = self.expand_urls_in_text(resp.text, parsed.scheme).encode(
                resp.encoding or resp.apparent_encoding)
            resp.headers['content-length'] = str(len(body))

        # send response body and related headers
        self.send_header('content-type', resp.headers['content-type'])
        self.send_header('content-length', resp.headers['content-length'])
        self.end_headers()
        self.wfile.write(body)
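The cookie handling above strips Cloudflare's own bookkeeping cookies before the request is replayed upstream. The parsing-and-popping step in isolation:

from http.cookies import SimpleCookie

cookies = {key: value.value
           for key, value in SimpleCookie("session=abc; __cfduid=deadbeef").items()}
cookies.pop("__cfduid", None)
cookies.pop("cf_clearance", None)      # absent here, so the default keeps this a no-op
# cookies == {'session': 'abc'}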
Example #21
class StateMachine(object):
    """ Helper class that tracks the state of different entities. """
    def __init__(self, bus):
        self._states = CaseInsensitiveDict()
        self._bus = bus
        self._lock = threading.Lock()

    def entity_ids(self, domain_filter=None):
        """ List of entity ids that are being tracked. """
        if domain_filter is not None:
            domain_filter = domain_filter.lower()

            return [
                state.entity_id for key, state in self._states.lower_items()
                if util.split_entity_id(key)[0] == domain_filter
            ]
        else:
            return list(self._states.keys())

    def all(self):
        """ Returns a list of all states. """
        return [state.copy() for state in self._states.values()]

    def get(self, entity_id):
        """ Returns the state of the specified entity. """
        state = self._states.get(entity_id)

        # Make a copy so people won't mutate the state
        return state.copy() if state else None

    def get_since(self, point_in_time):
        """
        Returns all states that have been changed since point_in_time.
        """
        point_in_time = util.strip_microseconds(point_in_time)

        with self._lock:
            return [
                state for state in self._states.values()
                if state.last_updated >= point_in_time
            ]

    def is_state(self, entity_id, state):
        """ Returns True if entity exists and is specified state. """
        return (entity_id in self._states
                and self._states[entity_id].state == state)

    def remove(self, entity_id):
        """ Removes an entity from the state machine.

        Returns boolean to indicate if an entity was removed. """
        with self._lock:
            return self._states.pop(entity_id, None) is not None

    def set(self, entity_id, new_state, attributes=None):
        """ Set the state of an entity, add entity if it does not exist.

        Attributes is an optional dict to specify attributes of this state.

        If you just update the attributes and not the state, last changed will
        not be affected.
        """

        new_state = str(new_state)
        attributes = attributes or {}

        with self._lock:
            old_state = self._states.get(entity_id)

            is_existing = old_state is not None
            same_state = is_existing and old_state.state == new_state
            same_attr = is_existing and old_state.attributes == attributes

            # If state did not exist or is different, set it
            if not (same_state and same_attr):
                last_changed = old_state.last_changed if same_state else None

                state = self._states[entity_id] = \
                    State(entity_id, new_state, attributes, last_changed)

                event_data = {'entity_id': entity_id, 'new_state': state}

                if old_state:
                    event_data['old_state'] = old_state

                self._bus.fire(EVENT_STATE_CHANGED, event_data)

    def track_change(self, entity_ids, action, from_state=None, to_state=None):
        """
        Track specific state changes.
        entity_ids, from_state and to_state can be string or list.
        Use list to match multiple.

        Returns the listener that listens on the bus for EVENT_STATE_CHANGED.
        Pass the return value into hass.bus.remove_listener to remove it.
        """
        from_state = _process_match_param(from_state)
        to_state = _process_match_param(to_state)

        # Ensure it is a lowercase list with entity ids we want to match on
        if isinstance(entity_ids, str):
            entity_ids = (entity_ids.lower(), )
        else:
            entity_ids = tuple(entity_id.lower() for entity_id in entity_ids)

        @ft.wraps(action)
        def state_listener(event):
            """ The listener that listens for specific state changes. """
            if event.data['entity_id'].lower() in entity_ids and \
                    'old_state' in event.data and \
                    _matcher(event.data['old_state'].state, from_state) and \
                    _matcher(event.data['new_state'].state, to_state):

                action(event.data['entity_id'], event.data['old_state'],
                       event.data['new_state'])

        self._bus.listen(EVENT_STATE_CHANGED, state_listener)

        return state_listener
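entity_ids() relies on lower_items(), which yields lowercased keys, so the domain filter matches however the entity id was originally cased. A reduced sketch (the import is an assumption about where CaseInsensitiveDict comes from):

from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

states = CaseInsensitiveDict({"Light.Kitchen": "on", "switch.tv": "off"})
kitchen = [key for key, _ in states.lower_items() if key.split(".")[0] == "light"]
# kitchen == ['light.kitchen']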
Example #22
def sniffit():
    """
    Perform an HTTP/HTTPS request to the address that the user specified
    :return:

    TODO Make the Google Verification a separate module with annotation
    """
    parsed_url = urlparse(request.json["url"])
    app.logger.info(request.remote_addr + " " + parsed_url.netloc)

    # Processing the headers to be sent to the URL that the user defined in the interface.
    # What we are doing here is making sure that the user can't override some headers that we want to force, such as
    # X-Forwarded-For.
    request_headers = CaseInsensitiveDict(
        {header["key"]: header["value"]
         for header in request.json["headers"]})

    request_headers["X-Forwarded-For"] = request.remote_addr
    request_headers["X-Anti-Abuse"] = app.config.get("ABUSE_CONTACT")

    request_headers = {
        string.capwords(k, "-"): v
        for (k, v) in request_headers.items()
    }

    # Request Parameters
    if type(request.json["parameters"]) is list:
        request_parameters = "&".join([
            cgi.escape(header["key"]) + "=" + cgi.escape(header["value"])
            for header in request.json["parameters"]
        ])
    else:
        request_parameters = request.json["parameters"]

    # Base Response JSON
    response_json = {'success': False, 'sniffed': None, 'messages': []}

    try:
        if string.lower(request.json["method"]) in ["get", "head", "options"]:
            response = requests.request(request.json["method"],
                                        request.json["url"],
                                        verify=False,
                                        params=request_parameters,
                                        headers=request_headers)
        else:
            response = requests.request(request.json["method"],
                                        request.json["url"],
                                        verify=False,
                                        data=request_parameters,
                                        headers=request_headers)

        # I prefer to have the capitalized headers in the frontend
        # This will convert the headers from 'content-type' to 'Content-Type'
        response_headers = {
            string.capwords(k, "-"): v
            for (k, v) in response.headers.items()
        }

        # This is for the administrators only, so there is no need for the end-user to see this
        request_headers.pop("X-Anti-Abuse")
        request_headers.pop("X-Forwarded-For")

        # Create a history of redirects to inform the user
        redirections = [{
            "url": redirect.headers["location"]
        } for redirect in response.history]

        # Geo Location
        ipaddress = socket.gethostbyname(parsed_url.netloc)
        geolocation_response = requests.get("http://ip-api.com/json/" +
                                            ipaddress)

        response_json["success"] = True
        response_json["showRecaptcha"] = recaptcha_handler.is_token_invalid()
        response_json["sniffed"] = {
            'headers': {
                'response': response_headers,
                'request': request_headers
            },
            'ipaddress': ipaddress,
            'geolocation': geolocation_response.json(),
            'ssl': None,
            'redirect': redirections,
            'body':
            base64.b64encode(cgi.escape(response.text.encode("UTF-8"))),
            'size': response.headers.get("content-length", False),
            'ssize': len(response.text.encode("UTF-8")),
            'elapsed': response.elapsed.total_seconds(),
            'status': {
                "reason": response.reason,
                "code": str(response.status_code)
            }
        }
    except Exception as e:
        raise RequestFailedException(repr(e))

    return jsonify(response_json)
Example #23
    def get_tasks(sp_result: dict) -> dict:

        out_params = CaseInsensitiveDict(sp_result['OutputParameters'])
        post_sp = out_params.pop(post_sp_outparam, None)
        post_sp_args = json.loads(out_params.pop(post_sp_args_outparam, '{}'))
        if post_sp:
            post_url = urljoin(task_sp_url, '../' + post_sp)
            post_sp_args.update(out_params)
        else:
            post_url = None
            post_sp_args = None

        if db_type and isinstance(db_type,
                                  str) and db_type[:3].lower() == 'ora':
            result_model = 'DictOfList'
        else:
            result_model = 'SqlTvp'

        serial_tasks = []

        for rs in sp_result['ResultSets']:

            parallel_tasks = []

            for row in rs:
                task = CaseInsensitiveDict(row)
                mdx_query = task.get(mdx_column)
                if not mdx_query:
                    if parallel_tasks:
                        continue  # skip a row if mdx_column is missing from a subsequent row
                    else:
                        break  # skip the whole resultset if mdx_column is missing from the first row

                callback_sp = task.get(callback_sp_column)

                if callback_sp:
                    column_map = json.loads(task.get(column_map_column))
                    callback_url = urljoin(task_sp_url, '../' + callback_sp)
                    callback_args = json.loads(
                        task.get(callback_args_column, '{}'))
                    if out_params:
                        callback_args.update(out_params)

                    parallel_tasks.append({
                        "(://)": _url_mdx_reader,
                        "(...)": {
                            "connection_string": mdx_conn_str,
                            "command_text": mdx_query,
                            "result_model": result_model,
                            "column_mapping": column_map,
                            "pass_result_to_url": callback_url,
                            "more_args": callback_args
                        },
                        "(:!!)": timeout
                    })

            if parallel_tasks:
                serial_tasks.append({"[###]": parallel_tasks})

        if serial_tasks:
            if len(serial_tasks) == 1:
                svc_grp = serial_tasks[0]
            else:
                svc_grp = {"[+++]": serial_tasks}
        else:
            svc_grp = None

        return (svc_grp, post_url, post_sp_args)
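The routing fields are pulled out of the stored procedure's output parameters case-insensitively, with JSON defaults when they are absent. A sketch with invented parameter names (the real names come from post_sp_outparam and friends in the enclosing configuration):

import json
from requests.structures import CaseInsensitiveDict  # assumed source of CaseInsensitiveDict

out_params = CaseInsensitiveDict({"POST_SP": "publish_cube",
                                  "POST_SP_ARGS": '{"region": "EU"}',
                                  "BATCH_ID": "42"})

post_sp = out_params.pop("post_sp", None)                        # case-insensitive pop
post_sp_args = json.loads(out_params.pop("post_sp_args", "{}"))  # '{}' default when absent
post_sp_args.update(out_params)                                  # leftover params ride along
# post_sp == 'publish_cube'; post_sp_args == {'region': 'EU', 'BATCH_ID': '42'}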
Example #24
class APICall(DataContainer):
    HEADER_AUTHORIZATION = "Authorization"
    HEADER_REAL_IP = "X-Real-IP"
    HEADER_FORWARDED_FOR = "X-Forwarded-For"
    """ Standard headers """

    _transaction_headers = _get_headers("Trx")
    """ Transaction ID """

    _redacted_headers = {
        HEADER_AUTHORIZATION: " ",
        "Cookie": "=",
    }
    """ Headers whose value should be redacted. Maps header name to partition char """
    @property
    def HEADER_TRANSACTION(self):
        return self._transaction_headers[0]

    _client_headers = _get_headers("Client")
    """ Client """

    @property
    def HEADER_CLIENT(self):
        return self._client_headers[0]

    _worker_headers = _get_headers("Worker")
    """ Worker (machine) ID """

    @property
    def HEADER_WORKER(self):
        return self._worker_headers[0]

    _impersonate_as_headers = _get_headers("Impersonate-As")
    """ Impersonate as someone else (using his identity and permissions) """

    @property
    def HEADER_IMPERSONATE_AS(self):
        return self._impersonate_as_headers[0]

    _act_as_headers = _get_headers("Act-As")
    """ Act as someone else (using his identity, but with your own role and permissions) """

    @property
    def HEADER_ACT_AS(self):
        return self._act_as_headers[0]

    _async_headers = _get_headers("Async")
    """ Specifies that this call should be done asynchronously """

    @property
    def HEADER_ASYNC(self):
        return self._async_headers[0]

    def __init__(
        self,
        endpoint_name,
        remote_addr=None,
        endpoint_version: PartialVersion = PartialVersion("1.0"),
        data=None,
        batched_data=None,
        headers=None,
        files=None,
        trx=None,
        host=None,
        auth_cookie=None,
    ):
        super().__init__(data=data, batched_data=batched_data)

        self._id = database.utils.id()
        self._files = files  # currently a dict of key to Flask's FileStorage
        self._start_ts = time.time()
        self._end_ts = 0
        self._duration = 0
        self._endpoint_name = endpoint_name
        self._remote_addr = remote_addr
        assert isinstance(endpoint_version, PartialVersion), endpoint_version
        self._requested_endpoint_version = endpoint_version
        self._actual_endpoint_version = None
        self._headers = CaseInsensitiveDict()
        self._kpis = {}
        self._log_api = True
        if headers:
            self._headers.update(headers)
        self._result = APICallResult()
        self._auth = None
        self._impersonation = None
        if trx:
            self.set_header(self._transaction_headers, trx)
        self._requires_authorization = True
        self._host = host
        self._auth_cookie = auth_cookie
        self._json_flags = {}

    @property
    def files(self):
        return self._files

    @property
    def id(self):
        return self._id

    @property
    def requires_authorization(self):
        return self._requires_authorization

    @requires_authorization.setter
    def requires_authorization(self, value):
        self._requires_authorization = value

    @property
    def log_api(self):
        return self._log_api

    @log_api.setter
    def log_api(self, value):
        self._log_api = value

    def assign_new_id(self):
        self._id = database.utils.id()

    def get_header(self, header, default=None):
        """
        Get header value
        :param header: Header name options (more than one supported, listed by priority)
        :param default: Default value if no such headers were found
        """
        for option in header if isinstance(header,
                                           (tuple, list)) else (header, ):
            if option in self._headers:
                return self._headers[option]
        return default

    def clear_header(self, header):
        """
        Clear header value
        :param header: Header name options (more than one supported, all will be cleared)
        """
        for value in header if isinstance(header,
                                          (tuple, list)) else (header, ):
            self._headers.pop(value, None)

    def set_header(self, header, value):
        """
        Set header value
        :param header: header name (if a list is provided, first item is used)
        :param value: Value to set
        :return:
        """
        self._headers[header[0] if isinstance(header,
                                              (tuple,
                                               list)) else header] = value
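    # Illustrative usage sketch (hypothetical header names, not from the source):
    # each helper accepts either a single header name or a priority-ordered
    # tuple/list of aliases such as the one returned by _get_headers("Trx").
    #
    #     call.set_header(("X-Trx", "X-Alt-Trx"), "abc123")  # writes "X-Trx" only
    #     call.get_header(("X-Trx", "X-Alt-Trx"))            # -> "abc123"
    #     call.clear_header(("X-Trx", "X-Alt-Trx"))          # removes both aliases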

    @property
    def real_ip(self):
        """ Obtain visitor's IP address """
        return (self.get_header(self.HEADER_FORWARDED_FOR)
                or self.get_header(self.HEADER_REAL_IP) or self._remote_addr
                or "untrackable")

    @property
    def failed(self):
        return self.result and self.result.code != 200

    @property
    def duration(self):
        return self._duration

    @property
    def endpoint_name(self):
        return self._endpoint_name

    @property
    def requested_endpoint_version(self) -> PartialVersion:
        return self._requested_endpoint_version

    @property
    def auth(self):
        """ Authenticated payload (Token or Basic) """
        return self._auth

    @auth.setter
    def auth(self, value):
        self._auth = value

    @property
    def impersonation_headers(self):
        return {
            k: v
            for k, v in self._headers.items()
            if k in (self._impersonate_as_headers + self._act_as_headers)
        }

    @property
    def impersonate_as(self):
        return self.get_header(self._impersonate_as_headers)

    @property
    def act_as(self):
        return self.get_header(self._act_as_headers)

    @property
    def impersonation(self):
        return self._impersonation

    @impersonation.setter
    def impersonation(self, value):
        self._impersonation = value

    @property
    def identity(self):
        if self.impersonation:
            if not self.impersonation.identity:
                raise Exception("Missing impersonate identity")
            return self.impersonation.identity
        if self.auth:
            if not self.auth.identity:
                raise Exception(
                    "Missing authorized identity (not authorized?)")
            return self.auth.identity
        raise MissingIdentity("Missing identity")

    @property
    def actual_endpoint_version(self):
        return self._actual_endpoint_version

    @actual_endpoint_version.setter
    def actual_endpoint_version(self, value):
        self._actual_endpoint_version = value

    @property
    def headers(self):
        return dict(self._headers.items())

    @property
    def kpis(self):
        """
        Key Performance Indicators, holding things like number of returned frames/rois, etc.
        :return:
        """
        return self._kpis

    @property
    def trx(self):
        return self.get_header(self._transaction_headers, self.id)

    @trx.setter
    def trx(self, value):
        self.set_header(self._transaction_headers, value)

    @property
    def client(self):
        return self.get_header(self._client_headers)

    @property
    def worker(self):
        return self.get_worker(default="<unknown>")

    def get_worker(self, default=None):
        return self.get_header(self._worker_headers, default)

    @property
    def authorization(self):
        """ Call authorization data used to authenticate the call """
        return self.get_header(self.HEADER_AUTHORIZATION)

    @property
    def result(self):
        return self._result

    @property
    def exec_async(self):
        return self.get_header(self._async_headers) is not None

    @exec_async.setter
    def exec_async(self, value):
        if value:
            self.set_header(self._async_headers, "1")
        else:
            self.clear_header(self._async_headers)
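    # Illustrative usage sketch: the async flag is carried purely as a header,
    # so setting the property writes the header and clearing the property
    # removes it.
    #
    #     call.exec_async = True    # header set to "1"; exec_async -> True
    #     call.exec_async = False   # header removed;    exec_async -> False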

    @property
    def host(self):
        return self._host

    @property
    def auth_cookie(self):
        return self._auth_cookie

    @property
    def json_flags(self):
        return self._json_flags

    @property
    def extra_meta_fields(self):
        return {}

    def mark_end(self):
        self._end_ts = time.time()
        self._duration = int((self._end_ts - self._start_ts) * 1000)

    def get_response(self,
                     include_stack: bool = None
                     ) -> Tuple[Union[dict, str], str]:
        """
        Get the response for this call.
        :param include_stack: If True, stack trace stored in this call's result should
        be included in the response (default follows configuration)
        :return: Response data (encoded according to self.content_type) and the data's content type
        """
        include_stack = (include_stack if include_stack is not None else
                         config.get("apiserver.return_stack_to_caller", False))

        def make_version_number(
                version: PartialVersion) -> Union[None, float, str]:
            """
            Client versions <= 2.0 expect endpoint versions in float format and will throw an exception otherwise
            """
            if version is None:
                return None
            if self.requested_endpoint_version < PartialVersion("2.1"):
                return float(str(version))
            return str(version)
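            # Illustrative sketch: a client that requested endpoint version 2.0
            # receives e.g. PartialVersion("2.4") as the float 2.4, while a
            # client requesting version 2.1 or later receives the string "2.4".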

        if self.result.raw_data and not self.failed:
            # endpoint returned raw data and no error was detected, return raw data, no fancy dicts
            return self.result.raw_data, self.result.content_type

        else:
            res = {
                "meta": {
                    "id": self.id,
                    "trx": self.trx,
                    "endpoint": {
                        "name":
                        self.endpoint_name,
                        "requested_version":
                        make_version_number(self.requested_endpoint_version),
                        "actual_version":
                        make_version_number(self.actual_endpoint_version),
                    },
                    "result_code": self.result.code,
                    "result_subcode": self.result.subcode,
                    "result_msg": self.result.msg,
                    "error_stack":
                    self.result.traceback if include_stack else None,
                    "error_data": self.result.error_data,
                    **self.extra_meta_fields,
                },
                "data": self.result.data,
            }
            if self.content_type.lower() == JSON_CONTENT_TYPE:
                try:
                    func = json.dumps if self._json_flags.pop(
                        "ensure_ascii", True) else json.dumps_notascii
                    res = func(res, **(self._json_flags or {}))
                except Exception as ex:
                    # JSON serialization may fail, most likely due to the data or error_data, so drop them and try again
                    if not (self.result.data or self.result.error_data):
                        raise
                    self.result.data = None
                    self.result.error_data = None
                    msg = "Error serializing response data: " + str(ex)
                    self.set_error_result(code=500,
                                          subcode=0,
                                          msg=msg,
                                          include_stack=False)
                    return self.get_response()

            return res, self.content_type

    def set_error_result(self,
                         msg,
                         code=500,
                         subcode=0,
                         include_stack=False,
                         error_data=None):
        tb = format_exc() if include_stack else None
        self._result = APICallResult(
            data=self._result.data,
            code=code,
            subcode=subcode,
            msg=msg,
            traceback=tb,
            error_data=error_data,
            cookies=self._result.cookies,
        )

    def get_redacted_headers(self):
        headers = self.headers.copy()
        if not self.requires_authorization or self.auth:
            # Don't log sensitive header values if the call doesn't require authorization or was successfully
            #  authorized. This means they are only logged in full for calls that failed to authorize, which
            #  should help in debugging authorization errors.
            for header, sep in self._redacted_headers.items():
                if header in headers:
                    prefix, _, redact = headers[header].partition(sep)
                    headers[
                        header] = prefix + sep + f"<{len(redact)} bytes redacted>"
        return headers
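        # Illustrative sketch (hypothetical header values): redaction keeps the
        # prefix up to each header's partition character and hides the rest, e.g.
        #     "Authorization: Bearer eyJhbGciOi..." -> "Bearer <N bytes redacted>"
        #     "Cookie: session=deadbeef"            -> "session=<8 bytes redacted>"
        # Redaction is skipped for calls that required authorization but failed
        # to obtain it, so those can still be debugged from the logs.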