Example #1
def understand_request(self, data):
    p = HttpParser()
    recved = len(data)
    try:
        p.execute(data, recved)
        content_type = p.get_headers()["CONTENT-TYPE"]
        method = p.get_method()
        dataOfReq = str(p.recv_body(), "utf-8")
        path = p.get_path()
    except Exception:
        print(sys.exc_info())
        return '400: Bad Request: ' + str(sys.exc_info()[1])
    req = myRequest(content_type, method, dataOfReq, path)
    req.toPrint()
    if req._data == '':  # no request body
        return '204: No Content'
    if req._content_type != 'application/json':
        return '501: Not Implemented'
    if req._method != 'POST':
        return '501: Not Implemented'
    if req._path == '/users/add':
        return self.users_add(req)
    if req._path == '/chats/add':
        return self.chats_add(req)
    if req._path == '/messages/add':
        return self.messages_add(req)
    if req._path == '/chats/get':
        return self.chats_get(req)
    if req._path == '/messages/get':
        return self.messages_get(req)
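All of these snippets follow the same basic pattern: feed raw bytes to HttpParser.execute() and read the results back through accessor methods. Below is a minimal, self-contained sketch of that pattern; the request bytes and host name are made up for illustration.

from http_parser.pyparser import HttpParser

raw = (b"POST /users/add HTTP/1.1\r\n"
       b"Host: example.local\r\n"
       b"Content-Type: application/json\r\n"
       b"Content-Length: 17\r\n"
       b"\r\n"
       b'{"name": "alice"}')

p = HttpParser()
nparsed = p.execute(raw, len(raw))
assert nparsed == len(raw)  # the parser consumed every byte

print(p.get_method())   # 'POST'
print(p.get_path())     # '/users/add'
# header-key casing varies between the C and pure-Python backends, so use .get()
print(p.get_headers().get('CONTENT-TYPE'))
print(p.recv_body())    # b'{"name": "alice"}'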
Example #2
    def run(self) -> None:
        p = HttpParser()
        try:
            p.execute(self.req, len(self.req))

            url = p.get_url()
            method = p.get_method()

            http_pos = url.find('://')
            if http_pos == -1:
                temp = url
            else:
                temp = url[(http_pos + 3):]

            port_pos = temp.find(':')
            host_pos = temp.find('/')
            if host_pos == -1:
                host_pos = len(temp)
            if port_pos == -1 or host_pos < port_pos:
                port = 443 if method == "CONNECT" else 80
            else:
                port = int(temp[port_pos + 1:host_pos])

            host = p.get_headers()['host']
            port_ind = host.find(':')
            if port_ind != -1:
                host = host[:port_ind]
            if method == "CONNECT":
                https_proxy(host, port, self.client)
            else:
                proxy(host, port, self.client, self.req)
        except Exception as e:
            print(e)
Example #3
def parse_start_string(con, data):
    p = HttpParser()
    try:
        p.execute(data, len(data))

        url = p.get_url()
        method = p.get_method()

        http_pos = url.find('://')
        if http_pos == -1:
            temp = url
        else:
            temp = url[(http_pos + 3):]

        port_pos = temp.find(':')
        host_pos = temp.find('/')
        if host_pos == -1:
            host_pos = len(temp)
        if port_pos == -1 or host_pos < port_pos:
            port = 443 if method == "CONNECT" else 80
        else:
            port = int(temp[port_pos + 1:host_pos])

        host = p.get_headers()['host']
        port_ind = host.find(':')
        if port_ind != -1:
            host = host[:port_ind]
        if method == "CONNECT":
            https_proxy(host, port, con)
        else:
            proxy(host, port, con, data)
    except Exception:
        pass
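Examples 2 and 3 share the same host/port extraction. Pulled out into a standalone helper it becomes easy to test. This is my own sketch: unlike the originals, which read the host from the Host header, it derives the host from the URL as well.

def split_target(url, method):
    # strip an optional scheme prefix such as "http://"
    pos = url.find('://')
    rest = url if pos == -1 else url[pos + 3:]

    port_pos = rest.find(':')
    host_pos = rest.find('/')
    if host_pos == -1:
        host_pos = len(rest)
    if port_pos == -1 or host_pos < port_pos:
        # no explicit port: default to 443 for CONNECT tunnels, 80 otherwise
        port = 443 if method == "CONNECT" else 80
        host = rest[:host_pos]
    else:
        port = int(rest[port_pos + 1:host_pos])
        host = rest[:port_pos]
    return host, port

assert split_target("http://example.com:8080/x", "GET") == ("example.com", 8080)
assert split_target("example.com:443", "CONNECT") == ("example.com", 443)
assert split_target("http://example.com/x", "GET") == ("example.com", 80)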
Example #4
class JobClient(protocol.Protocol):
    def __init__(self, factory, params):
        self.parser = HttpParser()
        self.params = params
        self.verb = self.params.get_verb()
        self.headers = self.params.get_headers()
        self.uris = {}
        self.uris["getgame"] = "%s/game" % prefix
        self.uris["gethost"] = "%s/host" % prefix
        self.uris["getservice"] = "%s/service" % prefix
        self.recv = ""
        self.request = ""
        self.payload = None

    def no_unicode(self, text):
        if isinstance(text, unicode):
            return text.encode('utf-8')
        else:
            return text

    def check_json(self):
        try:
            return json.loads(self.recv)
        except:
            return False

    def TimedOut(self):
        pass

    def connectionMade(self):
        if self.verb == "GET":
            self.request = "GET %s HTTP/1.1\r\n%s\r\n" % (self.url,
                                                          self.headers)
        elif self.verb == "POST":
            self.payload = self.params.get_payload()
            self.request = "POST %s HTTP/1.1\r\n%s\r\n%s" % \
                             (self.url, self.headers, self.payload)
        self.transport.write(self.request)

    def dataReceived(self, data):
        self.parser.execute(data, len(data))
        if self.parser.is_headers_complete():
            self.headers = self.parser.get_headers()
        if self.parser.is_partial_body():
            self.recv += self.parser.recv_body()
        if self.parser.is_message_complete():
            if self.check_json():
                self.proc_response()
            else:
                print "Problem with %s" % self.recv

    def proc_response(self):
        # Override in subclass
        pass
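JobClient never wires itself to a connection in this excerpt. A hypothetical Twisted factory around it might look like the following; the factory class and the host/port are assumptions, not part of the original.

from twisted.internet import reactor, protocol

class JobClientFactory(protocol.ClientFactory):
    def __init__(self, params):
        self.params = params

    def buildProtocol(self, addr):
        return JobClient(self, self.params)

# reactor.connectTCP("jobs.example.local", 8080, JobClientFactory(params))
# reactor.run()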
Example #5
def handle_http(sock, addr):
    """A more complicated handler which detects HTTP headers
    """

    def recv_request(p):
        while True:
            data = sock.recv(8192)

            if not data:
                return False

            nb = len(data)
            nparsed = p.execute(data, nb)
            assert nparsed == nb

            if USING_PYPARSER and p.is_headers_complete():
                h = p.get_headers()
                if not (h.get('content-length') or h.get('transfer-encoding')):
                    # pass length=0 to signal end of body
                    # TODO: pyparser requires this, but not the C parser for some reason
                    p.execute(data, 0)
                    return True

            if p.is_message_complete():
                return True

    # main request loop
    while True:
        p = HttpParser()

        if not recv_request(p):
            break

        h = p.get_headers()
        ka = p.should_keep_alive()
        h_connection = 'keep-alive' if ka else 'close'

        resp = create_response('Hello, world!', {'Connection': h_connection})
        sock.sendall(resp)

        if not ka:
            break
        else:
            # we should keep-alive, but yield to drastically improve overall request/response
            # latency
            gyield()

    sock.close()
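The create_response() helper used above is not shown in the example. A minimal sketch of what it could look like, assuming a str body and HTTP/1.1 framing:

def create_response(body, extra_headers=None):
    body = body.encode('utf-8')
    headers = {
        'Content-Type': 'text/plain; charset=utf-8',
        'Content-Length': str(len(body)),
    }
    headers.update(extra_headers or {})
    head = ''.join('%s: %s\r\n' % item for item in headers.items())
    return b'HTTP/1.1 200 OK\r\n' + head.encode('ascii') + b'\r\n' + body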
Example #6
def repeat(req_id):
    sqlite_con = saver.get_connection()
    cursor = sqlite_con.cursor()
    _, host, port, request, is_https = saver.get_request(cursor, req_id)
    sqlite_con.close()
    # Connecting to server
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))
    if is_https:
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        sock = context.wrap_socket(sock, server_hostname=host)
    sock.sendall(request)

    # Getting response
    parser = HttpParser()
    resp = b''
    while True:
        data = sock.recv(buffer_size)
        if not data:
            break

        received = len(data)
        _ = parser.execute(data, received)
        if parser.is_partial_body():
            resp += parser.recv_body()

        if parser.is_message_complete():
            break
    headers = parser.get_headers()
    # Decode answer
    if headers.get('CONTENT-ENCODING') == 'gzip':
        resp = gzip.decompress(resp)
        resp = str(resp, 'utf-8')
    else:
        try:
            resp = resp.decode('utf-8')
        except UnicodeDecodeError:
            print('Body wasn\'t decoded')

    print("{} HTTP/{}.{}".format(parser.get_status_code(), *parser.get_version()))
    for header in headers:
        print('{}: {}'.format(header, headers.get(header)))
    print()
    print(resp)
    print()
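The receive loop above (it appears again inside fetch() in Example #11) is the standard way to drain a response with this parser: call execute() on each chunk, collect recv_body() whenever a partial body is buffered, and stop at is_message_complete(). A sketch of the same loop factored into a reusable helper, with recv_fn standing in for sock.recv:

def read_response(recv_fn, buffer_size=65536):
    parser = HttpParser()
    body = b''
    while True:
        data = recv_fn(buffer_size)
        if not data:  # connection closed by the peer
            break
        received = len(data)
        if parser.execute(data, received) != received:
            raise ValueError('HTTP parse error')
        if parser.is_partial_body():
            body += parser.recv_body()
        if parser.is_message_complete():
            break
    return parser.get_status_code(), parser.get_headers(), body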
Example #7
def tx_req_from_raw_request(filename):
    '''
    Parse a captured plaintext request and replay it.
    '''
    with open(filename, "rb") as fr:
        headers_raw = fr.read()

    hp = HttpParser()
    r = hp.execute(headers_raw, len(headers_raw))
    print("{} len={} parsed={}".format(filename, len(headers_raw), r))
    headers = dict(hp.get_headers())
    body = hp.recv_body()
    url = f'''https://{headers.get("HOST", "")}{hp.get_path()}'''
    method = hp.get_method().lower()
    resp = requests.request(method=method, url=url, headers=headers, data=body)
    print(resp_dump.dump_all(resp))
    print("\n\n")
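One caveat when replaying captured headers verbatim as above: hop-by-hop headers describe the original connection rather than the request, and requests recomputes Content-Length itself. A possible filter, my own addition rather than part of the original:

def strip_hop_by_hop(headers):
    # headers tied to the captured connection, not to the request itself
    hop_by_hop = {"connection", "keep-alive", "proxy-connection",
                  "transfer-encoding", "upgrade", "content-length"}
    return {k: v for k, v in headers.items() if k.lower() not in hop_by_hop}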
Example #8
def check_correct_HEAD(host, port):
    # Check that HEAD returns headers only and no body
    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    rc_is_headers_complete = False
    rc_no_more_data = True
    try:
        s.connect((host, port))
        s.settimeout(1)
        request = ("HEAD /index.html HTTP/1.1\r\n"
                   "Host: %s:%d\r\n"
                   "Connection: Keep-Alive\r\n\r\n" % (host, port))
        s.sendall(request.encode('ascii'))
        while True:
            data = s.recv(1024)

            # data arriving after the headers completed means the server
            # sent a body in response to HEAD, which is incorrect
            if rc_is_headers_complete and data:
                rc_no_more_data = False
                break

            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete():
                rc_is_headers_complete = True

            if p.is_message_complete():
                break
    except socket.timeout:
        pass
    finally:
        s.close()
    return rc_is_headers_complete and rc_no_more_data
Example #9
def main():

    while True:
        # create a fresh parser for each request/response cycle
        p = HttpParser()
        header = read_head()
        if not header:
            return
        p.execute(header, len(header))
        result = ""
        content_length = p.get_headers().get("content-length")
        if content_length is not None:
            body = read_body(int(content_length))
            result = handle(body)

        out_buffer = "HTTP/1.1 200 OK\r\n"
        out_buffer += "Content-Length: "+str(len(result))+"\r\n"
        out_buffer += "\r\n"
        out_buffer += result

        sys.stdout.write(out_buffer)
        sys.stdout.flush()
Example #10
#coding=utf-8
'''
Created on 2012-3-24

@author: fengclient
'''
from http_parser.pyparser import HttpParser

if __name__ == '__main__':
    rsp = open('d:\\172_response.txt').read()
    # if you are reading a text file saved on Windows, you may need to convert \n to \r\n manually
    # universal newline support: http://docs.python.org/library/functions.html#open
    rsp = rsp.replace('\n', '\r\n')
    p = HttpParser()
    p.execute(rsp, len(rsp))
    print(p.get_headers())
Example #11
def fetch(
    url,
    method="GET",
    headers=None,
    body=None,
    connect_timeout=DEFAULT_CONNECT_TIMEOUT,
    request_timeout=DEFAULT_REQUEST_TIMEOUT,
    io_loop=None,
    resolver=resolve,
    max_buffer_size=DEFAULT_BUFFER_SIZE,
    follow_redirects=False,
    max_redirects=DEFAULT_MAX_REDIRECTS,
    validate_cert=config.http_client.validate_certs,
    allow_proxy=False,
    proxies=None,
    user=None,
    password=None,
    content_encoding=None,
    eof_mark=None,
):
    """

    :param url: Fetch URL
    :param method: request method "GET", "POST", "PUT" etc
    :param headers: Dict of additional headers
    :param body: Request body for POST and PUT request
    :param connect_timeout:
    :param request_timeout:
    :param io_loop:
    :param resolver:
    :param follow_redirects:
    :param max_redirects:
    :param validate_cert:
    :param allow_proxy:
    :param proxies:
    :param user:
    :param password:
    :param max_buffer_size:
    :param content_encoding:
    :param eof_mark: Do not consider connection reset as error if
      eof_mark received (string or list)
    :return: code, headers, body
    """
    def get_ssl_options():
        ssl_options = {}
        if validate_cert:
            ssl_options["cert_reqs"] = ssl.CERT_REQUIRED
        return ssl_options

    logger.debug("HTTP %s %s", method, url)
    metrics["httpclient_requests", ("method", method.lower())] += 1
    # Detect proxy when necessary
    io_loop = io_loop or tornado.ioloop.IOLoop.current()
    u = urlparse(str(url))
    use_tls = u.scheme == "https"
    if ":" in u.netloc:
        host, port = u.netloc.rsplit(":", 1)
        port = int(port)
    else:
        host = u.netloc
        port = DEFAULT_PORTS.get(u.scheme)
        if not port:
            raise tornado.gen.Return(
                (ERR_TIMEOUT, {},
                 "Cannot resolve port for scheme: %s" % u.scheme))
    if is_ipv4(host):
        addr = host
    else:
        addr = yield resolver(host)
    if not addr:
        raise tornado.gen.Return(
            (ERR_TIMEOUT, {}, "Cannot resolve host: %s" % host))
    # Detect proxy server
    if allow_proxy:
        proxy = (proxies or SYSTEM_PROXIES).get(u.scheme)
    else:
        proxy = None
    # Connect
    stream = None
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        if use_tls and not proxy:
            stream = tornado.iostream.SSLIOStream(
                s, io_loop=io_loop, ssl_options=get_ssl_options())
        else:
            stream = tornado.iostream.IOStream(s, io_loop=io_loop)
        try:
            if proxy:
                connect_address = proxy
            elif isinstance(addr, tuple):
                connect_address = addr
            else:
                connect_address = (addr, port)

            if proxy:
                logger.debug("Connecting to proxy %s:%s", connect_address[0],
                             connect_address[1])
            yield tornado.gen.with_timeout(
                io_loop.time() + connect_timeout,
                future=stream.connect(connect_address,
                                      server_hostname=u.netloc),
                io_loop=io_loop,
            )
        except tornado.iostream.StreamClosedError:
            metrics["httpclient_timeouts"] += 1
            raise tornado.gen.Return((ERR_TIMEOUT, {}, "Connection refused"))
        except tornado.gen.TimeoutError:
            metrics["httpclient_timeouts"] += 1
            raise tornado.gen.Return((ERR_TIMEOUT, {}, "Connection timed out"))
        deadline = io_loop.time() + request_timeout
        # Proxy CONNECT
        if proxy:
            logger.debug("Sending CONNECT %s:%s", addr, port)
            # Send CONNECT request
            req = b"CONNECT %s:%s HTTP/1.1\r\nUser-Agent: %s\r\n\r\n" % (
                addr,
                port,
                DEFAULT_USER_AGENT,
            )
            try:
                yield tornado.gen.with_timeout(
                    deadline,
                    future=stream.write(req),
                    io_loop=io_loop,
                    quiet_exceptions=(tornado.iostream.StreamClosedError, ),
                )
            except tornado.iostream.StreamClosedError:
                metrics["httpclient_proxy_timeouts"] += 1
                raise tornado.gen.Return(
                    (ERR_TIMEOUT, {},
                     "Connection reset while connecting to proxy"))
            except tornado.gen.TimeoutError:
                metrics["httpclient_proxy_timeouts"] += 1
                raise tornado.gen.Return(
                    (ERR_TIMEOUT, {},
                     "Timed out while sending request to proxy"))
            # Wait for proxy response
            parser = HttpParser()
            while not parser.is_headers_complete():
                try:
                    data = yield tornado.gen.with_timeout(
                        deadline,
                        future=stream.read_bytes(max_buffer_size,
                                                 partial=True),
                        io_loop=io_loop,
                        quiet_exceptions=(
                            tornado.iostream.StreamClosedError, ),
                    )
                except tornado.iostream.StreamClosedError:
                    metrics["httpclient_proxy_timeouts"] += 1
                    raise tornado.gen.Return(
                        (ERR_TIMEOUT, {},
                         "Connection reset while connecting to proxy"))
                except tornado.gen.TimeoutError:
                    metrics["httpclient_proxy_timeouts"] += 1
                    raise tornado.gen.Return(
                        (ERR_TIMEOUT, {},
                         "Timed out while sending request to proxy"))
                received = len(data)
                parsed = parser.execute(data, received)
                if parsed != received:
                    raise tornado.gen.Return(
                        (ERR_PARSE_ERROR, {}, "Parse error"))
            code = parser.get_status_code()
            logger.debug("Proxy response: %s", code)
            if not 200 <= code <= 299:
                raise tornado.gen.Return(
                    (code, parser.get_headers(), "Proxy error: %s" % code))
            # Switch to TLS when necessary
            if use_tls:
                logger.debug("Starting TLS negotiation")
                try:
                    stream = yield tornado.gen.with_timeout(
                        deadline,
                        future=stream.start_tls(
                            server_side=False,
                            ssl_options=get_ssl_options(),
                            server_hostname=u.netloc,
                        ),
                        io_loop=io_loop,
                        quiet_exceptions=(
                            tornado.iostream.StreamClosedError, ),
                    )
                except tornado.iostream.StreamClosedError:
                    metrics["httpclient_proxy_timeouts"] += 1
                    raise tornado.gen.Return(
                        (ERR_TIMEOUT, {},
                         "Connection reset while connecting to proxy"))
                except tornado.gen.TimeoutError:
                    metrics["httpclient_proxy_timeouts"] += 1
                    raise tornado.gen.Return(
                        (ERR_TIMEOUT, {},
                         "Timed out while sending request to proxy"))
        # Process request
        body = body or ""
        content_type = "application/binary"
        if isinstance(body, unicode):
            body = body.encode("utf-8")
        elif not isinstance(body, six.string_types):
            body = ujson.dumps(body)
            content_type = "text/json"
        h = {
            "Host": str(u.netloc),
            "Connection": "close",
            "User-Agent": DEFAULT_USER_AGENT
        }
        if body and content_encoding:
            if content_encoding == CE_DEFLATE:
                # Deflate compression
                h["Content-Encoding"] = CE_DEFLATE
                compress = zlib.compressobj(
                    zlib.Z_DEFAULT_COMPRESSION,
                    zlib.DEFLATED,
                    -zlib.MAX_WBITS,
                    zlib.DEF_MEM_LEVEL,
                    zlib.Z_DEFAULT_STRATEGY,
                )
                body = compress.compress(body) + compress.flush()
            elif content_encoding == CE_GZIP:
                # gzip compression
                h["Content-Encoding"] = CE_GZIP
                compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS,
                                            zlib.DEF_MEM_LEVEL, 0)
                crc = zlib.crc32(body, 0) & 0xFFFFFFFF
                body = "\x1f\x8b\x08\x00%s\x02\xff%s%s%s%s" % (
                    to32u(int(time.time())),
                    compress.compress(body),
                    compress.flush(),
                    to32u(crc),
                    to32u(len(body)),
                )
        if method in REQUIRE_LENGTH_METHODS:
            h["Content-Length"] = str(len(body))
            h["Content-Type"] = content_type
        if user and password:
            # Include basic auth header
            h["Authorization"] = "Basic %s" % (
                "%s:%s" % (user, password)).encode("base64").strip()
        if headers:
            h.update(headers)
        path = u.path
        if u.query:
            path += "?%s" % u.query
        req = b"%s %s HTTP/1.1\r\n%s\r\n\r\n%s" % (
            method,
            path,
            "\r\n".join(b"%s: %s" % (k, h[k]) for k in h),
            body,
        )
        try:
            yield tornado.gen.with_timeout(
                deadline,
                future=stream.write(req),
                io_loop=io_loop,
                quiet_exceptions=(tornado.iostream.StreamClosedError, ),
            )
        except tornado.iostream.StreamClosedError:
            metrics["httpclient_timeouts"] += 1
            raise tornado.gen.Return(
                (ERR_TIMEOUT, {}, "Connection reset while sending request"))
        except tornado.gen.TimeoutError:
            metrics["httpclient_timeouts"] += 1
            raise tornado.gen.Return(
                (ERR_TIMEOUT, {}, "Timed out while sending request"))
        parser = HttpParser()
        response_body = []
        while not parser.is_message_complete():
            try:
                data = yield tornado.gen.with_timeout(
                    deadline,
                    future=stream.read_bytes(max_buffer_size, partial=True),
                    io_loop=io_loop,
                    quiet_exceptions=(tornado.iostream.StreamClosedError, ),
                )
            except tornado.iostream.StreamClosedError:
                if not response_body and config.features.pypy:
                    break
                if eof_mark and response_body:
                    # Check if EOF mark is in received data
                    response_body = ["".join(response_body)]
                    if isinstance(eof_mark, six.string_types):
                        if eof_mark in response_body[0]:
                            break
                    else:
                        found = False
                        for m in eof_mark:
                            if m in response_body[0]:
                                found = True
                                break
                        if found:
                            break
                metrics["httpclient_timeouts"] += 1
                raise tornado.gen.Return(
                    (ERR_READ_TIMEOUT, {}, "Connection reset"))
            except tornado.gen.TimeoutError:
                metrics["httpclient_timeouts"] += 1
                raise tornado.gen.Return(
                    (ERR_READ_TIMEOUT, {}, "Request timed out"))
            received = len(data)
            parsed = parser.execute(data, received)
            if parsed != received:
                raise tornado.gen.Return((ERR_PARSE_ERROR, {}, "Parse error"))
            if parser.is_partial_body():
                response_body += [parser.recv_body()]
        code = parser.get_status_code()
        parsed_headers = parser.get_headers()
        logger.debug("HTTP Response %s", code)
        if 300 <= code <= 399 and follow_redirects:
            # Process redirects
            if max_redirects > 0:
                new_url = parsed_headers.get("Location")
                if not new_url:
                    raise tornado.gen.Return(
                        (ERR_PARSE_ERROR, {}, "No Location header"))
                logger.debug("HTTP redirect %s %s", code, new_url)
                code, parsed_headers, response_body = yield fetch(
                    new_url,
                    method="GET",
                    headers=headers,
                    connect_timeout=connect_timeout,
                    request_timeout=request_timeout,
                    resolver=resolver,
                    max_buffer_size=max_buffer_size,
                    follow_redirects=follow_redirects,
                    max_redirects=max_redirects - 1,
                    validate_cert=validate_cert,
                    allow_proxy=allow_proxy,
                    proxies=proxies,
                )
                raise tornado.gen.Return((code, parsed_headers, response_body))
            else:
                raise tornado.gen.Return((404, {}, "Redirect limit exceeded"))
        # @todo: Process gzip and deflate Content-Encoding
        raise tornado.gen.Return(
            (code, parsed_headers, "".join(response_body)))
    finally:
        if stream:
            stream.close()
        else:
            s.close()
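fetch() hands its result back through tornado.gen.Return, so it is consumed with yield from another coroutine. Hypothetical usage, assuming fetch() is decorated as a Tornado coroutine in its full source:

@tornado.gen.coroutine
def check(url):
    code, headers, body = yield fetch(url, follow_redirects=True)
    if code == 200:
        logger.info("%s: %d bytes", url, len(body))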
Example #12
def _parseHttpRequestResponse(model, http_request, http_response, protocol):
    """Parse an HTTP request/response pair and generate its ASLan++ translation."""
    try:
        global i_tag
        global var_i
        request_parser = HttpParser()
        request_parser.execute(http_request, len(http_request))

        var_i = 0

        # concretization details
        concrete = dict()

        # concretization TAG
        returntag = "tag{}".format(i_tag)

        # URL for concretization
        url = protocol + "://" + request_parser.get_headers()['Host'] + "/" + request_parser.get_url()
        concrete['url'] = url

        # path: replace every non-alphanumeric character with _ (inner re.sub)
        # and strip any leading non-letter characters (outer re.sub), since
        # the resulting constant must begin with a letter
        page = re.sub("^[^a-z]*", "",
                      re.sub("[^a-zA-Z0-9]", "_", urlparse(url).path))
        # add page in the array _aslanpp_constants
        model._page_constants.add(page)

        # method for concretization
        method = request_parser.get_method()
        concrete['method'] = method

        # query string
        post_query_string = ""
        get_query_string = request_parser.get_query_string()
        headers = request_parser.get_headers()
        if (method == "POST" and "Content-Type" in headers
                and "multipart/form-data" not in headers["Content-Type"]):
            # POST parameters (multipart/form-data is not yet supported)
            post_query_string = request_parser.recv_body()
        if "Content-Type" in headers and "multipart/form-data" in headers["Content-Type"]:
            print("multipart/form-data not yet supported")

        # for each request\response I need
        aslanpp_params_no_questionmark = ""
        aslanpp_params_questionmark = ""
        aslanpp_cookie_no_questionmark = ""
        aslanpp_cookie_questionmark = ""
        aslanpp_cookie2_no_questionmark = ""
        aslanpp_cookie2_questionmark = ""

        # convert GET parameters
        if get_query_string:
            # saving the concrete parameters
            concrete_get_params = [
                couple.split("=") for couple in get_query_string.split("&")
            ]

            # parse the parameters and retrieve ASLan++ code and mapping
            aslanpp_no_questionmark, aslanpp_questionmark, mapping_get = _parse_parameters(
                model, concrete_get_params)
            aslanpp_params_no_questionmark += aslanpp_no_questionmark
            aslanpp_params_questionmark += aslanpp_questionmark

            # save get param in concretization
            concrete['get_params'] = mapping_get

        # convert POST parameters
        if post_query_string:
            # saving the concrete parameters
            concrete_post_params = [
                couple.split("=") for couple in post_query_string.split("&")
            ]

            # parse the parameters and retrieve ASLan++ code and mapping
            aslanpp_no_questionmark, aslanpp_questionmark, mapping_post = _parse_parameters(
                model, concrete_post_params)
            aslanpp_params_no_questionmark += aslanpp_no_questionmark
            aslanpp_params_questionmark += aslanpp_questionmark

            # save post params in concretization
            concrete['post_params'] = mapping_post

        if aslanpp_params_no_questionmark == "":
            aslanpp_params_no_questionmark = "none"
        else:
            aslanpp_params_no_questionmark = aslanpp_params_no_questionmark[:-5]
        if aslanpp_params_questionmark == "":
            aslanpp_params_questionmark = "none"
        else:
            aslanpp_params_questionmark = aslanpp_params_questionmark[:-5]

        # convert cookie in the request
        try:
            cookie_request = request_parser.get_headers()['Cookie']

            simple_cookie = Cookie.SimpleCookie(cookie_request)
            concrete_cookie = [[item, simple_cookie[item].value]
                               for item in simple_cookie]

            # parse the parameters and retrieve ASLan++ code, constants, variables and mapping
            cookie_no_questionmark, cookie_questionmark, cookie_mapping = _parse_parameters(
                model, concrete_cookie)
            aslanpp_cookie_no_questionmark += cookie_no_questionmark[:-5]
            aslanpp_cookie_questionmark += cookie_questionmark[:-5]

            # save the mapping cookies
            concrete['cookies'] = cookie_mapping
        except KeyError:
            aslanpp_cookie_no_questionmark = "none"
            aslanpp_cookie_questionmark = "none"

        # check the response
        response_parser = HttpParser()
        response_parser.execute(http_response, len(http_response))

        # Location
        # get the returned page by checking the Location field in
        # the header. If Location is set, it means is a 302 Redirect
        # and the client is receiving a different page back in the response
        try:
            location = response_parser.get_headers()['Location']
            # prepend the letter p since in ASLan++ constants should start with a char
            return_page = "p{}".format(
                urlparse(location).path.partition("?")[0].replace(
                    ".", "_").replace("/", "_"))
            model._page_constants.add(return_page)
        except KeyError:
            return_page = page

        # parse cookie in the response
        try:
            set_cookie_header = response_parser.get_headers()['Set-Cookie']
            # parse new cookie
            simple_cookie = Cookie.SimpleCookie(set_cookie_header)
            cookies = [[item, simple_cookie[item].value]
                       for item in simple_cookie]

            # parse the parameters and retrieve ASLan++ code, constants, variables and mapping
            cookie2_no_questionmark, cookie2_questionmark, cookie2_mapping = _parse_parameters(
                model, cookies)
            aslanpp_cookie2_no_questionmark += cookie2_no_questionmark[:-5]
            aslanpp_cookie2_questionmark += cookie2_questionmark[:-5]

            # save the mapping cookies
            concrete['cookies'] = cookie2_mapping

        except KeyError:
            aslanpp_cookie2_no_questionmark = "none"
            aslanpp_cookie2_questionmark = "none"

        model._webapp_branch += request_skeleton.format(
            page, aslanpp_params_questionmark, aslanpp_cookie_questionmark,
            returntag, return_page, "none", aslanpp_cookie2_no_questionmark,
            returntag)

        model._client_branch += client_skeleton.format(
            page, aslanpp_params_no_questionmark,
            aslanpp_cookie_no_questionmark, returntag, return_page, returntag)

        model._concretization[returntag] = concrete

        # save tag in taglist and increment the tag number
        model._taglist.add(returntag)

        # increment the tag counter
        i_tag += 1

        return returntag
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)