Example #1
    def test_invalid_indexed_header(self):
        d = Decoder()

        # Refer to an indexed header that is too large.
        data = b'\xBE\x86\x84\x01\x0fwww.example.com'
        with pytest.raises(InvalidTableIndex):
            d.decode(data)
Example #2
    def test_invalid_indexed_literal(self):
        d = Decoder()

        # Refer to an index that is too large.
        data = b'\x82\x86\x84\x7f\x0a\x0fwww.example.com'
        with pytest.raises(InvalidTableIndex):
            d.decode(data)
Example #3
    def test_invalid_indexed_header(self):
        d = Decoder()

        # Refer to an indexed header that is too large.
        data = b'\xBE\x86\x84\x01\x0fwww.example.com'
        with pytest.raises(InvalidTableIndex):
            d.decode(data)
Example #4
    def test_invalid_indexed_literal(self):
        d = Decoder()

        # Refer to an index that is too large.
        data = b'\x82\x86\x84\x7f\x0a\x0fwww.example.com'
        with pytest.raises(InvalidTableIndex):
            d.decode(data)
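
The InvalidTableIndex failures above are about table references that point past the end of the table. In Example #1 the offending byte is the very first one: 0xBE is an Indexed Header Field (RFC 7541 §6.1) whose 7-bit index is 62, one past the 61-entry static table, and the dynamic table is still empty. A minimal sketch of that arithmetic (the names are mine, not part of hpack's API):

first_byte = 0xBE
assert first_byte & 0x80       # high bit set: Indexed Header Field (RFC 7541 §6.1)
index = first_byte & 0x7F      # the remaining 7 bits carry the table index
print(index)                   # 62 -> past the 61-entry static table; dynamic table is empty
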
Example #5
    def test_utf8_errors_raise_hpack_decoding_error(self):
        d = Decoder()

        # Invalid UTF-8 data.
        data = b'\x82\x86\x84\x01\x10www.\x07\xaa\xd7\x95\xd7\xa8\xd7\x94.com'

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #6
    def test_utf8_errors_raise_hpack_decoding_error(self):
        d = Decoder()

        # Invalid UTF-8 data.
        data = b'\x82\x86\x84\x01\x10www.\x07\xaa\xd7\x95\xd7\xa8\xd7\x94.com'

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #7
    def test_zero_length_header(self):
        """
        If a header has a name of zero length it is invalid and the HPACK
        decoder raises a ZeroLengthHeaderNameError.
        """
        d = Decoder(max_header_list_size=44)
        data = b"@\x80\x80"
        with pytest.raises(ZeroLengthHeaderNameError):
            d.decode(data)
Example #8
    def test_max_header_list_size(self):
        """
        If the header block is larger than the max_header_list_size, the HPACK
        decoder throws an OversizedHeaderListError.
        """
        d = Decoder(max_header_list_size=44)
        data = b'\x14\x0c/sample/path'

        with pytest.raises(OversizedHeaderListError):
            d.decode(data)
Example #9
    def test_table_size_middle_rejected(self):
        """
        If a header table size change comes anywhere but first in the header
        block, it is forbidden.
        """
        d = Decoder()
        data = b'\x82?a\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #10
    def test_max_header_list_size(self):
        """
        If the header block is larger than the max_header_list_size, the HPACK
        decoder throws an OversizedHeaderListError.
        """
        d = Decoder(max_header_list_size=44)
        data = b'\x14\x0c/sample/path'

        with pytest.raises(OversizedHeaderListError):
            d.decode(data)
Example #11
    def test_table_size_middle_rejected(self):
        """
        If a header table size change comes anywhere but first in the header
        block, it is forbidden.
        """
        d = Decoder()
        data = b'\x82?a\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #12
    def test_table_size_not_adjusting(self):
        """
        If the header table size is shrunk, and then the remote peer doesn't
        join in the shrinking, then an error is raised.
        """
        d = Decoder()
        d.max_allowed_table_size = 128
        data = b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'

        with pytest.raises(InvalidTableSizeError):
            d.decode(data)
Example #13
    def test_table_size_not_adjusting(self):
        """
        If the header table size is shrunk, and then the remote peer doesn't
        join in the shrinking, then an error is raised.
        """
        d = Decoder()
        d.max_allowed_table_size = 128
        data = b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'

        with pytest.raises(InvalidTableSizeError):
            d.decode(data)
Example #14
    def test_header_table_size_change_above_maximum(self):
        """
        If a header table size change is received that exceeds the maximum
        allowed table size, it is rejected.
        """
        d = Decoder()
        d.max_allowed_table_size = 127
        data = b'?a\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'

        with pytest.raises(InvalidTableSizeError):
            d.decode(data)
Example #15
    def test_header_table_size_change_above_maximum(self):
        """
        If a header table size change is received that exceeds the maximum
        allowed table size, it is rejected.
        """
        d = Decoder()
        d.max_allowed_table_size = 127
        data = b'?a\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'

        with pytest.raises(InvalidTableSizeError):
            d.decode(data)
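
The table-size tests above hinge on the b'?a' prefix. Here is a minimal sketch of HPACK's prefixed-integer decoding (RFC 7541 §5.1); the helper is my own, not part of hpack's API. 0x3F starts a dynamic table size update whose 5-bit prefix is saturated (31), so the following byte 0x61 (97) is added, giving a requested size of 128 bytes. With max_allowed_table_size set to 127 that request is one byte too large, and sending no update at all after a shrink is likewise refused, which is what the surrounding tests assert.

def decode_integer(data, prefix_bits):
    # HPACK prefixed integer (RFC 7541 Section 5.1): read the prefix, then
    # continuation bytes of 7 bits each while the high bit is set.
    mask = (1 << prefix_bits) - 1
    value = data[0] & mask
    if value < mask:
        return value, 1
    shift, i = 0, 1
    while True:
        byte = data[i]
        value += (byte & 0x7F) << shift
        shift += 7
        i += 1
        if not byte & 0x80:
            return value, i

print(decode_integer(b'?a', 5))   # (128, 2): a dynamic table size update to 128 bytes
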
Example #16
    def test_truncated_header_name(self):
        """
        If a header name is truncated an error is raised.
        """
        d = Decoder()
        # This is a simple header block that has a bad ending. The interesting
        # part begins on the second line. This indicates a string that has
        # literal name and value. The name is a 5 character huffman-encoded
        # string that is only three bytes long.
        data = (b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
                b'\x00\x85\xf2\xb2J')

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #17
    def test_request_examples_with_huffman(self):
        """
        This section shows the same examples as the previous section, but
        using Huffman encoding for the literal values.
        """
        d = Decoder()

        first_header_set = [
            (':method', 'GET',),
            (':scheme', 'http',),
            (':path', '/',),
            (':authority', 'www.example.com'),
        ]
        first_header_table = first_header_set[::-1]
        first_data = (
            b'\x82\x86\x84\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
        )

        assert d.decode(first_data) == first_header_set
        assert list(d.header_table.dynamic_entries) == []

        second_header_set = [
            (':method', 'GET',),
            (':scheme', 'http',),
            (':path', '/',),
            (':authority', 'www.example.com',),
            ('cache-control', 'no-cache'),
        ]
        second_data = (
            b'\x82\x86\x84\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
            b'\x0f\t\x86\xa8\xeb\x10d\x9c\xbf'
        )

        assert d.decode(second_data) == second_header_set
        assert list(d.header_table.dynamic_entries) == []

        third_header_set = [
            (':method', 'GET',),
            (':scheme', 'https',),
            (':path', '/index.html',),
            (':authority', 'www.example.com',),
            ('custom-key', 'custom-value'),
        ]
        third_data = (
            b'\x82\x87\x85\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff@'
            b'\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8\xb4\xbf'
        )

        assert d.decode(third_data) == third_header_set
        assert len(d.header_table.dynamic_entries) == 1
Example #18
    def test_request_examples_without_huffman(self):
        """
        This section shows several consecutive header sets, corresponding to
        HTTP requests, on the same connection.
        """
        d = Decoder()
        first_header_set = [
            (':method', 'GET',),
            (':scheme', 'http',),
            (':path', '/',),
            (':authority', 'www.example.com'),
        ]
        # The first_header_table doesn't contain 'authority'
        first_data = b'\x82\x86\x84\x01\x0fwww.example.com'

        assert d.decode(first_data) == first_header_set
        assert list(d.header_table.dynamic_entries) == []

        # This request takes advantage of the differential encoding of header
        # sets.
        second_header_set = [
            (':method', 'GET',),
            (':scheme', 'http',),
            (':path', '/',),
            (':authority', 'www.example.com',),
            ('cache-control', 'no-cache'),
        ]
        second_data = (
            b'\x82\x86\x84\x01\x0fwww.example.com\x0f\t\x08no-cache'
        )

        assert d.decode(second_data) == second_header_set
        assert list(d.header_table.dynamic_entries) == []

        third_header_set = [
            (':method', 'GET',),
            (':scheme', 'https',),
            (':path', '/index.html',),
            (':authority', 'www.example.com',),
            ('custom-key', 'custom-value'),
        ]
        third_data = (
            b'\x82\x87\x85\x01\x0fwww.example.com@\ncustom-key\x0ccustom-value'
        )

        assert d.decode(third_data) == third_header_set
        # Don't check the header table here, it's just too complex to be
        # reliable. Check its length though.
        assert len(d.header_table.dynamic_entries) == 1
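
For readers who want to run the decode outside pytest, here is a standalone sketch of the first block from the test above (the non-Huffman request example of RFC 7541 Appendix C.3.1). It assumes Decoder is importable from the top-level hpack package.

from hpack import Decoder

d = Decoder()
block = b'\x82\x86\x84\x01\x0fwww.example.com'
for name, value in d.decode(block):
    print(name, value)
# :method GET
# :scheme http
# :path /
# :authority www.example.com
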
Example #19
    def test_truncated_header_name(self):
        """
        If a header name is truncated an error is raised.
        """
        d = Decoder()
        # This is a simple header block that has a bad ending. The interesting
        # part begins on the second line. This indicates a string that has
        # literal name and value. The name is a 5 character huffman-encoded
        # string that is only three bytes long.
        data = (
            b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
            b'\x00\x85\xf2\xb2J'
        )

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #20
    def test_truncated_header_value(self):
        """
        If a header value is truncated an error is raised.
        """
        d = Decoder()
        # This is a simple header block that has a bad ending. The interesting
        # part begins on the second line. This indicates a string that has
        # literal name and value. The name is a 5 character huffman-encoded
        # string, but the entire EOS character has been written over the end.
        # This causes hpack to see the header value as being supposed to be
        # 622462 bytes long, which it clearly is not, and so this must fail.
        data = (b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
                b'\x00\x85\xf2\xb2J\x87\xff\xff\xff\xfd%B\x7f')

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #21
    def test_ordering_applies_to_encoding(self, special_keys, boring_keys):
        """
        When encoding a dictionary the special keys all appear first.
        """
        def _prepend_colon(k):
            if isinstance(k, unicode):
                return u':' + k
            else:
                return b':' + k

        special_keys = set(map(_prepend_colon, special_keys))
        input_dict = {
            k: b'testval'
            for k in itertools.chain(special_keys, boring_keys)
        }
        e = Encoder()
        d = Decoder()
        encoded = e.encode(input_dict)
        decoded = iter(d.decode(encoded, raw=True))

        received_special = set()
        received_boring = set()
        expected_special = set(map(_to_bytes, special_keys))
        expected_boring = set(map(_to_bytes, boring_keys))

        for _ in special_keys:
            k, _ = next(decoded)
            received_special.add(k)
        for _ in boring_keys:
            k, _ = next(decoded)
            received_boring.add(k)

        assert expected_special == received_special
        assert expected_boring == received_boring
Example #22
    def test_can_decode_a_story(self, story):
        d = Decoder()

        # We test against draft 9 of the HPACK spec.
        if story['draft'] != 9:
            skip("We test against draft 9, not draft %d" % story['draft'])

        for case in story['cases']:
            try:
                d.header_table_size = case['header_table_size']
            except KeyError:
                pass
            decoded_headers = d.decode(unhexlify(case['wire']))

            # The correct headers are a list of dicts, which is annoying.
            correct_headers = [
                (item[0], item[1])
                for header in case['headers']
                for item in header.items()
            ]
            correct_headers = correct_headers
            assert correct_headers == decoded_headers
            assert all(
                isinstance(header, HeaderTuple) for header in decoded_headers
            )
Example #23
    def test_truncated_header_value(self):
        """
        If a header value is truncated an error is raised.
        """
        d = Decoder()
        # This is a simple header block that has a bad ending. The interesting
        # part begins on the second line. This indicates a string that has
        # literal name and value. The name is a 5 character huffman-encoded
        # string, but the entire EOS character has been written over the end.
        # This causes hpack to see the header value as being supposed to be
        # 622462 bytes long, which it clearly is not, and so this must fail.
        data = (
            b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
            b'\x00\x85\xf2\xb2J\x87\xff\xff\xff\xfd%B\x7f'
        )

        with pytest.raises(HPACKDecodingError):
            d.decode(data)
Example #24
    def test_raw_decoding(self):
        """
        The header field representation is decoded as a raw byte string instead
        of UTF-8
        """
        d = Decoder()
        header_set = [(b'\x00\x01\x99\x30\x11\x22\x55\x21\x89\x14', b'custom-header')]
        data = b'\x40\x0a\x00\x01\x99\x30\x11\x22\x55\x21\x89\x14\x0dcustom-header'

        assert d.decode(data, raw=True) == header_set
Example #25
    def test_indexed_header_field(self):
        """
        The header field representation uses an indexed header field, from
        the static table.
        """
        d = Decoder()
        header_set = [(':method', 'GET')]
        data = b'\x82'

        assert d.decode(data) == header_set
        assert list(d.header_table.dynamic_entries) == []
Example #26
    def test_literal_header_field_without_indexing(self):
        """
        The header field representation uses an indexed name and a literal
        value.
        """
        d = Decoder()
        header_set = [(':path', '/sample/path')]
        data = b'\x04\x0c/sample/path'

        assert d.decode(data) == header_set
        assert list(d.header_table.dynamic_entries) == []
Example #27
    def test_can_decode_multiple_header_table_size_changes(self):
        """
        If multiple header table size changes are sent in at once, they are
        successfully decoded.
        """
        d = Decoder()
        data = b'?a?\xe1\x1f\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
        expect = [(':method', 'GET'), (':scheme', 'https'), (':path', '/'),
                  (':authority', '127.0.0.1:8443')]

        assert d.decode(data) == expect
Example #28
    def test_resizing_header_table(self):
        # We need to decode a substantial number of headers, to populate the
        # header table. This string isn't magic: it's the output from the
        # equivalent test for the Encoder.
        d = Decoder()
        data = (
            b'\x82\x87D\x87a\x07\xa4\xacV4\xcfA\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0'
            b'\xab\x90\xf4\xff@\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8'
            b'\xb4\xbfz\xbc\xd0\x7ff\xa2\x81\xb0\xda\xe0S\xfa\xd02\x1a\xa4\x9d'
            b'\x13\xfd\xa9\x92\xa4\x96\x854\x0c\x8aj\xdc\xa7\xe2\x81\x02\xef}'
            b'\xa9g{\x81qp\x7fjb):\x9d\x81\x00 \x00@\x150\x9a\xc2\xca\x7f,\x05'
            b'\xc5\xc1S\xb0I|\xa5\x89\xd3M\x1fC\xae\xba\x0cA\xa4\xc7\xa9\x8f3'
            b'\xa6\x9a?\xdf\x9ah\xfa\x1du\xd0b\r&=Ly\xa6\x8f\xbe\xd0\x01w\xfe'
            b'\xbeX\xf9\xfb\xed\x00\x17{@\x8a\xfc[=\xbdF\x81\xad\xbc\xa8O\x84y'
            b'\xe7\xde\x7f')
        d.decode(data)

        # Resize the header table to a size so small that nothing can be in it.
        d.header_table_size = 40
        assert len(d.header_table.dynamic_entries) == 0
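
The resize test above relies on the dynamic-table accounting rule of RFC 7541 §4.1: each entry is charged its name length plus its value length plus a fixed 32-byte overhead. A short sketch of that rule (the helper is mine, not hpack API); a 40-byte table cannot hold even a small entry, so shrinking header_table_size to 40 evicts everything.

def entry_size(name: bytes, value: bytes) -> int:
    # RFC 7541 Section 4.1: entry size = len(name) + len(value) + 32 bytes of overhead
    return len(name) + len(value) + 32

print(entry_size(b':authority', b'www.example.com'))   # 57, already larger than 40
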
Example #29
    def test_literal_header_field_without_indexing(self):
        """
        The header field representation uses an indexed name and a literal
        value.
        """
        d = Decoder()
        header_set = [(':path', '/sample/path')]
        data = b'\x04\x0c/sample/path'

        assert d.decode(data) == header_set
        assert list(d.header_table.dynamic_entries) == []
Example #30
    def test_indexed_header_field(self):
        """
        The header field representation uses an indexed header field, from
        the static table.
        """
        d = Decoder()
        header_set = [(':method', 'GET')]
        data = b'\x82'

        assert d.decode(data) == header_set
        assert list(d.header_table.dynamic_entries) == []
Example #31
    def test_resizing_header_table(self):
        # We need to decode a substantial number of headers, to populate the
        # header table. This string isn't magic: it's the output from the
        # equivalent test for the Encoder.
        d = Decoder()
        data = (
            b'\x82\x87D\x87a\x07\xa4\xacV4\xcfA\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0'
            b'\xab\x90\xf4\xff@\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8'
            b'\xb4\xbfz\xbc\xd0\x7ff\xa2\x81\xb0\xda\xe0S\xfa\xd02\x1a\xa4\x9d'
            b'\x13\xfd\xa9\x92\xa4\x96\x854\x0c\x8aj\xdc\xa7\xe2\x81\x02\xef}'
            b'\xa9g{\x81qp\x7fjb):\x9d\x81\x00 \x00@\x150\x9a\xc2\xca\x7f,\x05'
            b'\xc5\xc1S\xb0I|\xa5\x89\xd3M\x1fC\xae\xba\x0cA\xa4\xc7\xa9\x8f3'
            b'\xa6\x9a?\xdf\x9ah\xfa\x1du\xd0b\r&=Ly\xa6\x8f\xbe\xd0\x01w\xfe'
            b'\xbeX\xf9\xfb\xed\x00\x17{@\x8a\xfc[=\xbdF\x81\xad\xbc\xa8O\x84y'
            b'\xe7\xde\x7f'
        )
        d.decode(data)

        # Resize the header table to a size so small that nothing can be in it.
        d.header_table_size = 40
        assert len(d.header_table.dynamic_entries) == 0
Example #32
    def test_literal_header_field_with_indexing(self):
        """
        The header field representation uses a literal name and a literal
        value.
        """
        d = Decoder()
        header_set = [('custom-key', 'custom-header')]
        data = b'\x40\x0acustom-key\x0dcustom-header'

        assert d.decode(data) == header_set
        assert list(d.header_table) == [(n.encode('utf-8'), v.encode('utf-8'))
                                        for n, v in header_set]
Example #33
    def test_raw_decoding(self):
        """
        The header field representation is decoded as a raw byte string instead
        of UTF-8
        """
        d = Decoder()
        header_set = [(b'\x00\x01\x99\x30\x11\x22\x55\x21\x89\x14',
                       b'custom-header')]
        data = (b'\x40\x0a\x00\x01\x99\x30\x11\x22\x55\x21\x89\x14\x0d'
                b'custom-header')

        assert d.decode(data, raw=True) == header_set
Example #34
    def test_can_encode_a_story_with_huffman(self, raw_story):
        d = Decoder()
        e = Encoder()

        for case in raw_story['cases']:
            # The input headers are a list of dicts, which is annoying.
            input_headers = [(item[0], item[1]) for header in case['headers'] for item in header.items()]

            encoded = e.encode(input_headers, huffman=True)
            decoded_headers = d.decode(encoded)

            assert input_headers == decoded_headers
Example #35
    def test_resizing_header_table(self):
        # We need to decode a substantial number of headers, to populate the
        # header table. This string isn't magic: it's the output from the
        # equivalent test for the Encoder.
        d = Decoder()
        data = (
            b'\x82\x88F\x87\x087A\x07"9\xffC\x8b\xdbm\x88>h\xd1\xcb\x12%' +
            b'\xba\x7f\x00\x88N\xb0\x8bt\x97\x90\xfa\x7f\x89N\xb0\x8bt\x97\x9a' +
            b'\x17\xa8\xff|\xbe\xefo\xaa\x96\xb4\x05\x04/G\xfa\xefBT\xc8\xb6' +
            b'\x19\xf5t|\x19\x11_Gz\x13\xd1\xf4\xf0\xe8\xfd\xf4\x18\xa4\xaf' +
            b'\xab\xa1\xfc\xfd\x86\xa4\x85\xff}\x1e\xe1O&\x81\xcab\x94\xc57G' +
            b'\x05<qo\x98\x1a\x92\x17U\xaf\x88\xf9\xc43\x8e\x8b\xe9C\x9c\xb5' +
            b'%\x11SX\x1ey\xc7E\xff\xcf=\x17\xd2\x879jJ"\xa6\xb0<\xf4_W\x95' +
            b'\xa5%\x9d?\xd0\x7f]^V\x94\x95\xff\x00\x8a\xfd\xcb\xf2\xd7\x92 ' +
            b'\x89|F\x11\x84\xae\xbb+\xb3'
        )
        d.decode(data)

        # Resize the header table to a size so small that nothing can be in it.
        d.header_table_size = 40
        assert len(d.header_table.dynamic_entries) == 0
Example #36
    def test_resizing_header_table(self):
        # We need to decode a substantial number of headers, to populate the
        # header table. This string isn't magic: it's the output from the
        # equivalent test for the Encoder.
        d = Decoder()
        data = (
            b'\x82\x88F\x87\x087A\x07"9\xffC\x8b\xdbm\x88>h\xd1\xcb\x12%' +
            b'\xba\x7f\x00\x88N\xb0\x8bt\x97\x90\xfa\x7f\x89N\xb0\x8bt\x97\x9a' +
            b'\x17\xa8\xff|\xbe\xefo\xaa\x96\xb4\x05\x04/G\xfa\xefBT\xc8\xb6' +
            b'\x19\xf5t|\x19\x11_Gz\x13\xd1\xf4\xf0\xe8\xfd\xf4\x18\xa4\xaf' +
            b'\xab\xa1\xfc\xfd\x86\xa4\x85\xff}\x1e\xe1O&\x81\xcab\x94\xc57G' +
            b'\x05<qo\x98\x1a\x92\x17U\xaf\x88\xf9\xc43\x8e\x8b\xe9C\x9c\xb5' +
            b'%\x11SX\x1ey\xc7E\xff\xcf=\x17\xd2\x879jJ"\xa6\xb0<\xf4_W\x95' +
            b'\xa5%\x9d?\xd0\x7f]^V\x94\x95\xff\x00\x8a\xfd\xcb\xf2\xd7\x92 ' +
            b'\x89|F\x11\x84\xae\xbb+\xb3')
        d.decode(data)

        # Resize the header table to a size so small that nothing can be in it.
        d.header_table_size = 40
        assert len(d.header_table) == 0
Example #37
    def test_literal_header_field_with_indexing_emits_headertuple(self):
        """
        A header field with indexing emits a HeaderTuple.
        """
        d = Decoder()
        data = b'\x00\x0acustom-key\x0dcustom-header'

        headers = d.decode(data)
        assert len(headers) == 1

        header = headers[0]
        assert isinstance(header, HeaderTuple)
        assert not isinstance(header, NeverIndexedHeaderTuple)
Example #38
    def test_literal_never_indexed_emits_neverindexedheadertuple(self):
        """
        A literal header field that must never be indexed emits a
        NeverIndexedHeaderTuple.
        """
        d = Decoder()
        data = b'\x10\x0acustom-key\x0dcustom-header'

        headers = d.decode(data)
        assert len(headers) == 1

        header = headers[0]
        assert isinstance(header, NeverIndexedHeaderTuple)
Example #39
    def test_indexed_never_indexed_emits_neverindexedheadertuple(self):
        """
        A header field with an indexed name that must never be indexed emits a
        NeverIndexedHeaderTuple.
        """
        d = Decoder()
        data = b'\x14\x0c/sample/path'

        headers = d.decode(data)
        assert len(headers) == 1

        header = headers[0]
        assert isinstance(header, NeverIndexedHeaderTuple)
Example #40
    def test_literal_header_field_with_indexing(self):
        """
        The header field representation uses a literal name and a literal
        value.
        """
        d = Decoder()
        header_set = [('custom-key', 'custom-header')]
        data = b'\x40\x0acustom-key\x0dcustom-header'

        assert d.decode(data) == header_set
        assert list(d.header_table.dynamic_entries) == [
            (n.encode('utf-8'), v.encode('utf-8')) for n, v in header_set
        ]
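
The four literal-representation tests above differ only in the first byte of the block. A brief sketch (mine, based on RFC 7541 §6.2, and assuming the top-level hpack exports used elsewhere on this page): the upper bits select the representation, and the low bits optionally name an existing table entry (static index 4 is ':path').

from hpack import Decoder

blocks = [
    (b'\x40\x0acustom-key\x0dcustom-header', '01......  literal with incremental indexing'),
    (b'\x00\x0acustom-key\x0dcustom-header', '0000....  literal without indexing'),
    (b'\x10\x0acustom-key\x0dcustom-header', '0001....  literal, never indexed'),
    (b'\x14\x0c/sample/path', '00010100  never indexed, name from static index 4'),
]
for block, description in blocks:
    # Each block decodes to a single header; the tuple's type records whether
    # the field may ever be indexed by intermediaries.
    header = Decoder().decode(block)[0]
    print(description, type(header).__name__, header)
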
Example #41
    def test_can_encode_a_story_with_huffman(self, raw_story):
        d = Decoder()
        e = Encoder()

        for case in raw_story['cases']:
            # The input headers are a list of dicts, which is annoying.
            input_headers = [(item[0], item[1]) for header in case['headers']
                             for item in header.items()]

            encoded = e.encode(input_headers, huffman=True)
            decoded_headers = d.decode(encoded)

            assert input_headers == decoded_headers
Example #42
    def test_indexed_never_indexed_emits_neverindexedheadertuple(self):
        """
        A header field with an indexed name that must never be indexed emits a
        NeverIndexedHeaderTuple.
        """
        d = Decoder()
        data = b'\x14\x0c/sample/path'

        headers = d.decode(data)
        assert len(headers) == 1

        header = headers[0]
        assert isinstance(header, NeverIndexedHeaderTuple)
Example #43
    def test_literal_never_indexed_emits_neverindexedheadertuple(self):
        """
        A literal header field that must never be indexed emits a
        NeverIndexedHeaderTuple.
        """
        d = Decoder()
        data = b'\x10\x0acustom-key\x0dcustom-header'

        headers = d.decode(data)
        assert len(headers) == 1

        header = headers[0]
        assert isinstance(header, NeverIndexedHeaderTuple)
Example #44
    def test_literal_header_field_with_indexing_emits_headertuple(self):
        """
        A header field with indexing emits a HeaderTuple.
        """
        d = Decoder()
        data = b'\x00\x0acustom-key\x0dcustom-header'

        headers = d.decode(data)
        assert len(headers) == 1

        header = headers[0]
        assert isinstance(header, HeaderTuple)
        assert not isinstance(header, NeverIndexedHeaderTuple)
Example #45
    def test_can_decode_multiple_header_table_size_changes(self):
        """
        If multiple header table size changes are sent in at once, they are
        successfully decoded.
        """
        d = Decoder()
        data = b'?a?\xe1\x1f\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
        expect = [
            (':method', 'GET'),
            (':scheme', 'https'),
            (':path', '/'),
            (':authority', '127.0.0.1:8443')
        ]

        assert d.decode(data) == expect
Example #46
    def test_can_encode_a_story_no_huffman(self, raw_story):
        d = Decoder()
        e = Encoder()

        for case in raw_story['cases']:
            # The input headers are a list of dicts, which is annoying.
            input_headers = [
                (item[0], item[1])
                for header in case['headers']
                for item in header.items()
            ]

            encoded = e.encode(input_headers, huffman=False)
            decoded_headers = d.decode(encoded)

            assert input_headers == decoded_headers
            assert all(
                isinstance(header, HeaderTuple) for header in decoded_headers
            )
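
The story tests above boil down to a simple round trip. A minimal sketch (again assuming the top-level hpack exports): the same header list is encoded with huffman=False and with huffman=True, and both blocks decode back to the original list; for these literal strings the Huffman block is simply shorter on the wire.

from hpack import Decoder, Encoder

headers = [(':method', 'GET'), (':path', '/'), ('custom-key', 'custom-value')]

plain = Encoder().encode(headers, huffman=False)
packed = Encoder().encode(headers, huffman=True)

assert Decoder().decode(plain) == headers
assert Decoder().decode(packed) == headers
print(len(plain), len(packed))   # the Huffman block is the shorter of the two
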
Example #47
    def test_apache_trafficserver(self):
        # This test reproduces the bug in #110, using exactly the same header
        # data.
        d = Decoder()
        data = (
            b'\x10\x07:status\x03200@\x06server\tATS/6.0.0'
            b'@\x04date\x1dTue, 31 Mar 2015 08:09:51 GMT'
            b'@\x0ccontent-type\ttext/html@\x0econtent-length\x0542468'
            b'@\rlast-modified\x1dTue, 31 Mar 2015 01:55:51 GMT'
            b'@\x04vary\x0fAccept-Encoding@\x04etag\x0f"5519fea7-a5e4"'
            b'@\x08x-served\x05Nginx@\x14x-subdomain-tryfiles\x04True'
            b'@\x07x-deity\thydra-lts@\raccept-ranges\x05bytes@\x03age\x010'
            b'@\x19strict-transport-security\rmax-age=86400'
            b'@\x03via2https/1.1 ATS (ApacheTrafficServer/6.0.0 [cSsNfU])'
        )
        expect = [
            (':status', '200'),
            ('server', 'ATS/6.0.0'),
            ('date', 'Tue, 31 Mar 2015 08:09:51 GMT'),
            ('content-type', 'text/html'),
            ('content-length', '42468'),
            ('last-modified', 'Tue, 31 Mar 2015 01:55:51 GMT'),
            ('vary', 'Accept-Encoding'),
            ('etag', '"5519fea7-a5e4"'),
            ('x-served', 'Nginx'),
            ('x-subdomain-tryfiles', 'True'),
            ('x-deity', 'hydra-lts'),
            ('accept-ranges', 'bytes'),
            ('age', '0'),
            ('strict-transport-security', 'max-age=86400'),
            ('via', 'https/1.1 ATS (ApacheTrafficServer/6.0.0 [cSsNfU])'),
        ]

        result = d.decode(data)

        assert result == expect
        # The status header shouldn't be indexed.
        assert len(d.header_table.dynamic_entries) == len(expect) - 1
Example #48
    def test_apache_trafficserver(self):
        # This test reproduces the bug in #110, using exactly the same header
        # data.
        d = Decoder()
        data = (
            b'\x10\x07:status\x03200@\x06server\tATS/6.0.0'
            b'@\x04date\x1dTue, 31 Mar 2015 08:09:51 GMT'
            b'@\x0ccontent-type\ttext/html@\x0econtent-length\x0542468'
            b'@\rlast-modified\x1dTue, 31 Mar 2015 01:55:51 GMT'
            b'@\x04vary\x0fAccept-Encoding@\x04etag\x0f"5519fea7-a5e4"'
            b'@\x08x-served\x05Nginx@\x14x-subdomain-tryfiles\x04True'
            b'@\x07x-deity\thydra-lts@\raccept-ranges\x05bytes@\x03age\x010'
            b'@\x19strict-transport-security\rmax-age=86400'
            b'@\x03via2https/1.1 ATS (ApacheTrafficServer/6.0.0 [cSsNfU])')
        expect = [
            (':status', '200'),
            ('server', 'ATS/6.0.0'),
            ('date', 'Tue, 31 Mar 2015 08:09:51 GMT'),
            ('content-type', 'text/html'),
            ('content-length', '42468'),
            ('last-modified', 'Tue, 31 Mar 2015 01:55:51 GMT'),
            ('vary', 'Accept-Encoding'),
            ('etag', '"5519fea7-a5e4"'),
            ('x-served', 'Nginx'),
            ('x-subdomain-tryfiles', 'True'),
            ('x-deity', 'hydra-lts'),
            ('accept-ranges', 'bytes'),
            ('age', '0'),
            ('strict-transport-security', 'max-age=86400'),
            ('via', 'https/1.1 ATS (ApacheTrafficServer/6.0.0 [cSsNfU])'),
        ]

        result = d.decode(data)

        assert result == expect
        # The status header shouldn't be indexed.
        assert len(d.header_table.dynamic_entries) == len(expect) - 1
Example #49
    def test_ordering_applies_to_encoding(self, special_keys, boring_keys):
        """
        When encoding a dictionary the special keys all appear first.
        """
        def _prepend_colon(k):
            if isinstance(k, unicode):
                return u':' + k
            else:
                return b':' + k

        special_keys = set(map(_prepend_colon, special_keys))
        input_dict = {
            k: b'testval' for k in itertools.chain(
                special_keys,
                boring_keys
            )
        }
        e = Encoder()
        d = Decoder()
        encoded = e.encode(input_dict)
        decoded = iter(d.decode(encoded, raw=True))

        received_special = set()
        received_boring = set()
        expected_special = set(map(_to_bytes, special_keys))
        expected_boring = set(map(_to_bytes, boring_keys))

        for _ in special_keys:
            k, _ = next(decoded)
            received_special.add(k)
        for _ in boring_keys:
            k, _ = next(decoded)
            received_boring.add(k)

        assert expected_special == received_special
        assert expected_boring == received_boring
Example #50
class H2Connection(object):
    """
    A low-level HTTP/2 connection object. This handles building and receiving
    frames and maintains both connection and per-stream state for all streams
    on this connection.

    This wraps an HTTP/2 Connection state machine implementation, ensuring that
    frames can only be sent/received when the connection is in a valid state.
    It also builds stream state machines on demand to ensure that the
    constraints of those state machines are met as well. Attempts to create
    frames that cannot be sent will raise a ``ProtocolError``.

    :param client_side: Whether this object is to be used on the client side of
        a connection, or on the server side. Affects the logic used by the
        state machine, the default settings values, the allowable stream IDs,
        and several other properties. Defaults to ``True``.
    :type client_side: ``bool``
    """
    # The initial maximum outbound frame size. This can be changed by receiving
    # a settings frame.
    DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535

    # The initial maximum inbound frame size. This is somewhat arbitrarily
    # chosen.
    DEFAULT_MAX_INBOUND_FRAME_SIZE = 2**24

    # The highest acceptable stream ID.
    HIGHEST_ALLOWED_STREAM_ID = 2**31 - 1

    # The largest acceptable window increment.
    MAX_WINDOW_INCREMENT = 2**31 - 1

    def __init__(self, client_side=True):
        self.state_machine = H2ConnectionStateMachine()
        self.streams = {}
        self.highest_inbound_stream_id = 0
        self.highest_outbound_stream_id = 0
        self.encoder = Encoder()
        self.decoder = Decoder()
        self.client_side = client_side

        # Objects that store settings, including defaults.
        self.local_settings = Settings(client=client_side)
        self.remote_settings = Settings(client=not client_side)

        # The current value of the connection flow control windows on the
        # connection.
        self.outbound_flow_control_window = (
            self.remote_settings.initial_window_size)
        self.inbound_flow_control_window = (
            self.local_settings.initial_window_size)

        #: The maximum size of a frame that can be emitted by this peer, in
        #: bytes.
        self.max_outbound_frame_size = self.remote_settings.max_frame_size

        #: The maximum size of a frame that can be received by this peer, in
        #: bytes.
        self.max_inbound_frame_size = self.local_settings.max_frame_size

        # Buffer for incoming data.
        self.incoming_buffer = FrameBuffer(server=not client_side)

        # A private variable to store a sequence of received header frames
        # until completion.
        self._header_frames = []

        # Data that needs to be sent.
        self._data_to_send = b''

        # When in doubt use dict-dispatch.
        self._frame_dispatch_table = {
            HeadersFrame: self._receive_headers_frame,
            PushPromiseFrame: self._receive_push_promise_frame,
            SettingsFrame: self._receive_settings_frame,
            DataFrame: self._receive_data_frame,
            WindowUpdateFrame: self._receive_window_update_frame,
            PingFrame: self._receive_ping_frame,
            RstStreamFrame: self._receive_rst_stream_frame,
            PriorityFrame: self._receive_priority_frame,
            GoAwayFrame: self._receive_goaway_frame,
            ContinuationFrame: self._receive_naked_continuation,
        }

    def _prepare_for_sending(self, frames):
        if not frames:
            return
        self._data_to_send += b''.join(f.serialize() for f in frames)
        assert all(f.body_len <= self.max_outbound_frame_size for f in frames)

    def _open_streams(self, remainder):
        """
        A common method of counting the number of open streams. Returns the number
        of streams that are open *and* that have (stream ID % 2) == remainder.
        While it iterates, also deletes any closed streams.
        """
        count = 0
        to_delete = []

        for stream_id, stream in self.streams.items():
            if stream.open and (stream_id % 2 == remainder):
                count += 1
            elif stream.closed:
                to_delete.append(stream_id)

        for stream_id in to_delete:
            del self.streams[stream_id]

        return count

    @property
    def open_outbound_streams(self):
        """
        The current number of open outbound streams.
        """
        outbound_numbers = int(self.client_side)
        return self._open_streams(outbound_numbers)

    @property
    def open_inbound_streams(self):
        """
        The current number of open inbound streams.
        """
        inbound_numbers = int(not self.client_side)
        return self._open_streams(inbound_numbers)

    def _begin_new_stream(self, stream_id, allowed_ids):
        """
        Initiate a new stream.

        .. versionchanged:: 2.0.0
           Removed this function from the public API.

        :param stream_id: The ID of the stream to open.
        :param allowed_ids: What kind of stream ID is allowed.
        """
        outbound = self._stream_id_is_outbound(stream_id)
        highest_stream_id = (self.highest_outbound_stream_id
                             if outbound else self.highest_inbound_stream_id)

        if stream_id <= highest_stream_id:
            raise StreamIDTooLowError(stream_id, highest_stream_id)

        if allowed_ids != AllowedStreamIDs.ANY:
            if (stream_id % 2) != int(allowed_ids):
                raise ProtocolError("Invalid stream ID for peer.")

        s = H2Stream(stream_id)
        s.max_inbound_frame_size = self.max_inbound_frame_size
        s.max_outbound_frame_size = self.max_outbound_frame_size
        s.outbound_flow_control_window = (
            self.remote_settings.initial_window_size)
        s.inbound_flow_control_window = self.local_settings.initial_window_size

        self.streams[stream_id] = s

        if outbound:
            self.highest_outbound_stream_id = stream_id
        else:
            self.highest_inbound_stream_id = stream_id

        return s

    def initiate_connection(self):
        """
        Provides any data that needs to be sent at the start of the connection.
        Must be called for both clients and servers.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        if self.client_side:
            preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
        else:
            preamble = b''

        f = SettingsFrame(0)
        for setting, value in self.local_settings.items():
            f.settings[setting] = value

        self._data_to_send += preamble + f.serialize()

    def _get_or_create_stream(self, stream_id, allowed_ids):
        """
        Gets a stream by its stream ID. Will create one if one does not already
        exist. Use allowed_ids to circumvent the usual stream ID rules for
        clients and servers.

        .. versionchanged:: 2.0.0
           Removed this function from the public API.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            return self._begin_new_stream(stream_id, allowed_ids)

    def _get_stream_by_id(self, stream_id):
        """
        Gets a stream by its stream ID. Raises NoSuchStreamError if the stream
        ID does not correspond to a known stream and is higher than the current
        maximum: raises StreamClosedError if it is lower than the current
        maximum.

        .. versionchanged:: 2.0.0
           Removed this function from the public API.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            outbound = self._stream_id_is_outbound(stream_id)
            highest_stream_id = (self.highest_outbound_stream_id if outbound
                                 else self.highest_inbound_stream_id)

            if stream_id > highest_stream_id:
                raise NoSuchStreamError(stream_id)
            else:
                raise StreamClosedError(stream_id)

    def get_next_available_stream_id(self):
        """
        Returns an integer suitable for use as the stream ID for the next
        stream created by this endpoint. For server endpoints, this stream ID
        will be even. For client endpoints, this stream ID will be odd. If no
        stream IDs are available, raises :class:`NoAvailableStreamIDError
        <h2.exceptions.NoAvailableStreamIDError>`.

        .. warning:: The return value from this function does not change until
                     the stream ID has actually been used by sending or pushing
                     headers on that stream. For that reason, it should be
                     called as close as possible to the actual use of the
                     stream ID.

        .. versionadded:: 2.0.0

        :raises: :class:`NoAvailableStreamIDError
            <h2.exceptions.NoAvailableStreamIDError>`
        :returns: The next free stream ID this peer can use to initiate a
            stream.
        :rtype: ``int``
        """
        # No streams have been opened yet, so return the lowest allowed stream
        # ID.
        if not self.highest_outbound_stream_id:
            return 1 if self.client_side else 2

        next_stream_id = self.highest_outbound_stream_id + 2
        if next_stream_id > self.HIGHEST_ALLOWED_STREAM_ID:
            raise NoAvailableStreamIDError("Exhausted allowed stream IDs")

        return next_stream_id

    def send_headers(self, stream_id, headers, end_stream=False):
        """
        Send headers on a given stream.

        This function can be used to send request or response headers: the kind
        that are sent depends on whether this connection has been opened as a
        client or server connection, and whether the stream was opened by the
        remote peer or not.

        If this is a client connection, calling ``send_headers`` will send the
        headers as a request. It will also implicitly open the stream being
        used. If this is a client connection and ``send_headers`` has *already*
        been called, this will send trailers instead.

        If this is a server connection, calling ``send_headers`` will send the
        headers as a response. It is a protocol error for a server to open a
        stream by sending headers. If this is a server connection and
        ``send_headers`` has *already* been called, this will send trailers
        instead.

        In all situations it is a protocol error to call ``send_headers`` more
        than twice.

        :param stream_id: The stream ID to send the headers on. If this stream
            does not currently exist, it will be created.
        :type stream_id: ``int``
        :param headers: The request/response headers to send.
        :type headers: An iterable of two tuples of bytestrings.
        :returns: Nothing
        """
        # Check we can open the stream.
        if stream_id not in self.streams:
            max_open_streams = self.remote_settings.max_concurrent_streams
            if (self.open_outbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max outbound streams is %d, %d open" %
                    (max_open_streams, self.open_outbound_streams))

        self.state_machine.process_input(ConnectionInputs.SEND_HEADERS)
        stream = self._get_or_create_stream(stream_id,
                                            AllowedStreamIDs(self.client_side))
        frames = stream.send_headers(headers, self.encoder, end_stream)
        self._prepare_for_sending(frames)

    def send_data(self, stream_id, data, end_stream=False):
        """
        Send data on a given stream.

        This method does no breaking up of data: if the data is larger than the
        value returned by :meth:`local_flow_control_window
        <h2.connection.H2Connection.local_flow_control_window>` for this stream
        then a :class:`FlowControlError <h2.exceptions.FlowControlError>` will
        be raised. If the data is larger than :data:`max_outbound_frame_size
        <h2.connection.H2Connection.max_outbound_frame_size>` then a
        :class:`FrameTooLargeError <h2.exceptions.FrameTooLargeError>` will be
        raised.

        Hyper-h2 does this to avoid buffering the data internally. If the user
        has more data to send than hyper-h2 will allow, consider breaking it up
        and buffering it externally.

        :param stream_id: The ID of the stream on which to send the data.
        :type stream_id: ``int``
        :param data: The data to send on the stream.
        :type data: ``bytes``
        :param end_stream: (optional) Whether this is the last data to be sent
            on the stream. Defaults to ``False``.
        :type end_stream: ``bool``
        :returns: Nothing
        """
        if len(data) > self.local_flow_control_window(stream_id):
            raise FlowControlError(
                "Cannot send %d bytes, flow control window is %d." %
                (len(data), self.local_flow_control_window(stream_id)))
        elif len(data) > self.max_outbound_frame_size:
            raise FrameTooLargeError(
                "Cannot send frame size %d, max frame size is %d" %
                (len(data), self.max_outbound_frame_size))

        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames = self.streams[stream_id].send_data(data, end_stream)
        self._prepare_for_sending(frames)

        self.outbound_flow_control_window -= len(data)
        assert self.outbound_flow_control_window >= 0

    def end_stream(self, stream_id):
        """
        Cleanly end a given stream.

        This method ends a stream by sending an empty DATA frame on that stream
        with the ``END_STREAM`` flag set.

        :param stream_id: The ID of the stream to end.
        :type stream_id: ``int``
        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames = self.streams[stream_id].end_stream()
        self._prepare_for_sending(frames)

    def increment_flow_control_window(self, increment, stream_id=None):
        """
        Increment a flow control window, optionally for a single stream. Allows
        the remote peer to send more data.

        .. versionchanged:: 2.0.0
           Rejects attempts to increment the flow control window by out of
           range values with a ``ValueError``.

        :param increment: The amount to increment the flow control window by.
        :type increment: ``int``
        :param stream_id: (optional) The ID of the stream that should have its
            flow control window opened. If not present or ``None``, the
            connection flow control window will be opened instead.
        :type stream_id: ``int`` or ``None``
        :returns: Nothing
        :raises: ``ValueError``
        """
        if not (1 <= increment <= self.MAX_WINDOW_INCREMENT):
            raise ValueError(
                "Flow control increment must be between 1 and %d" %
                self.MAX_WINDOW_INCREMENT)

        self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE)

        if stream_id is not None:
            stream = self.streams[stream_id]
            frames = stream.increase_flow_control_window(increment)
            stream.inbound_flow_control_window = guard_increment_window(
                stream.inbound_flow_control_window, increment)
        else:
            f = WindowUpdateFrame(0)
            f.window_increment = increment
            self.inbound_flow_control_window = guard_increment_window(
                self.inbound_flow_control_window, increment)
            frames = [f]

        self._prepare_for_sending(frames)

    def push_stream(self, stream_id, promised_stream_id, request_headers):
        """
        Push a response to the client by sending a PUSH_PROMISE frame.

        :param stream_id: The ID of the stream that this push is a response to.
        :type stream_id: ``int``
        :param promised_stream_id: The ID of the stream that the pushed
            response will be sent on.
        :type promised_stream_id: ``int``
        :param request_headers: The headers of the request that the pushed
            response will be responding to.
        :type request_headers: An iterable of two tuples of bytestrings.
        :returns: Nothing
        """
        if not self.remote_settings.enable_push:
            raise ProtocolError("Remote peer has disabled stream push")

        self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE)
        stream = self._get_stream_by_id(stream_id)

        # We need to prevent users pushing streams in response to streams that
        # they themselves have already pushed: see #163 and RFC 7540 § 6.6. The
        # easiest way to do that is to assert that the stream_id is not even:
        # this shortcut works because only servers can push and the state
        # machine will enforce this.
        if (stream_id % 2) == 0:
            raise ProtocolError("Cannot recursively push streams.")

        new_stream = self._begin_new_stream(promised_stream_id,
                                            AllowedStreamIDs.EVEN)
        self.streams[promised_stream_id] = new_stream

        frames = stream.push_stream_in_band(promised_stream_id,
                                            request_headers, self.encoder)
        new_frames = new_stream.locally_pushed()
        self._prepare_for_sending(frames + new_frames)

    def ping(self, opaque_data):
        """
        Send a PING frame.

        :param opaque_data: A bytestring of length 8 that will be sent in the
                            PING frame.
        :returns: Nothing
        """
        if not isinstance(opaque_data, bytes) or len(opaque_data) != 8:
            raise ValueError("Invalid value for ping data: %r" % opaque_data)

        self.state_machine.process_input(ConnectionInputs.SEND_PING)
        f = PingFrame(0)
        f.opaque_data = opaque_data
        self._prepare_for_sending([f])

    def reset_stream(self, stream_id, error_code=0):
        """
        Reset a stream.

        This method forcibly closes a stream by sending a RST_STREAM frame for
        a given stream. This is not a graceful closure. To gracefully end a
        stream, try the :meth:`end_stream
        <h2.connection.H2Connection.end_stream>` method.

        :param stream_id: The ID of the stream to reset.
        :type stream_id: ``int``
        :param error_code: (optional) The error code to use to reset the
            stream. Defaults to :data:`NO_ERROR <h2.errors.NO_ERROR>`.
        :type error_code: ``int``
        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM)
        stream = self._get_stream_by_id(stream_id)
        frames = stream.reset_stream(error_code)
        self._prepare_for_sending(frames)

    def close_connection(self, error_code=0):
        """
        Close a connection, emitting a GOAWAY frame.

        :param error_code: (optional) The error code to send in the GOAWAY
            frame.
        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)

        f = GoAwayFrame(0)
        f.error_code = error_code
        f.last_stream_id = self.highest_inbound_stream_id
        self._prepare_for_sending([f])

    def update_settings(self, new_settings):
        """
        Update the local settings. This will prepare and emit the appropriate
        SETTINGS frame.

        :param new_settings: A dictionary of {setting: new value}
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        self.local_settings.update(new_settings)
        s = SettingsFrame(0)
        s.settings = new_settings
        self._prepare_for_sending([s])

    def local_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data that can be sent on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.

        :param stream_id: The ID of the stream whose flow control window is
            being queried.
        :type stream_id: ``int``
        :returns: The amount of data in bytes that can be sent on the stream
            before the flow control window is exhausted.
        :rtype: ``int``
        """
        stream = self._get_stream_by_id(stream_id)
        return min(self.outbound_flow_control_window,
                   stream.outbound_flow_control_window)

    def remote_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data the remote peer can send on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.

        :param stream_id: The ID of the stream whose flow control window is
            being queried.
        :type stream_id: ``int``
        :returns: The amount of data in bytes that can be received on the
            stream before the flow control window is exhausted.
        :rtype: ``int``
        """
        stream = self._get_stream_by_id(stream_id)
        return min(self.inbound_flow_control_window,
                   stream.inbound_flow_control_window)

    def data_to_send(self, amt=None):
        """
        Returns some data for sending out of the internal data buffer.

        This method is analogous to ``read`` on a file-like object, but it
        doesn't block. Instead, it returns as much data as the user asks for,
        or less if that much data is not available. It does not perform any
        I/O, and so uses a different name.

        :param amt: (optional) The maximum amount of data to return. If not
            set, or set to ``None``, will return as much data as possible.
        :type amt: ``int``
        :returns: A bytestring containing the data to send on the wire.
        :rtype: ``bytes``
        """
        if amt is None:
            data = self._data_to_send
            self._data_to_send = b''
            return data
        else:
            data = self._data_to_send[:amt]
            self._data_to_send = self._data_to_send[amt:]
            return data

    def clear_outbound_data_buffer(self):
        """
        Clears the outbound data buffer, such that if this call was immediately
        followed by a call to
        :meth:`data_to_send <h2.connection.H2Connection.data_to_send>`, that
        call would return no data.

        This method should not normally be used, but is made available to avoid
        exposing implementation details.
        """
        self._data_to_send = b''

    def _acknowledge_settings(self):
        """
        Acknowledge settings that have been received.

        .. versionchanged:: 2.0.0
           Removed from public API, removed useless ``event`` parameter, made
           automatic.

        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)

        changes = self.remote_settings.acknowledge()

        if INITIAL_WINDOW_SIZE in changes:
            setting = changes[INITIAL_WINDOW_SIZE]
            self._flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        # HEADER_TABLE_SIZE changes by the remote part affect our encoder: cf.
        # RFC 7540 Section 6.5.2.
        if HEADER_TABLE_SIZE in changes:
            setting = changes[HEADER_TABLE_SIZE]
            self.encoder.header_table_size = setting.new_value

        if MAX_FRAME_SIZE in changes:
            setting = changes[MAX_FRAME_SIZE]
            self.max_outbound_frame_size = setting.new_value
            for stream in self.streams.values():
                stream.max_outbound_frame_size = setting.new_value

        f = SettingsFrame(0)
        f.flags.add('ACK')
        return [f]

    def _flow_control_change_from_settings(self, old_value, new_value):
        """
        Update flow control windows in response to a change in the value of
        SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all flow control
        windows by the delta in the settings values. Note that it does not
        increment the *connection* flow control window, per section 6.9.2 of
        RFC 7540.
        """
        delta = new_value - old_value

        for stream in self.streams.values():
            stream.outbound_flow_control_window = guard_increment_window(
                stream.outbound_flow_control_window, delta)

    def _inbound_flow_control_change_from_settings(self, old_value, new_value):
        """
        Update remote flow control windows in response to a change in the value
        of SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all remote flow
        control windows by the delta in the settings values.
        """
        delta = new_value - old_value

        for stream in self.streams.values():
            stream.inbound_flow_control_window += delta

    def receive_data(self, data):
        """
        Pass some received HTTP/2 data to the connection for handling.

        :param data: The data received from the remote peer on the network.
        :type data: ``bytes``
        :returns: A list of events that the remote peer triggered by sending
            this data.
        """
        events = []
        self.incoming_buffer.add_data(data)
        self.incoming_buffer.max_frame_size = self.max_inbound_frame_size

        try:
            for frame in self.incoming_buffer:
                events.extend(self._receive_frame(frame))
        except InvalidPaddingError:
            self._terminate_connection(PROTOCOL_ERROR)
            raise ProtocolError("Received frame with invalid padding.")
        except ProtocolError as e:
            # For whatever reason, receiving the frame caused a protocol error.
            # We should prepare to emit a GoAway frame before throwing the
            # exception up further. No need for an event: the exception will
            # do fine.
            self._terminate_connection(e.error_code)
            raise

        return events

    def _receive_frame(self, frame):
        """
        Handle a frame received on the connection.

        .. versionchanged:: 2.0.0
           Removed from the public API.
        """
        try:
            # I don't love using __class__ here, maybe reconsider it.
            frames, events = self._frame_dispatch_table[frame.__class__](frame)
        except StreamClosedError as e:
            # We need to send a RST_STREAM frame on behalf of the stream.
            # The frame the stream wants to emit is already present in the
            # exception.
            # This does not require re-raising: it's an expected behaviour.
            f = RstStreamFrame(e.stream_id)
            f.error_code = e.error_code
            self._prepare_for_sending([f])
            events = e._events
        except KeyError as e:
            # We don't have a function for handling this frame. Let's call this
            # a PROTOCOL_ERROR and exit.
            raise UnsupportedFrameError("Unexpected frame: %s" % frame)
        else:
            self._prepare_for_sending(frames)

        return events

    def _terminate_connection(self, error_code):
        """
        Terminate the connection early. Used in error handling blocks to send
        GOAWAY frames.
        """
        f = GoAwayFrame(0)
        f.last_stream_id = self.highest_inbound_stream_id
        f.error_code = error_code
        self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)
        self._prepare_for_sending([f])

    def _receive_headers_frame(self, frame):
        """
        Receive a headers frame on the connection.
        """
        # If necessary, check we can open the stream. Also validate that the
        # stream ID is valid.
        if frame.stream_id not in self.streams:
            max_open_streams = self.local_settings.max_concurrent_streams
            if (self.open_inbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max inbound streams is %d, %d open" %
                    (max_open_streams, self.open_inbound_streams))

        # Let's decode the headers.
        try:
            headers = self.decoder.decode(frame.data)
        except (HPACKError, IndexError, TypeError, UnicodeDecodeError) as e:
            # We should only need HPACKError here, but versions of HPACK
            # older than 2.1.0 throw all three others as well. For maximum
            # compatibility, catch all of them.
            raise ProtocolError("Error decoding header block: %s" % e)

        headers = validate_headers(headers)
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_HEADERS)
        stream = self._get_or_create_stream(
            frame.stream_id, AllowedStreamIDs(not self.client_side))
        frames, stream_events = stream.receive_headers(
            headers, 'END_STREAM' in frame.flags)

        if 'PRIORITY' in frame.flags:
            stream_events.extend(stream.priority_changed_remote(frame))

        return frames, events + stream_events

    def _receive_push_promise_frame(self, frame):
        """
        Receive a push-promise frame on the connection.
        """
        if not self.local_settings.enable_push:
            raise ProtocolError("Received pushed stream")

        pushed_headers = self.decoder.decode(frame.data)

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PUSH_PROMISE)
        stream = self._get_stream_by_id(frame.stream_id)

        # We need to prevent peers pushing streams in response to streams that
        # they themselves have already pushed: see #163 and RFC 7540 § 6.6. The
        # easiest way to do that is to assert that the stream_id is not even:
        # this shortcut works because only servers can push and the state
        # machine will enforce this.
        if (frame.stream_id % 2) == 0:
            raise ProtocolError("Cannot recursively push streams.")

        frames, stream_events = stream.receive_push_promise_in_band(
            frame.promised_stream_id,
            pushed_headers,
        )

        new_stream = self._begin_new_stream(frame.promised_stream_id,
                                            AllowedStreamIDs.EVEN)
        self.streams[frame.promised_stream_id] = new_stream
        new_stream.remotely_pushed()

        return frames, events + stream_events

    def _receive_data_frame(self, frame):
        """
        Receive a data frame on the connection.
        """
        flow_controlled_length = frame.flow_controlled_length
        window_size = self.remote_flow_control_window(frame.stream_id)
        if flow_controlled_length > window_size:
            raise FlowControlError(
                "Cannot receive %d bytes, flow control window is %d." %
                (flow_controlled_length, window_size))

        events = self.state_machine.process_input(ConnectionInputs.RECV_DATA)
        self.inbound_flow_control_window -= flow_controlled_length
        stream = self._get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_data(
            frame.data, 'END_STREAM' in frame.flags, flow_controlled_length)
        return frames, events + stream_events

    def _receive_settings_frame(self, frame):
        """
        Receive a SETTINGS frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_SETTINGS)

        # This is an ack of the local settings.
        if 'ACK' in frame.flags:
            changed_settings = self._local_settings_acked()
            ack_event = SettingsAcknowledged()
            ack_event.changed_settings = changed_settings
            events.append(ack_event)
            return [], events

        # Add the new settings.
        self.remote_settings.update(frame.settings)
        events.append(
            RemoteSettingsChanged.from_settings(self.remote_settings,
                                                frame.settings))
        frames = self._acknowledge_settings()

        return frames, events

    def _receive_window_update_frame(self, frame):
        """
        Receive a WINDOW_UPDATE frame on the connection.
        """
        # Validate the frame.
        if not (1 <= frame.window_increment <= self.MAX_WINDOW_INCREMENT):
            raise ProtocolError(
                "Flow control increment must be between 1 and %d, received %d"
                % (self.MAX_WINDOW_INCREMENT, frame.window_increment))

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_WINDOW_UPDATE)

        if frame.stream_id:
            stream = self._get_stream_by_id(frame.stream_id)
            frames, stream_events = stream.receive_window_update(
                frame.window_increment)
        else:
            # Increment our local flow control window.
            self.outbound_flow_control_window = guard_increment_window(
                self.outbound_flow_control_window, frame.window_increment)

            # FIXME: Should we split this into one event per active stream?
            window_updated_event = WindowUpdated()
            window_updated_event.stream_id = 0
            window_updated_event.delta = frame.window_increment
            stream_events = [window_updated_event]
            frames = []

        return frames, events + stream_events

    def _receive_ping_frame(self, frame):
        """
        Receive a PING frame on the connection.
        """
        events = self.state_machine.process_input(ConnectionInputs.RECV_PING)
        flags = []

        if 'ACK' in frame.flags:
            evt = PingAcknowledged()
            evt.ping_data = frame.opaque_data
            events.append(evt)
        else:
            f = PingFrame(0)
            f.flags = set(['ACK'])
            f.opaque_data = frame.opaque_data
            flags.append(f)

        return flags, events

    def _receive_rst_stream_frame(self, frame):
        """
        Receive a RST_STREAM frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_RST_STREAM)
        try:
            stream = self._get_stream_by_id(frame.stream_id)
        except NoSuchStreamError:
            # The stream is missing. That's ok, we just do nothing here.
            stream_frames = []
            stream_events = []
        else:
            stream_frames, stream_events = stream.stream_reset(frame)

        return stream_frames, events + stream_events

    def _receive_priority_frame(self, frame):
        """
        Receive a PRIORITY frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PRIORITY)
        stream = self._get_or_create_stream(frame.stream_id,
                                            AllowedStreamIDs.ANY)
        stream_events = stream.priority_changed_remote(frame)

        return [], events + stream_events

    def _receive_goaway_frame(self, frame):
        """
        Receive a GOAWAY frame on the connection.
        """
        events = self.state_machine.process_input(ConnectionInputs.RECV_GOAWAY)

        # Clear the outbound data buffer: we cannot send further data now.
        self.clear_outbound_data_buffer()

        # Fire an appropriate ConnectionTerminated event.
        new_event = ConnectionTerminated()
        new_event.error_code = frame.error_code
        new_event.last_stream_id = frame.last_stream_id
        new_event.additional_data = (frame.additional_data
                                     if frame.additional_data else None)
        events.append(new_event)

        return [], events

    def _receive_naked_continuation(self, frame):
        """
        A naked CONTINUATION frame has been received. This is always an error,
        but the type of error it is depends on the state of the stream and must
        transition the state of the stream, so we need to pass it to the
        appropriate stream.
        """
        stream = self._get_stream_by_id(frame.stream_id)
        stream.receive_continuation()
        assert False, "Should not be reachable"

    def _local_settings_acked(self):
        """
        Handle the local settings being ACKed, update internal state.
        """
        changes = self.local_settings.acknowledge()

        if INITIAL_WINDOW_SIZE in changes:
            setting = changes[INITIAL_WINDOW_SIZE]
            self._inbound_flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        return changes

    def _stream_id_is_outbound(self, stream_id):
        """
        Returns ``True`` if the stream ID corresponds to an outbound stream
        (one initiated by this peer), returns ``False`` otherwise.
        """
        return (stream_id % 2 == int(self.client_side))
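A minimal sketch of how the receive/send halves above are usually driven (the ``sock`` transport and the loop are assumptions for illustration; ``initiate_connection`` and the send-side methods appear in the fuller listings that follow):

conn = H2Connection(client_side=True)
conn.initiate_connection()
sock.sendall(conn.data_to_send())  # 'sock' is an assumed socket-like object

while True:
    chunk = sock.recv(65535)
    if not chunk:
        break
    # receive_data() parses the frames and returns the events they triggered;
    # on a protocol error a GOAWAY frame is queued before the exception is raised.
    events = conn.receive_data(chunk)
    for event in events:
        print(event)
    outbound = conn.data_to_send()
    if outbound:
        sock.sendall(outbound)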
Exemple #51
0
class H2Connection(object):
    """
    A low-level HTTP/2 connection object. This handles building and receiving
    frames and maintains both connection-level and per-stream state.

    This wraps a HTTP/2 connection state machine implementation, ensuring that
    frames can only be sent/received when the connection is in a valid state.
    Attempts to create frames that cannot be sent will raise a
    ``ProtocolError``.
    """
    # The initial maximum outbound frame size. This can be changed by receiving
    # a settings frame.
    DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535

    # The initial maximum inbound frame size. This is somewhat arbitrarily
    # chosen.
    DEFAULT_MAX_INBOUND_FRAME_SIZE = 2**24

    def __init__(self, client_side=True):
        self.state_machine = H2ConnectionStateMachine()
        self.streams = {}
        self.highest_stream_id = 0
        self.encoder = Encoder()
        self.decoder = Decoder()
        self.client_side = client_side

        # Objects that store settings, including defaults.
        self.local_settings = Settings(client=client_side)
        self.remote_settings = Settings(client=not client_side)

        # The current value of the connection flow control windows.
        self.outbound_flow_control_window = (
            self.remote_settings.initial_window_size)
        self.inbound_flow_control_window = (
            self.local_settings.initial_window_size)

        # Maximum frame sizes in each direction.
        self.max_outbound_frame_size = self.remote_settings.max_frame_size
        self.max_inbound_frame_size = self.local_settings.max_frame_size

        # Buffer for incoming data.
        self.incoming_buffer = FrameBuffer(server=not client_side)

        # A private variable to store a sequence of received header frames
        # until completion.
        self._header_frames = []

        # Data that needs to be sent.
        self._data_to_send = b''

        # When in doubt use dict-dispatch.
        self._frame_dispatch_table = {
            HeadersFrame: self._receive_headers_frame,
            PushPromiseFrame: self._receive_push_promise_frame,
            SettingsFrame: self._receive_settings_frame,
            DataFrame: self._receive_data_frame,
            WindowUpdateFrame: self._receive_window_update_frame,
            PingFrame: self._receive_ping_frame,
            RstStreamFrame: self._receive_rst_stream_frame,
            PriorityFrame: self._receive_priority_frame,
            GoAwayFrame: self._receive_goaway_frame,
        }

    def _prepare_for_sending(self, frames):
        if not frames:
            return
        self._data_to_send += b''.join(f.serialize() for f in frames)
        assert all(f.body_len <= self.max_outbound_frame_size for f in frames)

    def _open_streams(self, remainder):
        """
        A common method for counting open streams. Returns the number
        of streams that are open *and* that have (stream ID % 2) == remainder.
        While it iterates, also deletes any closed streams.
        """
        count = 0
        to_delete = []

        for stream_id, stream in self.streams.items():
            if stream.open and (stream_id % 2 == remainder):
                count += 1
            elif stream.closed:
                to_delete.append(stream_id)

        for stream_id in to_delete:
            del self.streams[stream_id]

        return count

    @property
    def open_outbound_streams(self):
        """
        The current number of open outbound streams.
        """
        outbound_numbers = int(self.client_side)
        return self._open_streams(outbound_numbers)

    @property
    def open_inbound_streams(self):
        """
        The current number of open inbound streams.
        """
        inbound_numbers = int(not self.client_side)
        return self._open_streams(inbound_numbers)

    def begin_new_stream(self, stream_id):
        """
        Initiate a new stream.
        """
        if stream_id <= self.highest_stream_id:
            raise StreamIDTooLowError("Stream ID must be larger than %s" %
                                      self.highest_stream_id)

        s = H2Stream(stream_id)
        s.max_inbound_frame_size = self.max_inbound_frame_size
        s.max_outbound_frame_size = self.max_outbound_frame_size
        s.outbound_flow_control_window = (
            self.remote_settings.initial_window_size)
        s.inbound_flow_control_window = self.local_settings.initial_window_size

        self.streams[stream_id] = s
        self.highest_stream_id = stream_id
        return s

    def initiate_connection(self):
        """
        Provides any data that needs to be sent at the start of the connection.
        Must be called for both clients and servers.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        if self.client_side:
            preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
        else:
            preamble = b''

        f = SettingsFrame(0)
        for setting, value in self.local_settings.items():
            f.settings[setting] = value

        self._data_to_send += preamble + f.serialize()
        return []

    def get_or_create_stream(self, stream_id):
        """
        Gets a stream by its stream ID. Will create one if one does not already
        exist.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            return self.begin_new_stream(stream_id)

    def get_stream_by_id(self, stream_id):
        """
        Gets a stream by its stream ID. Raises NoSuchStreamError if the stream
        ID does not correspond to a known stream and is higher than the current
        maximum; raises StreamClosedError if it is lower than the current
        maximum.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            if stream_id > self.highest_stream_id:
                raise NoSuchStreamError(stream_id)
            else:
                raise StreamClosedError(stream_id)

    def send_headers(self, stream_id, headers, end_stream=False):
        """
        Send headers on a given stream.
        """
        # Check we can open the stream.
        if stream_id not in self.streams:
            max_open_streams = self.remote_settings.max_concurrent_streams
            if (self.open_outbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max outbound streams is %d, %d open" %
                    (max_open_streams, self.open_outbound_streams))

        self.state_machine.process_input(ConnectionInputs.SEND_HEADERS)
        stream = self.get_or_create_stream(stream_id)
        frames, events = stream.send_headers(headers, self.encoder, end_stream)
        self._prepare_for_sending(frames)
        return events

    def send_data(self, stream_id, data, end_stream=False):
        """
        Send data on a given stream.
        """
        if len(data) > self.local_flow_control_window(stream_id):
            raise FlowControlError(
                "Cannot send %d bytes, flow control window is %d." %
                (len(data), self.local_flow_control_window(stream_id)))
        elif len(data) > self.max_outbound_frame_size:
            raise FrameTooLargeError(
                "Cannot send frame size %d, max frame size is %d" %
                (len(data), self.max_outbound_frame_size))

        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames, events = self.streams[stream_id].send_data(data, end_stream)
        self._prepare_for_sending(frames)

        self.outbound_flow_control_window -= len(data)
        assert self.outbound_flow_control_window >= 0

        return events

    def end_stream(self, stream_id):
        """
        End a given stream.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames, events = self.streams[stream_id].end_stream()
        self._prepare_for_sending(frames)
        return events

    def increment_flow_control_window(self, increment, stream_id=None):
        """
        Increment a flow control window, optionally for a single stream.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE)

        if stream_id is not None:
            stream = self.streams[stream_id]
            frames, events = stream.increase_flow_control_window(increment)
            stream.inbound_flow_control_window += increment
        else:
            f = WindowUpdateFrame(0)
            f.window_increment = increment
            self.inbound_flow_control_window += increment
            frames = [f]
            events = []

        self._prepare_for_sending(frames)
        return events

    def push_stream(self, stream_id, promised_stream_id, request_headers):
        """
        Send a push promise.
        """
        if not self.remote_settings.enable_push:
            raise ProtocolError("Remote peer has disabled stream push")

        self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE)
        stream = self.get_stream_by_id(stream_id)

        new_stream = self.begin_new_stream(promised_stream_id)
        self.streams[promised_stream_id] = new_stream

        frames, events = stream.push_stream_in_band(promised_stream_id,
                                                    request_headers,
                                                    self.encoder)
        new_frames, new_events = new_stream.locally_pushed()
        self._prepare_for_sending(frames + new_frames)
        return events + new_events

    def ping(self, opaque_data):
        """
        Send a PING frame.

        :param opaque_data: A bytestring of length 8 that will be sent in the
                            PING frame.
        :returns: A list of events.
        """
        if not isinstance(opaque_data, bytes) or len(opaque_data) != 8:
            raise ValueError("Invalid value for ping data: %r" % opaque_data)

        self.state_machine.process_input(ConnectionInputs.SEND_PING)
        f = PingFrame(0)
        f.opaque_data = opaque_data
        self._prepare_for_sending([f])

        return []

    def reset_stream(self, stream_id, error_code=0):
        """
        Reset a stream by sending a RST_STREAM frame.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM)
        stream = self.get_stream_by_id(stream_id)
        frames, events = stream.reset_stream(error_code)

        self._prepare_for_sending(frames)
        return events

    def close_connection(self, error_code=0):
        """
        Close a connection, emitting a GOAWAY frame.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)

        f = GoAwayFrame(0)
        f.error_code = error_code
        f.last_stream_id = self.highest_stream_id
        self._prepare_for_sending([f])

        return []

    def acknowledge_settings(self, event):
        """
        Acknowledge settings that have been received.

        :param event: The RemoteSettingsChanged event that is being
                      acknowledged.
        :returns: A list of events.
        """
        assert isinstance(event, RemoteSettingsChanged)
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)

        changes = self.remote_settings.acknowledge()

        if SettingsFrame.INITIAL_WINDOW_SIZE in changes:
            setting = changes[SettingsFrame.INITIAL_WINDOW_SIZE]
            self._flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        # HEADER_TABLE_SIZE changes made by the remote peer affect our encoder: cf.
        # RFC 7540 Section 6.5.2.
        if SettingsFrame.HEADER_TABLE_SIZE in changes:
            setting = changes[SettingsFrame.HEADER_TABLE_SIZE]
            self.encoder.header_table_size = setting.new_value

        if SettingsFrame.SETTINGS_MAX_FRAME_SIZE in changes:
            setting = changes[SettingsFrame.SETTINGS_MAX_FRAME_SIZE]
            self.max_outbound_frame_size = setting.new_value
            for stream in self.streams.values():
                stream.max_outbound_frame_size = setting.new_value

        f = SettingsFrame(0)
        f.flags.add('ACK')
        self._prepare_for_sending([f])
        return []

    def update_settings(self, new_settings):
        """
        Update the local settings. This will prepare and emit the appropriate
        SETTINGS frame.

        :param new_settings: A dictionary of {setting: new value}
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        self.local_settings.update(new_settings)
        s = SettingsFrame(0)
        s.settings = new_settings
        self._prepare_for_sending([s])
        return []

    def local_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data that can be sent on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.
        """
        stream = self.get_stream_by_id(stream_id)
        return min(self.outbound_flow_control_window,
                   stream.outbound_flow_control_window)

    def remote_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data the remote peer can send on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.
        """
        stream = self.get_stream_by_id(stream_id)
        return min(self.inbound_flow_control_window,
                   stream.inbound_flow_control_window)

    def data_to_send(self, amt=None):
        """
        Returns some data for sending out of the internal data buffer.

        This method is analogous to 'read' on a file-like object, but it
        doesn't block. Instead, it returns as much data as the user asks for,
        or less if that much data is not available. It does not perform any
        I/O, and so uses a different name.
        """
        if amt is None:
            data = self._data_to_send
            self._data_to_send = b''
            return data
        else:
            data = self._data_to_send[:amt]
            self._data_to_send = self._data_to_send[amt:]
            return data

    def clear_outbound_data_buffer(self):
        """
        Clears the outbound data buffer, such that if this call was immediately
        followed by a call to
        :meth:`data_to_send <h2.connection.H2Connection.data_to_send>`, that
        call would return no data.

        This method should not normally be used, but is made available to avoid
        exposing implementation details.
        """
        self._data_to_send = b''

    def _flow_control_change_from_settings(self, old_value, new_value):
        """
        Update flow control windows in response to a change in the value of
        SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all flow control
        windows by the delta in the settings values.
        """
        delta = new_value - old_value
        self.outbound_flow_control_window += delta

        for stream in self.streams.values():
            stream.outbound_flow_control_window += delta

        return

    def _inbound_flow_control_change_from_settings(self, old_value, new_value):
        """
        Update remote flow control windows in response to a change in the value
        of SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all remote flow
        control windows by the delta in the settings values.
        """
        delta = new_value - old_value
        self.inbound_flow_control_window += delta

        for stream in self.streams.values():
            stream.inbound_flow_control_window += delta

        return

    def receive_data(self, data):
        """
        Pass some received HTTP/2 data to the connection for handling.
        """
        events = []
        self.incoming_buffer.add_data(data)

        for frame in self.incoming_buffer:
            events.extend(self.receive_frame(frame))

        return events

    def receive_frame(self, frame):
        """
        Handle a frame received on the connection.
        """
        try:
            if frame.body_len > self.max_inbound_frame_size:
                raise ProtocolError(
                    "Received overlong frame: length %d, max %d" %
                    (frame.body_len, self.max_inbound_frame_size))

            # I don't love using __class__ here, maybe reconsider it.
            frames, events = self._frame_dispatch_table[frame.__class__](frame)
        except ProtocolError as e:
            # For whatever reason, receiving the frame caused a protocol error.
            # We should prepare to emit a GoAway frame before throwing the
            # exception up further. No need for an event: the exception will
            # do fine.
            f = GoAwayFrame(0)
            f.last_stream_id = sorted(self.streams.keys())[-1]
            f.error_code = e.error_code
            self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)
            self._prepare_for_sending([f])
            raise
        except StreamClosedError as e:
            # We need to send a RST_STREAM frame on behalf of the stream.
            # The frame the stream wants to emit is already present in the
            # exception.
            # This does not require re-raising: it's an expected behaviour.
            f = RstStreamFrame(e.stream_id)
            f.error_code = e.error_code
            self._prepare_for_sending([f])
            events = []
        else:
            self._prepare_for_sending(frames)

        return events

    def _receive_headers_frame(self, frame):
        """
        Receive a headers frame on the connection.
        """
        # If necessary, check we can open the stream.
        if frame.stream_id not in self.streams:
            max_open_streams = self.local_settings.max_concurrent_streams
            if (self.open_inbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max inbound streams is %d, %d open" %
                    (max_open_streams, self.open_inbound_streams))

        # Let's decode the headers.
        headers = self.decoder.decode(frame.data)
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_HEADERS)
        stream = self.get_or_create_stream(frame.stream_id)
        frames, stream_events = stream.receive_headers(
            headers, 'END_STREAM' in frame.flags)
        return frames, events + stream_events

    def _receive_push_promise_frame(self, frame):
        """
        Receive a push-promise frame on the connection.
        """
        if not self.local_settings.enable_push:
            raise ProtocolError("Received pushed stream")

        pushed_headers = self.decoder.decode(frame.data)

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PUSH_PROMISE)
        stream = self.get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_push_promise_in_band(
            frame.promised_stream_id,
            pushed_headers,
        )

        new_stream = self.begin_new_stream(frame.promised_stream_id)
        self.streams[frame.promised_stream_id] = new_stream
        new_stream.remotely_pushed()

        return frames, events + stream_events

    def _receive_data_frame(self, frame):
        """
        Receive a data frame on the connection.
        """
        if frame.body_len > self.remote_flow_control_window(frame.stream_id):
            raise FlowControlError(
                "Cannot receive %d bytes, flow control window is %d." %
                (frame.body_len,
                 self.remote_flow_control_window(frame.stream_id)))

        events = self.state_machine.process_input(ConnectionInputs.RECV_DATA)
        self.inbound_flow_control_window -= frame.body_len
        stream = self.get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_data(
            frame.data, 'END_STREAM' in frame.flags, frame.body_len)
        return frames, events + stream_events

    def _receive_settings_frame(self, frame):
        """
        Receive a SETTINGS frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_SETTINGS)

        # This is an ack of the local settings.
        if 'ACK' in frame.flags:
            changed_settings = self._local_settings_acked()
            ack_event = SettingsAcknowledged()
            ack_event.changed_settings = changed_settings
            events.append(ack_event)
            return [], events

        # Add the new settings.
        self.remote_settings.update(frame.settings)

        events.append(
            RemoteSettingsChanged.from_settings(self.remote_settings,
                                                frame.settings))
        return [], events

    def _receive_window_update_frame(self, frame):
        """
        Receive a WINDOW_UPDATE frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_WINDOW_UPDATE)

        if frame.stream_id:
            stream = self.get_stream_by_id(frame.stream_id)
            frames, stream_events = stream.receive_window_update(
                frame.window_increment)
        else:
            # Increment our local flow control window.
            self.outbound_flow_control_window += frame.window_increment

            # FIXME: Should we split this into one event per active stream?
            window_updated_event = WindowUpdated()
            window_updated_event.stream_id = 0
            window_updated_event.delta = frame.window_increment
            stream_events = [window_updated_event]
            frames = []

        return frames, events + stream_events

    def _receive_ping_frame(self, frame):
        """
        Receive a PING frame on the connection.
        """
        events = self.state_machine.process_input(ConnectionInputs.RECV_PING)
        flags = []

        if 'ACK' in frame.flags:
            evt = PingAcknowledged()
            evt.ping_data = frame.opaque_data
            events.append(evt)
        else:
            f = PingFrame(0)
            f.flags = set(['ACK'])
            f.opaque_data = frame.opaque_data
            flags.append(f)

        return flags, events

    def _receive_rst_stream_frame(self, frame):
        """
        Receive a RST_STREAM frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_RST_STREAM)
        try:
            stream = self.get_stream_by_id(frame.stream_id)
        except NoSuchStreamError:
            # The stream is missing. That's ok, we just do nothing here.
            stream_frames = []
            stream_events = []
        else:
            stream_frames, stream_events = stream.stream_reset(frame)

        return stream_frames, events + stream_events

    def _receive_priority_frame(self, frame):
        """
        Receive a PRIORITY frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PRIORITY)
        stream = self.get_or_create_stream(frame.stream_id)
        stream_events = stream.priority_changed_remote(frame)

        return [], events + stream_events

    def _receive_goaway_frame(self, frame):
        """
        Receive a GOAWAY frame on the connection.
        """
        events = self.state_machine.process_input(ConnectionInputs.RECV_GOAWAY)

        # Clear the outbound data buffer: we cannot send further data now.
        self.clear_outbound_data_buffer()

        # Fire an appropriate ConnectionTerminated event.
        new_event = ConnectionTerminated()
        new_event.error_code = frame.error_code
        new_event.last_stream_id = frame.last_stream_id
        events.append(new_event)

        return [], events

    def _local_settings_acked(self):
        """
        Handle the local settings being ACKed, update internal state.
        """
        changes = self.local_settings.acknowledge()

        if SettingsFrame.INITIAL_WINDOW_SIZE in changes:
            setting = changes[SettingsFrame.INITIAL_WINDOW_SIZE]
            self._inbound_flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        return changes
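In this older variant the SETTINGS handshake is driven by the caller: ``receive_data`` surfaces a ``RemoteSettingsChanged`` event, and ``acknowledge_settings`` must be called explicitly to queue the SETTINGS ACK (in the first listing above this happens automatically via ``_acknowledge_settings``). A hedged sketch, again assuming a socket-like ``sock``:

conn = H2Connection(client_side=True)
conn.initiate_connection()
sock.sendall(conn.data_to_send())

for event in conn.receive_data(sock.recv(65535)):
    if isinstance(event, RemoteSettingsChanged):
        # Queues a SETTINGS frame with the ACK flag set.
        conn.acknowledge_settings(event)

sock.sendall(conn.data_to_send())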
Exemple #52
0
class H2Connection(object):
    """
    A low-level HTTP/2 connection object. This handles building and receiving
    frames and maintains both connection-level and per-stream state.

    This wraps a HTTP/2 connection state machine implementation, ensuring that
    frames can only be sent/received when the connection is in a valid state.
    Attempts to create frames that cannot be sent will raise a
    ``ProtocolError``.
    """
    # The initial maximum outbound frame size. This can be changed by receiving
    # a settings frame.
    DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535

    # The initial maximum inbound frame size. This is somewhat arbitrarily
    # chosen.
    DEFAULT_MAX_INBOUND_FRAME_SIZE = 2**24

    # The highest acceptable stream ID.
    HIGHEST_ALLOWED_STREAM_ID = 2**31 - 1

    # The largest acceptable window increment.
    MAX_WINDOW_INCREMENT = 2**31 - 1

    def __init__(self, client_side=True):
        self.state_machine = H2ConnectionStateMachine()
        self.streams = {}
        self.highest_inbound_stream_id = 0
        self.highest_outbound_stream_id = 0
        self.encoder = Encoder()
        self.decoder = Decoder()
        self.client_side = client_side

        # Objects that store settings, including defaults.
        self.local_settings = Settings(client=client_side)
        self.remote_settings = Settings(client=not client_side)

        # The current value of the connection flow control windows.
        self.outbound_flow_control_window = (
            self.remote_settings.initial_window_size
        )
        self.inbound_flow_control_window = (
            self.local_settings.initial_window_size
        )

        #: The maximum size of a frame that can be emitted by this peer, in
        #: bytes.
        self.max_outbound_frame_size = self.remote_settings.max_frame_size

        #: The maximum size of a frame that can be received by this peer, in
        #: bytes.
        self.max_inbound_frame_size = self.local_settings.max_frame_size

        # Buffer for incoming data.
        self.incoming_buffer = FrameBuffer(server=not client_side)

        # A private variable to store a sequence of received header frames
        # until completion.
        self._header_frames = []

        # Data that needs to be sent.
        self._data_to_send = b''

        # When in doubt use dict-dispatch.
        self._frame_dispatch_table = {
            HeadersFrame: self._receive_headers_frame,
            PushPromiseFrame: self._receive_push_promise_frame,
            SettingsFrame: self._receive_settings_frame,
            DataFrame: self._receive_data_frame,
            WindowUpdateFrame: self._receive_window_update_frame,
            PingFrame: self._receive_ping_frame,
            RstStreamFrame: self._receive_rst_stream_frame,
            PriorityFrame: self._receive_priority_frame,
            GoAwayFrame: self._receive_goaway_frame,
            ContinuationFrame: self._receive_naked_continuation,
        }

    def _prepare_for_sending(self, frames):
        if not frames:
            return
        self._data_to_send += b''.join(f.serialize() for f in frames)
        assert all(f.body_len <= self.max_outbound_frame_size for f in frames)

    def _open_streams(self, remainder):
        """
        A common method for counting open streams. Returns the number
        of streams that are open *and* that have (stream ID % 2) == remainder.
        While it iterates, also deletes any closed streams.
        """
        count = 0
        to_delete = []

        for stream_id, stream in self.streams.items():
            if stream.open and (stream_id % 2 == remainder):
                count += 1
            elif stream.closed:
                to_delete.append(stream_id)

        for stream_id in to_delete:
            del self.streams[stream_id]

        return count

    @property
    def open_outbound_streams(self):
        """
        The current number of open outbound streams.
        """
        outbound_numbers = int(self.client_side)
        return self._open_streams(outbound_numbers)

    @property
    def open_inbound_streams(self):
        """
        The current number of open inbound streams.
        """
        inbound_numbers = int(not self.client_side)
        return self._open_streams(inbound_numbers)

    def _begin_new_stream(self, stream_id, allowed_ids):
        """
        Initiate a new stream.

        .. versionchanged:: 2.0.0
           Removed this function from the public API.

        :param stream_id: The ID of the stream to open.
        :param allowed_ids: What kind of stream ID is allowed.
        """
        outbound = self._stream_id_is_outbound(stream_id)
        highest_stream_id = (
            self.highest_outbound_stream_id if outbound else
            self.highest_inbound_stream_id
        )

        if stream_id <= highest_stream_id:
            raise StreamIDTooLowError(stream_id, highest_stream_id)

        if allowed_ids != AllowedStreamIDs.ANY:
            if (stream_id % 2) != int(allowed_ids):
                raise ProtocolError(
                    "Invalid stream ID for peer."
                )

        s = H2Stream(stream_id)
        s.max_inbound_frame_size = self.max_inbound_frame_size
        s.max_outbound_frame_size = self.max_outbound_frame_size
        s.outbound_flow_control_window = (
            self.remote_settings.initial_window_size
        )
        s.inbound_flow_control_window = self.local_settings.initial_window_size

        self.streams[stream_id] = s

        if outbound:
            self.highest_outbound_stream_id = stream_id
        else:
            self.highest_inbound_stream_id = stream_id

        return s

    def initiate_connection(self):
        """
        Provides any data that needs to be sent at the start of the connection.
        Must be called for both clients and servers.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        if self.client_side:
            preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
        else:
            preamble = b''

        f = SettingsFrame(0)
        for setting, value in self.local_settings.items():
            f.settings[setting] = value

        self._data_to_send += preamble + f.serialize()

    def _get_or_create_stream(self, stream_id, allowed_ids):
        """
        Gets a stream by its stream ID. Will create one if one does not already
        exist. Use allowed_ids to circumvent the usual stream ID rules for
        clients and servers.

        .. versionchanged:: 2.0.0
           Removed this function from the public API.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            return self._begin_new_stream(stream_id, allowed_ids)

    def _get_stream_by_id(self, stream_id):
        """
        Gets a stream by its stream ID. Raises NoSuchStreamError if the stream
        ID does not correspond to a known stream and is higher than the current
        maximum; raises StreamClosedError if it is lower than the current
        maximum.

        .. versionchanged:: 2.0.0
           Removed this function from the public API.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            outbound = self._stream_id_is_outbound(stream_id)
            highest_stream_id = (
                self.highest_outbound_stream_id if outbound else
                self.highest_inbound_stream_id
            )

            if stream_id > highest_stream_id:
                raise NoSuchStreamError(stream_id)
            else:
                raise StreamClosedError(stream_id)

    def get_next_available_stream_id(self):
        """
        Returns an integer suitable for use as the stream ID for the next
        stream created by this endpoint. For server endpoints, this stream ID
        will be even. For client endpoints, this stream ID will be odd. If no
        stream IDs are available, raises :class:`NoAvailableStreamIDError
        <h2.exceptions.NoAvailableStreamIDError>`.

        .. warning:: The return value from this function does not change until
                     the stream ID has actually been used by sending or pushing
                     headers on that stream. For that reason, it should be
                     called as close as possible to the actual use of the
                     stream ID.

        .. versionadded:: 2.0.0

        :raises: :class:`NoAvailableStreamIDError
            <h2.exceptions.NoAvailableStreamIDError>`
        :returns: The next free stream ID this peer can use to initiate a
            stream.
        :rtype: ``int``
        """
        # No streams have been opened yet, so return the lowest allowed stream
        # ID.
        if not self.highest_outbound_stream_id:
            return 1 if self.client_side else 2

        next_stream_id = self.highest_outbound_stream_id + 2
        if next_stream_id > self.HIGHEST_ALLOWED_STREAM_ID:
            raise NoAvailableStreamIDError("Exhausted allowed stream IDs")

        return next_stream_id
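
    # Illustrative sketch, not part of the library ('conn' and 'headers' are
    # placeholder names): the ID is normally fetched immediately before the
    # stream is opened, because the value only advances once it has been used.
    #
    #     stream_id = conn.get_next_available_stream_id()
    #     conn.send_headers(stream_id, headers)
    #
    # For a client this yields 1, 3, 5, ...; for a server 2, 4, 6, ...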

    def send_headers(self, stream_id, headers, end_stream=False):
        """
        Send headers on a given stream.

        This function can be used to send request or response headers: which
        kind is sent depends on whether this connection has been opened as a
        client or server connection, and whether the stream was opened by the
        remote peer or not.

        If this is a client connection, calling ``send_headers`` will send the
        headers as a request. It will also implicitly open the stream being
        used. If this is a client connection and ``send_headers`` has *already*
        been called, this will send trailers instead.

        If this is a server connection, calling ``send_headers`` will send the
        headers as a response. It is a protocol error for a server to open a
        stream by sending headers. If this is a server connection and
        ``send_headers`` has *already* been called, this will send trailers
        instead.

        In all situations it is a protocol error to call ``send_headers`` more
        than twice.

        :param stream_id: The stream ID to send the headers on. If this stream
            does not currently exist, it will be created.
        :type stream_id: ``int``
        :param headers: The request/response headers to send.
        :type headers: An iterable of two-tuples of bytestrings.
        :returns: Nothing
        """
        # Check we can open the stream.
        if stream_id not in self.streams:
            max_open_streams = self.remote_settings.max_concurrent_streams
            if (self.open_outbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max outbound streams is %d, %d open" %
                    (max_open_streams, self.open_outbound_streams)
                )

        self.state_machine.process_input(ConnectionInputs.SEND_HEADERS)
        stream = self._get_or_create_stream(
            stream_id, AllowedStreamIDs(self.client_side)
        )
        frames = stream.send_headers(
            headers, self.encoder, end_stream
        )
        self._prepare_for_sending(frames)

    def send_data(self, stream_id, data, end_stream=False):
        """
        Send data on a given stream.

        This method does no breaking up of data: if the data is larger than the
        value returned by :meth:`local_flow_control_window
        <h2.connection.H2Connection.local_flow_control_window>` for this stream
        then a :class:`FlowControlError <h2.exceptions.FlowControlError>` will
        be raised. If the data is larger than :data:`max_outbound_frame_size
        <h2.connection.H2Connection.max_outbound_frame_size>` then a
        :class:`FrameTooLargeError <h2.exceptions.FrameTooLargeError>` will be
        raised.

        Hyper-h2 does this to avoid buffering the data internally. If the user
        has more data to send than hyper-h2 will allow, consider breaking it up
        and buffering it externally.

        :param stream_id: The ID of the stream on which to send the data.
        :type stream_id: ``int``
        :param data: The data to send on the stream.
        :type data: ``bytes``
        :param end_stream: (optional) Whether this is the last data to be sent
            on the stream. Defaults to ``False``.
        :type end_stream: ``bool``
        :returns: Nothing
        """
        if len(data) > self.local_flow_control_window(stream_id):
            raise FlowControlError(
                "Cannot send %d bytes, flow control window is %d." %
                (len(data), self.local_flow_control_window(stream_id))
            )
        elif len(data) > self.max_outbound_frame_size:
            raise FrameTooLargeError(
                "Cannot send frame size %d, max frame size is %d" %
                (len(data), self.max_outbound_frame_size)
            )

        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames = self.streams[stream_id].send_data(data, end_stream)
        self._prepare_for_sending(frames)

        self.outbound_flow_control_window -= len(data)
        assert self.outbound_flow_control_window >= 0
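
    # Illustrative note, not part of the library ('conn', 'stream_id' and
    # 'data' are placeholders): send_data() never splits data itself, so
    # callers holding more than the current window or frame size usually
    # chunk externally, e.g.
    #
    #     limit = min(conn.local_flow_control_window(stream_id),
    #                 conn.max_outbound_frame_size)
    #     conn.send_data(stream_id, data[:limit])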

    def end_stream(self, stream_id):
        """
        Cleanly end a given stream.

        This method ends a stream by sending an empty DATA frame on that stream
        with the ``END_STREAM`` flag set.

        :param stream_id: The ID of the stream to end.
        :type stream_id: ``int``
        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames = self.streams[stream_id].end_stream()
        self._prepare_for_sending(frames)

    def increment_flow_control_window(self, increment, stream_id=None):
        """
        Increment a flow control window, optionally for a single stream. Allows
        the remote peer to send more data.

        .. versionchanged:: 2.0.0
           Rejects attempts to increment the flow control window by out of
           range values with a ``ValueError``.

        :param increment: The amount to increment the flow control window by.
        :type increment: ``int``
        :param stream_id: (optional) The ID of the stream that should have its
            flow control window opened. If not present or ``None``, the
            connection flow control window will be opened instead.
        :type stream_id: ``int`` or ``None``
        :returns: Nothing
        :raises: ``ValueError``
        """
        if not (1 <= increment <= self.MAX_WINDOW_INCREMENT):
            raise ValueError(
                "Flow control increment must be between 1 and %d" %
                self.MAX_WINDOW_INCREMENT
            )

        self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE)

        if stream_id is not None:
            stream = self.streams[stream_id]
            frames = stream.increase_flow_control_window(
                increment
            )
            stream.inbound_flow_control_window = guard_increment_window(
                stream.inbound_flow_control_window,
                increment
            )
        else:
            f = WindowUpdateFrame(0)
            f.window_increment = increment
            self.inbound_flow_control_window = guard_increment_window(
                self.inbound_flow_control_window,
                increment
            )
            frames = [f]

        self._prepare_for_sending(frames)

    def push_stream(self, stream_id, promised_stream_id, request_headers):
        """
        Push a response to the client by sending a PUSH_PROMISE frame.

        :param stream_id: The ID of the stream that this push is a response to.
        :type stream_id: ``int``
        :param promised_stream_id: The ID of the stream that the pushed
            response will be sent on.
        :type promised_stream_id: ``int``
        :param request_headers: The headers of the request that the pushed
            response will be responding to.
        :type request_headers: An iterable of two-tuples of bytestrings.
        :returns: Nothing
        """
        if not self.remote_settings.enable_push:
            raise ProtocolError("Remote peer has disabled stream push")

        self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE)
        stream = self._get_stream_by_id(stream_id)

        new_stream = self._begin_new_stream(
            promised_stream_id, AllowedStreamIDs.EVEN
        )
        self.streams[promised_stream_id] = new_stream

        frames = stream.push_stream_in_band(
            promised_stream_id, request_headers, self.encoder
        )
        new_frames = new_stream.locally_pushed()
        self._prepare_for_sending(frames + new_frames)

    def ping(self, opaque_data):
        """
        Send a PING frame.

        :param opaque_data: A bytestring of length 8 that will be sent in the
                            PING frame.
        :returns: Nothing
        """
        if not isinstance(opaque_data, bytes) or len(opaque_data) != 8:
            raise ValueError("Invalid value for ping data: %r" % opaque_data)

        self.state_machine.process_input(ConnectionInputs.SEND_PING)
        f = PingFrame(0)
        f.opaque_data = opaque_data
        self._prepare_for_sending([f])

    def reset_stream(self, stream_id, error_code=0):
        """
        Reset a stream.

        This method forcibly closes a stream by sending a RST_STREAM frame for
        a given stream. This is not a graceful closure. To gracefully end a
        stream, try the :meth:`end_stream
        <h2.connection.H2Connection.end_stream>` method.

        :param stream_id: The ID of the stream to reset.
        :type stream_id: ``int``
        :param error_code: (optional) The error code to use to reset the
            stream. Defaults to :data:`NO_ERROR <h2.errors.NO_ERROR>`.
        :type error_code: ``int``
        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM)
        stream = self._get_stream_by_id(stream_id)
        frames = stream.reset_stream(error_code)
        self._prepare_for_sending(frames)

    def close_connection(self, error_code=0):
        """
        Close a connection, emitting a GOAWAY frame.

        :param error_code: (optional) The error code to send in the GOAWAY
            frame.
        :returns: Nothing
        """
        self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)

        f = GoAwayFrame(0)
        f.error_code = error_code
        f.last_stream_id = self.highest_inbound_stream_id
        self._prepare_for_sending([f])

    def update_settings(self, new_settings):
        """
        Update the local settings. This will prepare and emit the appropriate
        SETTINGS frame.

        :param new_settings: A dictionary of {setting: new value}
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        self.local_settings.update(new_settings)
        s = SettingsFrame(0)
        s.settings = new_settings
        self._prepare_for_sending([s])

    def local_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data that can be sent on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.

        :param stream_id: The ID of the stream whose flow control window is
            being queried.
        :type stream_id: ``int``
        :returns: The amount of data in bytes that can be sent on the stream
            before the flow control window is exhausted.
        :rtype: ``int``
        """
        stream = self._get_stream_by_id(stream_id)
        return min(
            self.outbound_flow_control_window,
            stream.outbound_flow_control_window
        )

    def remote_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data the remote peer can send on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.

        :param stream_id: The ID of the stream whose flow control window is
            being queried.
        :type stream_id: ``int``
        :returns: The amount of data in bytes that can be received on the
            stream before the flow control window is exhausted.
        :rtype: ``int``
        """
        stream = self._get_stream_by_id(stream_id)
        return min(
            self.inbound_flow_control_window,
            stream.inbound_flow_control_window
        )

    def data_to_send(self, amt=None):
        """
        Returns some data for sending out of the internal data buffer.

        This method is analogous to ``read`` on a file-like object, but it
        doesn't block. Instead, it returns as much data as the user asks for,
        or less if that much data is not available. It does not perform any
        I/O, and so uses a different name.

        :param amt: (optional) The maximum amount of data to return. If not
            set, or set to ``None``, will return as much data as possible.
        :type amt: ``int``
        :returns: A bytestring containing the data to send on the wire.
        :rtype: ``bytes``
        """
        if amt is None:
            data = self._data_to_send
            self._data_to_send = b''
            return data
        else:
            data = self._data_to_send[:amt]
            self._data_to_send = self._data_to_send[amt:]
            return data

    def clear_outbound_data_buffer(self):
        """
        Clears the outbound data buffer, such that if this call was immediately
        followed by a call to
        :meth:`data_to_send <h2.connection.H2Connection.data_to_send>`, that
        call would return no data.

        This method should not normally be used, but is made available to avoid
        exposing implementation details.
        """
        self._data_to_send = b''

    def _acknowledge_settings(self):
        """
        Acknowledge settings that have been received.

        .. versionchanged:: 2.0.0
           Removed from public API, removed useless ``event`` parameter, made
           automatic.

        :returns: A list containing the SETTINGS ACK frame to be sent.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)

        changes = self.remote_settings.acknowledge()

        if INITIAL_WINDOW_SIZE in changes:
            setting = changes[INITIAL_WINDOW_SIZE]
            self._flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        # HEADER_TABLE_SIZE changes by the remote peer affect our encoder: cf.
        # RFC 7540 Section 6.5.2.
        if HEADER_TABLE_SIZE in changes:
            setting = changes[HEADER_TABLE_SIZE]
            self.encoder.header_table_size = setting.new_value

        if MAX_FRAME_SIZE in changes:
            setting = changes[MAX_FRAME_SIZE]
            self.max_outbound_frame_size = setting.new_value
            for stream in self.streams.values():
                stream.max_outbound_frame_size = setting.new_value

        f = SettingsFrame(0)
        f.flags.add('ACK')
        return [f]

    def _flow_control_change_from_settings(self, old_value, new_value):
        """
        Update flow control windows in response to a change in the value of
        SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all flow control
        windows by the delta in the settings values. Note that it does not
        increment the *connection* flow control window, per section 6.9.2 of
        RFC 7540.
        """
        delta = new_value - old_value

        for stream in self.streams.values():
            stream.outbound_flow_control_window = guard_increment_window(
                stream.outbound_flow_control_window,
                delta
            )

    def _inbound_flow_control_change_from_settings(self, old_value, new_value):
        """
        Update remote flow control windows in response to a change in the value
        of SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all remote flow
        control windows by the delta in the settings values.
        """
        delta = new_value - old_value

        for stream in self.streams.values():
            stream.inbound_flow_control_window += delta

    def receive_data(self, data):
        """
        Pass some received HTTP/2 data to the connection for handling.

        :param data: The data received from the remote peer on the network.
        :type data: ``bytes``
        :returns: A list of events that the remote peer triggered by sending
            this data.
        """
        events = []
        self.incoming_buffer.add_data(data)
        self.incoming_buffer.max_frame_size = self.max_inbound_frame_size

        try:
            for frame in self.incoming_buffer:
                events.extend(self._receive_frame(frame))
        except InvalidPaddingError:
            self._terminate_connection(PROTOCOL_ERROR)
            raise ProtocolError("Received frame with invalid padding.")
        except ProtocolError as e:
            # For whatever reason, receiving the frame caused a protocol error.
            # We should prepare to emit a GoAway frame before throwing the
            # exception up further. No need for an event: the exception will
            # do fine.
            self._terminate_connection(e.error_code)
            raise

        return events

    def _receive_frame(self, frame):
        """
        Handle a frame received on the connection.

        .. versionchanged:: 2.0.0
           Removed from the public API.
        """
        try:
            # I don't love using __class__ here, maybe reconsider it.
            frames, events = self._frame_dispatch_table[frame.__class__](frame)
        except StreamClosedError as e:
            # We need to send a RST_STREAM frame on behalf of the stream.
            # The frame the stream wants to emit is already present in the
            # exception.
            # This does not require re-raising: it's an expected behaviour.
            f = RstStreamFrame(e.stream_id)
            f.error_code = e.error_code
            self._prepare_for_sending([f])
            events = e._events
        except KeyError as e:
            # We don't have a function for handling this frame. Let's call this
            # a PROTOCOL_ERROR and exit.
            raise UnsupportedFrameError("Unexpected frame: %s" % frame)
        else:
            self._prepare_for_sending(frames)

        return events

    def _terminate_connection(self, error_code):
        """
        Terminate the connection early. Used in error handling blocks to send
        GOAWAY frames.
        """
        f = GoAwayFrame(0)
        f.last_stream_id = self.highest_inbound_stream_id
        f.error_code = error_code
        self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)
        self._prepare_for_sending([f])

    def _receive_headers_frame(self, frame):
        """
        Receive a headers frame on the connection.
        """
        # If necessary, check we can open the stream. Also validate that the
        # stream ID is valid.
        if frame.stream_id not in self.streams:
            max_open_streams = self.local_settings.max_concurrent_streams
            if (self.open_inbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max outbound streams is %d, %d open" %
                    (max_open_streams, self.open_outbound_streams)
                )

        # Let's decode the headers.
        try:
            headers = self.decoder.decode(frame.data)
        except (HPACKError, IndexError, TypeError, UnicodeDecodeError) as e:
            # We should only need HPACKError here, but versions of HPACK
            # older than 2.1.0 throw all three others as well. For maximum
            # compatibility, catch all of them.
            raise ProtocolError("Error decoding header block: %s" % e)

        headers = validate_headers(headers)
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_HEADERS
        )
        stream = self._get_or_create_stream(
            frame.stream_id, AllowedStreamIDs(not self.client_side)
        )
        frames, stream_events = stream.receive_headers(
            headers,
            'END_STREAM' in frame.flags
        )

        if 'PRIORITY' in frame.flags:
            stream_events.extend(stream.priority_changed_remote(frame))

        return frames, events + stream_events

    def _receive_push_promise_frame(self, frame):
        """
        Receive a push-promise frame on the connection.
        """
        if not self.local_settings.enable_push:
            raise ProtocolError("Received pushed stream")

        pushed_headers = self.decoder.decode(frame.data)

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PUSH_PROMISE
        )
        stream = self._get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_push_promise_in_band(
            frame.promised_stream_id,
            pushed_headers,
        )

        new_stream = self._begin_new_stream(
            frame.promised_stream_id, AllowedStreamIDs.EVEN
        )
        self.streams[frame.promised_stream_id] = new_stream
        new_stream.remotely_pushed()

        return frames, events + stream_events

    def _receive_data_frame(self, frame):
        """
        Receive a data frame on the connection.
        """
        flow_controlled_length = frame.flow_controlled_length
        window_size = self.remote_flow_control_window(frame.stream_id)
        if flow_controlled_length > window_size:
            raise FlowControlError(
                "Cannot receive %d bytes, flow control window is %d." %
                (
                    flow_controlled_length,
                    window_size
                )
            )

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_DATA
        )
        self.inbound_flow_control_window -= flow_controlled_length
        stream = self._get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_data(
            frame.data,
            'END_STREAM' in frame.flags,
            flow_controlled_length
        )
        return frames, events + stream_events

    def _receive_settings_frame(self, frame):
        """
        Receive a SETTINGS frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_SETTINGS
        )

        # This is an ack of the local settings.
        if 'ACK' in frame.flags:
            changed_settings = self._local_settings_acked()
            ack_event = SettingsAcknowledged()
            ack_event.changed_settings = changed_settings
            events.append(ack_event)
            return [], events

        # Add the new settings.
        self.remote_settings.update(frame.settings)
        events.append(
            RemoteSettingsChanged.from_settings(
                self.remote_settings, frame.settings
            )
        )
        frames = self._acknowledge_settings()

        return frames, events

    def _receive_window_update_frame(self, frame):
        """
        Receive a WINDOW_UPDATE frame on the connection.
        """
        # Validate the frame.
        if not (1 <= frame.window_increment <= self.MAX_WINDOW_INCREMENT):
            raise ProtocolError(
                "Flow control increment must be between 1 and %d, received %d"
                % (self.MAX_WINDOW_INCREMENT, frame.window_increment)
            )

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_WINDOW_UPDATE
        )

        if frame.stream_id:
            stream = self._get_stream_by_id(frame.stream_id)
            frames, stream_events = stream.receive_window_update(
                frame.window_increment
            )
        else:
            # Increment our local flow control window.
            self.outbound_flow_control_window = guard_increment_window(
                self.outbound_flow_control_window,
                frame.window_increment
            )

            # FIXME: Should we split this into one event per active stream?
            window_updated_event = WindowUpdated()
            window_updated_event.stream_id = 0
            window_updated_event.delta = frame.window_increment
            stream_events = [window_updated_event]
            frames = []

        return frames, events + stream_events

    def _receive_ping_frame(self, frame):
        """
        Receive a PING frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PING
        )
        frames = []

        if 'ACK' in frame.flags:
            evt = PingAcknowledged()
            evt.ping_data = frame.opaque_data
            events.append(evt)
        else:
            f = PingFrame(0)
            f.flags = set(['ACK'])
            f.opaque_data = frame.opaque_data
            frames.append(f)

        return frames, events

    def _receive_rst_stream_frame(self, frame):
        """
        Receive a RST_STREAM frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_RST_STREAM
        )
        try:
            stream = self._get_stream_by_id(frame.stream_id)
        except NoSuchStreamError:
            # The stream is missing. That's ok, we just do nothing here.
            stream_frames = []
            stream_events = []
        else:
            stream_frames, stream_events = stream.stream_reset(frame)

        return stream_frames, events + stream_events

    def _receive_priority_frame(self, frame):
        """
        Receive a PRIORITY frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PRIORITY
        )
        stream = self._get_or_create_stream(
            frame.stream_id, AllowedStreamIDs.ANY
        )
        stream_events = stream.priority_changed_remote(frame)

        return [], events + stream_events

    def _receive_goaway_frame(self, frame):
        """
        Receive a GOAWAY frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_GOAWAY
        )

        # Clear the outbound data buffer: we cannot send further data now.
        self.clear_outbound_data_buffer()

        # Fire an appropriate ConnectionTerminated event.
        new_event = ConnectionTerminated()
        new_event.error_code = frame.error_code
        new_event.last_stream_id = frame.last_stream_id
        events.append(new_event)

        return [], events

    def _receive_naked_continuation(self, frame):
        """
        A naked CONTINUATION frame has been received. This is always an error,
        but the type of error it is depends on the state of the stream and must
        transition the state of the stream, so we need to pass it to the
        appropriate stream.
        """
        stream = self._get_stream_by_id(frame.stream_id)
        stream.receive_continuation()
        assert False, "Should not be reachable"

    def _local_settings_acked(self):
        """
        Handle the local settings being ACKed, update internal state.
        """
        changes = self.local_settings.acknowledge()

        if INITIAL_WINDOW_SIZE in changes:
            setting = changes[INITIAL_WINDOW_SIZE]
            self._inbound_flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        return changes

    def _stream_id_is_outbound(self, stream_id):
        """
        Returns ``True`` if the stream ID corresponds to an outbound stream
        (one initiated by this peer), returns ``False`` otherwise.
        """
        return (stream_id % 2 == int(self.client_side))
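
A note on usage: the ``receive_data`` and ``data_to_send`` methods above are the whole I/O surface of this connection object. Bytes read from the network go into ``receive_data``, events come back out, and any frames the connection queues in response are retrieved with ``data_to_send`` and written to the socket by the caller. The sketch below is illustrative only: the socket handling, the ``initiate_connection()`` and ``send_headers()`` calls, and the ``client_side`` constructor argument are assumptions (they match the released h2 package but are not shown in the listing above).

# Minimal blocking client loop driving the connection above (illustrative).
import socket

def simple_get(host='example.com', port=80):
    sock = socket.create_connection((host, port))
    conn = H2Connection(client_side=True)       # constructor args assumed
    conn.initiate_connection()                  # assumed method, as in released h2
    conn.send_headers(1, [                      # assumed method, as in released h2
        (':method', 'GET'), (':path', '/'),
        (':scheme', 'http'), (':authority', host),
    ], end_stream=True)
    sock.sendall(conn.data_to_send())

    while True:
        chunk = sock.recv(65535)
        if not chunk:
            break
        # receive_data() parses the frames and returns events; anything the
        # connection wants to send back is flushed via data_to_send().
        for event in conn.receive_data(chunk):
            print(event)
        outbound = conn.data_to_send()
        if outbound:
            sock.sendall(outbound)
    sock.close()
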
Exemple #53
0
class H2Connection(object):
    """
    A low-level HTTP/2 connection object. This handles building and receiving
    frames and maintains both connection-level and per-stream state.

    This wraps a HTTP/2 connection state machine implementation, ensuring that
    frames can only be sent/received when the connection is in a valid state.
    Attempts to create frames that cannot be sent will raise a
    ``ProtocolError``.
    """
    # The initial maximum outbound frame size. This can be changed by receiving
    # a settings frame.
    DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535

    # The initial maximum inbound frame size. This is somewhat arbitrarily
    # chosen.
    DEFAULT_MAX_INBOUND_FRAME_SIZE = 2**24

    def __init__(self, client_side=True):
        self.state_machine = H2ConnectionStateMachine()
        self.streams = {}
        self.highest_stream_id = 0
        self.encoder = Encoder()
        self.decoder = Decoder()
        self.client_side = client_side

        # Objects that store settings, including defaults.
        self.local_settings = Settings(client=client_side)
        self.remote_settings = Settings(client=not client_side)

        # The current values of the connection-level flow control windows.
        self.outbound_flow_control_window = (
            self.remote_settings.initial_window_size
        )
        self.inbound_flow_control_window = (
            self.local_settings.initial_window_size
        )

        # Maximum frame sizes in each direction.
        self.max_outbound_frame_size = self.remote_settings.max_frame_size
        self.max_inbound_frame_size = self.local_settings.max_frame_size

        # Buffer for incoming data.
        self.incoming_buffer = FrameBuffer(server=not client_side)

        # A private variable to store a sequence of received header frames
        # until completion.
        self._header_frames = []

        # Data that needs to be sent.
        self._data_to_send = b''

        # When in doubt use dict-dispatch.
        self._frame_dispatch_table = {
            HeadersFrame: self._receive_headers_frame,
            PushPromiseFrame: self._receive_push_promise_frame,
            SettingsFrame: self._receive_settings_frame,
            DataFrame: self._receive_data_frame,
            WindowUpdateFrame: self._receive_window_update_frame,
            PingFrame: self._receive_ping_frame,
            RstStreamFrame: self._receive_rst_stream_frame,
            PriorityFrame: self._receive_priority_frame,
            GoAwayFrame: self._receive_goaway_frame,
        }

    def _prepare_for_sending(self, frames):
        if not frames:
            return
        self._data_to_send += b''.join(f.serialize() for f in frames)
        assert all(f.body_len <= self.max_outbound_frame_size for f in frames)

    def _open_streams(self, remainder):
        """
        A common method for counting the number of open streams. Returns the
        number of streams that are open *and* that have (stream ID % 2) ==
        remainder. While iterating, it also deletes any closed streams.
        """
        count = 0
        to_delete = []

        for stream_id, stream in self.streams.items():
            if stream.open and (stream_id % 2 == remainder):
                count += 1
            elif stream.closed:
                to_delete.append(stream_id)

        for stream_id in to_delete:
            del self.streams[stream_id]

        return count

    @property
    def open_outbound_streams(self):
        """
        The current number of open outbound streams.
        """
        outbound_numbers = int(self.client_side)
        return self._open_streams(outbound_numbers)

    @property
    def open_inbound_streams(self):
        """
        The current number of open inbound streams.
        """
        inbound_numbers = int(not self.client_side)
        return self._open_streams(inbound_numbers)

    def begin_new_stream(self, stream_id):
        """
        Initiate a new stream.
        """
        if stream_id <= self.highest_stream_id:
            raise StreamIDTooLowError(
                "Stream ID must be larger than %s", self.highest_stream_id
            )

        s = H2Stream(stream_id)
        s.max_inbound_frame_size = self.max_inbound_frame_size
        s.max_outbound_frame_size = self.max_outbound_frame_size
        s.outbound_flow_control_window = (
            self.remote_settings.initial_window_size
        )
        s.inbound_flow_control_window = self.local_settings.initial_window_size

        self.streams[stream_id] = s
        self.highest_stream_id = stream_id
        return s

    def initiate_connection(self):
        """
        Provides any data that needs to be sent at the start of the connection.
        Must be called for both clients and servers.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        if self.client_side:
            preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
        else:
            preamble = b''

        f = SettingsFrame(0)
        for setting, value in self.local_settings.items():
            f.settings[setting] = value

        self._data_to_send += preamble + f.serialize()
        return []

    def get_or_create_stream(self, stream_id):
        """
        Gets a stream by its stream ID. Will create one if one does not already
        exist.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            return self.begin_new_stream(stream_id)

    def get_stream_by_id(self, stream_id):
        """
        Gets a stream by its stream ID. Raises NoSuchStreamError if the stream
        ID does not correspond to a known stream and is higher than the current
        maximum; raises StreamClosedError if it is at or below the current
        maximum.
        """
        try:
            return self.streams[stream_id]
        except KeyError:
            if stream_id > self.highest_stream_id:
                raise NoSuchStreamError(stream_id)
            else:
                raise StreamClosedError(stream_id)

    def send_headers(self, stream_id, headers, end_stream=False):
        """
        Send headers on a given stream.
        """
        # Check we can open the stream.
        if stream_id not in self.streams:
            max_open_streams = self.remote_settings.max_concurrent_streams
            if (self.open_outbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max outbound streams is %d, %d open" %
                    (max_open_streams, self.open_outbound_streams)
                )

        self.state_machine.process_input(ConnectionInputs.SEND_HEADERS)
        stream = self.get_or_create_stream(stream_id)
        frames, events = stream.send_headers(
            headers, self.encoder, end_stream
        )
        self._prepare_for_sending(frames)
        return events

    def send_data(self, stream_id, data, end_stream=False):
        """
        Send data on a given stream.
        """
        if len(data) > self.local_flow_control_window(stream_id):
            raise FlowControlError(
                "Cannot send %d bytes, flow control window is %d." %
                (len(data), self.local_flow_control_window(stream_id))
            )
        elif len(data) > self.max_outbound_frame_size:
            raise FrameTooLargeError(
                "Cannot send frame size %d, max frame size is %d" %
                (len(data), self.max_outbound_frame_size)
            )

        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames, events = self.streams[stream_id].send_data(data, end_stream)
        self._prepare_for_sending(frames)

        self.outbound_flow_control_window -= len(data)
        assert self.outbound_flow_control_window >= 0

        return events

    def end_stream(self, stream_id):
        """
        End a given stream.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_DATA)
        frames, events = self.streams[stream_id].end_stream()
        self._prepare_for_sending(frames)
        return events

    def increment_flow_control_window(self, increment, stream_id=None):
        """
        Increment a flow control window, optionally for a single stream.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE)

        if stream_id is not None:
            stream = self.streams[stream_id]
            frames, events = stream.increase_flow_control_window(
                increment
            )
            stream.inbound_flow_control_window += increment
        else:
            f = WindowUpdateFrame(0)
            f.window_increment = increment
            self.inbound_flow_control_window += increment
            frames = [f]
            events = []

        self._prepare_for_sending(frames)
        return events

    def push_stream(self, stream_id, promised_stream_id, request_headers):
        """
        Send a push promise.
        """
        if not self.remote_settings.enable_push:
            raise ProtocolError("Remote peer has disabled stream push")

        self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE)
        stream = self.get_stream_by_id(stream_id)

        new_stream = self.begin_new_stream(promised_stream_id)
        self.streams[promised_stream_id] = new_stream

        frames, events = stream.push_stream_in_band(
            promised_stream_id, request_headers, self.encoder
        )
        new_frames, new_events = new_stream.locally_pushed()
        self._prepare_for_sending(frames + new_frames)
        return events + new_events

    def ping(self, opaque_data):
        """
        Send a PING frame.

        :param opaque_data: A bytestring of length 8 that will be sent in the
                            PING frame.
        :returns: A list of events.
        """
        if not isinstance(opaque_data, bytes) or len(opaque_data) != 8:
            raise ValueError("Invalid value for ping data: %r" % opaque_data)

        self.state_machine.process_input(ConnectionInputs.SEND_PING)
        f = PingFrame(0)
        f.opaque_data = opaque_data
        self._prepare_for_sending([f])

        return []

    def reset_stream(self, stream_id, error_code=0):
        """
        Reset a stream by sending a RST_STREAM frame.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM)
        stream = self.get_stream_by_id(stream_id)
        frames, events = stream.reset_stream(error_code)

        self._prepare_for_sending(frames)
        return events

    def close_connection(self, error_code=0):
        """
        Close a connection, emitting a GOAWAY frame.
        """
        self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)

        f = GoAwayFrame(0)
        f.error_code = error_code
        f.last_stream_id = self.highest_stream_id
        self._prepare_for_sending([f])

        return []

    def acknowledge_settings(self, event):
        """
        Acknowledge settings that have been received.

        :param event: The RemoteSettingsChanged event that is being
                      acknowledged.
        :returns: A list of events.
        """
        assert isinstance(event, RemoteSettingsChanged)
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)

        changes = self.remote_settings.acknowledge()

        if SettingsFrame.INITIAL_WINDOW_SIZE in changes:
            setting = changes[SettingsFrame.INITIAL_WINDOW_SIZE]
            self._flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        # HEADER_TABLE_SIZE changes by the remote peer affect our encoder: cf.
        # RFC 7540 Section 6.5.2.
        if SettingsFrame.HEADER_TABLE_SIZE in changes:
            setting = changes[SettingsFrame.HEADER_TABLE_SIZE]
            self.encoder.header_table_size = setting.new_value

        if SettingsFrame.SETTINGS_MAX_FRAME_SIZE in changes:
            setting = changes[SettingsFrame.SETTINGS_MAX_FRAME_SIZE]
            self.max_outbound_frame_size = setting.new_value
            for stream in self.streams.values():
                stream.max_outbound_frame_size = setting.new_value

        f = SettingsFrame(0)
        f.flags.add('ACK')
        self._prepare_for_sending([f])
        return []

    def update_settings(self, new_settings):
        """
        Update the local settings. This will prepare and emit the appropriate
        SETTINGS frame.

        :param new_settings: A dictionary of {setting: new value}
        """
        self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
        self.local_settings.update(new_settings)
        s = SettingsFrame(0)
        s.settings = new_settings
        self._prepare_for_sending([s])
        return []

    def local_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data that can be sent on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.
        """
        stream = self.get_stream_by_id(stream_id)
        return min(
            self.outbound_flow_control_window,
            stream.outbound_flow_control_window
        )

    def remote_flow_control_window(self, stream_id):
        """
        Returns the maximum amount of data the remote peer can send on stream
        ``stream_id``.

        This value will never be larger than the total data that can be sent on
        the connection: even if the given stream allows more data, the
        connection window provides a logical maximum to the amount of data that
        can be sent.

        The maximum data that can be sent in a single data frame on a stream
        is either this value, or the maximum frame size, whichever is
        *smaller*.
        """
        stream = self.get_stream_by_id(stream_id)
        return min(
            self.inbound_flow_control_window,
            stream.inbound_flow_control_window
        )

    def data_to_send(self, amt=None):
        """
        Returns some data for sending out of the internal data buffer.

        This method is analogous to 'read' on a file-like object, but it
        doesn't block. Instead, it returns as much data as the user asks for,
        or less if that much data is not available. It does not perform any
        I/O, and so uses a different name.
        """
        if amt is None:
            data = self._data_to_send
            self._data_to_send = b''
            return data
        else:
            data = self._data_to_send[:amt]
            self._data_to_send = self._data_to_send[amt:]
            return data

    def clear_outbound_data_buffer(self):
        """
        Clears the outbound data buffer, such that if this call was immediately
        followed by a call to
        :meth:`data_to_send <h2.connection.H2Connection.data_to_send>`, that
        call would return no data.

        This method should not normally be used, but is made available to avoid
        exposing implementation details.
        """
        self._data_to_send = b''

    def _flow_control_change_from_settings(self, old_value, new_value):
        """
        Update flow control windows in response to a change in the value of
        SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all flow control
        windows by the delta in the settings values.
        """
        delta = new_value - old_value
        self.outbound_flow_control_window += delta

        for stream in self.streams.values():
            stream.outbound_flow_control_window += delta

        return

    def _inbound_flow_control_change_from_settings(self, old_value, new_value):
        """
        Update remote flow control windows in response to a change in the value
        of SETTINGS_INITIAL_WINDOW_SIZE.

        When this setting is changed, it automatically updates all remote flow
        control windows by the delta in the settings values.
        """
        delta = new_value - old_value
        self.inbound_flow_control_window += delta

        for stream in self.streams.values():
            stream.inbound_flow_control_window += delta

        return

    def receive_data(self, data):
        """
        Pass some received HTTP/2 data to the connection for handling.
        """
        events = []
        self.incoming_buffer.add_data(data)

        for frame in self.incoming_buffer:
            events.extend(self.receive_frame(frame))

        return events

    def receive_frame(self, frame):
        """
        Handle a frame received on the connection.
        """
        try:
            if frame.body_len > self.max_inbound_frame_size:
                raise ProtocolError(
                    "Received overlong frame: length %d, max %d" %
                    (frame.body_len, self.max_inbound_frame_size)
                )

            # I don't love using __class__ here, maybe reconsider it.
            frames, events = self._frame_dispatch_table[frame.__class__](frame)
        except ProtocolError as e:
            # For whatever reason, receiving the frame caused a protocol error.
            # We should prepare to emit a GoAway frame before throwing the
            # exception up further. No need for an event: the exception will
            # do fine.
            f = GoAwayFrame(0)
            f.last_stream_id = self.highest_stream_id
            f.error_code = e.error_code
            self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)
            self._prepare_for_sending([f])
            raise
        except StreamClosedError as e:
            # We need to send a RST_STREAM frame on behalf of the stream.
            # The frame the stream wants to emit is already present in the
            # exception.
            # This does not require re-raising: it's an expected behaviour.
            f = RstStreamFrame(e.stream_id)
            f.error_code = e.error_code
            self._prepare_for_sending([f])
            events = []
        else:
            self._prepare_for_sending(frames)

        return events

    def _receive_headers_frame(self, frame):
        """
        Receive a headers frame on the connection.
        """
        # If necessary, check we can open the stream.
        if frame.stream_id not in self.streams:
            max_open_streams = self.local_settings.max_concurrent_streams
            if (self.open_inbound_streams + 1) > max_open_streams:
                raise TooManyStreamsError(
                    "Max outbound streams is %d, %d open" %
                    (max_open_streams, self.open_outbound_streams)
                )

        # Let's decode the headers.
        headers = self.decoder.decode(frame.data)
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_HEADERS
        )
        stream = self.get_or_create_stream(frame.stream_id)
        frames, stream_events = stream.receive_headers(
            headers,
            'END_STREAM' in frame.flags
        )
        return frames, events + stream_events

    def _receive_push_promise_frame(self, frame):
        """
        Receive a push-promise frame on the connection.
        """
        if not self.local_settings.enable_push:
            raise ProtocolError("Received pushed stream")

        pushed_headers = self.decoder.decode(frame.data)

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PUSH_PROMISE
        )
        stream = self.get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_push_promise_in_band(
            frame.promised_stream_id,
            pushed_headers,
        )

        new_stream = self.begin_new_stream(frame.promised_stream_id)
        self.streams[frame.promised_stream_id] = new_stream
        new_stream.remotely_pushed()

        return frames, events + stream_events

    def _receive_data_frame(self, frame):
        """
        Receive a data frame on the connection.
        """
        if frame.body_len > self.remote_flow_control_window(frame.stream_id):
            raise FlowControlError(
                "Cannot receive %d bytes, flow control window is %d." %
                (
                    frame.body_len,
                    self.remote_flow_control_window(frame.stream_id)
                )
            )

        events = self.state_machine.process_input(
            ConnectionInputs.RECV_DATA
        )
        self.inbound_flow_control_window -= frame.body_len
        stream = self.get_stream_by_id(frame.stream_id)
        frames, stream_events = stream.receive_data(
            frame.data,
            'END_STREAM' in frame.flags,
            frame.body_len
        )
        return frames, events + stream_events

    def _receive_settings_frame(self, frame):
        """
        Receive a SETTINGS frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_SETTINGS
        )

        # This is an ack of the local settings.
        if 'ACK' in frame.flags:
            changed_settings = self._local_settings_acked()
            ack_event = SettingsAcknowledged()
            ack_event.changed_settings = changed_settings
            events.append(ack_event)
            return [], events

        # Add the new settings.
        self.remote_settings.update(frame.settings)

        events.append(RemoteSettingsChanged.from_settings(
            self.remote_settings, frame.settings
        ))
        return [], events

    def _receive_window_update_frame(self, frame):
        """
        Receive a WINDOW_UPDATE frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_WINDOW_UPDATE
        )

        if frame.stream_id:
            stream = self.get_stream_by_id(frame.stream_id)
            frames, stream_events = stream.receive_window_update(
                frame.window_increment
            )
        else:
            # Increment our local flow control window.
            self.outbound_flow_control_window += frame.window_increment

            # FIXME: Should we split this into one event per active stream?
            window_updated_event = WindowUpdated()
            window_updated_event.stream_id = 0
            window_updated_event.delta = frame.window_increment
            stream_events = [window_updated_event]
            frames = []

        return frames, events + stream_events

    def _receive_ping_frame(self, frame):
        """
        Receive a PING frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PING
        )
        frames = []

        if 'ACK' in frame.flags:
            evt = PingAcknowledged()
            evt.ping_data = frame.opaque_data
            events.append(evt)
        else:
            f = PingFrame(0)
            f.flags = set(['ACK'])
            f.opaque_data = frame.opaque_data
            frames.append(f)

        return frames, events

    def _receive_rst_stream_frame(self, frame):
        """
        Receive a RST_STREAM frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_RST_STREAM
        )
        try:
            stream = self.get_stream_by_id(frame.stream_id)
        except NoSuchStreamError:
            # The stream is missing. That's ok, we just do nothing here.
            stream_frames = []
            stream_events = []
        else:
            stream_frames, stream_events = stream.stream_reset(frame)

        return stream_frames, events + stream_events

    def _receive_priority_frame(self, frame):
        """
        Receive a PRIORITY frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_PRIORITY
        )
        stream = self.get_or_create_stream(frame.stream_id)
        stream_events = stream.priority_changed_remote(frame)

        return [], events + stream_events

    def _receive_goaway_frame(self, frame):
        """
        Receive a GOAWAY frame on the connection.
        """
        events = self.state_machine.process_input(
            ConnectionInputs.RECV_GOAWAY
        )

        # Clear the outbound data buffer: we cannot send further data now.
        self.clear_outbound_data_buffer()

        # Fire an appropriate ConnectionTerminated event.
        new_event = ConnectionTerminated()
        new_event.error_code = frame.error_code
        new_event.last_stream_id = frame.last_stream_id
        events.append(new_event)

        return [], events

    def _local_settings_acked(self):
        """
        Handle the local settings being ACKed, update internal state.
        """
        changes = self.local_settings.acknowledge()

        if SettingsFrame.INITIAL_WINDOW_SIZE in changes:
            setting = changes[SettingsFrame.INITIAL_WINDOW_SIZE]
            self._inbound_flow_control_change_from_settings(
                setting.original_value,
                setting.new_value,
            )

        return changes
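
As the ``send_data`` and ``local_flow_control_window`` docstrings above spell out, an outgoing DATA frame is bounded by the stream window, the connection window and the peer's maximum frame size. The helper below is a sketch of chunking a payload accordingly; it only uses methods and attributes present in the listing above, but the helper itself (``send_in_chunks``) is hypothetical and assumes the stream is already open.

# Hypothetical helper: send `payload` on a stream in chunks that respect both
# flow control and the peer's maximum frame size, the two limits that
# send_data() above enforces.
def send_in_chunks(conn, payload, stream_id=1):
    while payload:
        chunk_size = min(
            conn.local_flow_control_window(stream_id),  # connection and stream windows
            conn.max_outbound_frame_size,               # peer's max frame size
            len(payload),
        )
        if chunk_size == 0:
            # Window exhausted: a real caller would wait for WINDOW_UPDATE
            # frames (surfaced through receive_data()) before continuing.
            break
        end = chunk_size == len(payload)
        conn.send_data(stream_id, payload[:chunk_size], end_stream=end)
        payload = payload[chunk_size:]
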
Exemple #54
0
class HTTP2Protocol(object):

    ERROR_CODES = utils.BiDi(
        NO_ERROR=0x0,
        PROTOCOL_ERROR=0x1,
        INTERNAL_ERROR=0x2,
        FLOW_CONTROL_ERROR=0x3,
        SETTINGS_TIMEOUT=0x4,
        STREAM_CLOSED=0x5,
        FRAME_SIZE_ERROR=0x6,
        REFUSED_STREAM=0x7,
        CANCEL=0x8,
        COMPRESSION_ERROR=0x9,
        CONNECT_ERROR=0xa,
        ENHANCE_YOUR_CALM=0xb,
        INADEQUATE_SECURITY=0xc,
        HTTP_1_1_REQUIRED=0xd
    )

    # "PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n"
    CLIENT_CONNECTION_PREFACE =\
        '505249202a20485454502f322e300d0a0d0a534d0d0a0d0a'.decode('hex')

    ALPN_PROTO_H2 = 'h2'

    def __init__(self, tcp_handler, is_server=False, dump_frames=False):
        self.tcp_handler = tcp_handler
        self.is_server = is_server

        self.http2_settings = frame.HTTP2_DEFAULT_SETTINGS.copy()
        self.current_stream_id = None
        self.encoder = Encoder()
        self.decoder = Decoder()
        self.connection_preface_performed = False
        self.dump_frames = dump_frames

    def check_alpn(self):
        alp = self.tcp_handler.get_alpn_proto_negotiated()
        if alp != self.ALPN_PROTO_H2:
            raise NotImplementedError(
                "HTTP2Protocol can not handle unknown ALP: %s" % alp)
        return True

    def _receive_settings(self, hide=False):
        while True:
            frm = self.read_frame(hide)
            if isinstance(frm, frame.SettingsFrame):
                break

    def _read_settings_ack(self, hide=False):  # pragma no cover
        while True:
            frm = self.read_frame(hide)
            if isinstance(frm, frame.SettingsFrame):
                assert frm.flags & frame.Frame.FLAG_ACK
                assert len(frm.settings) == 0
                break

    def perform_server_connection_preface(self, force=False):
        if force or not self.connection_preface_performed:
            self.connection_preface_performed = True

            magic_length = len(self.CLIENT_CONNECTION_PREFACE)
            magic = self.tcp_handler.rfile.safe_read(magic_length)
            assert magic == self.CLIENT_CONNECTION_PREFACE

            self.send_frame(frame.SettingsFrame(state=self), hide=True)
            self._receive_settings(hide=True)

    def perform_client_connection_preface(self, force=False):
        if force or not self.connection_preface_performed:
            self.connection_preface_performed = True

            self.tcp_handler.wfile.write(self.CLIENT_CONNECTION_PREFACE)

            self.send_frame(frame.SettingsFrame(state=self), hide=True)
            self._receive_settings(hide=True)

    def next_stream_id(self):
        if self.current_stream_id is None:
            if self.is_server:
                # servers must use even stream ids
                self.current_stream_id = 2
            else:
                # clients must use odd stream ids
                self.current_stream_id = 1
        else:
            self.current_stream_id += 2
        return self.current_stream_id

    def send_frame(self, frm, hide=False):
        raw_bytes = frm.to_bytes()
        self.tcp_handler.wfile.write(raw_bytes)
        self.tcp_handler.wfile.flush()
        if not hide and self.dump_frames:  # pragma no cover
            print(frm.human_readable(">>"))

    def read_frame(self, hide=False):
        frm = frame.Frame.from_file(self.tcp_handler.rfile, self)
        if not hide and self.dump_frames:  # pragma no cover
            print(frm.human_readable("<<"))
        if isinstance(frm, frame.SettingsFrame) and not frm.flags & frame.Frame.FLAG_ACK:
            self._apply_settings(frm.settings, hide)

        return frm

    def _apply_settings(self, settings, hide=False):
        for setting, value in settings.items():
            old_value = self.http2_settings[setting]
            if not old_value:
                old_value = '-'
            self.http2_settings[setting] = value

        frm = frame.SettingsFrame(
            state=self,
            flags=frame.Frame.FLAG_ACK)
        self.send_frame(frm, hide)

        # be liberal in what we expect from the other end
        # to be more strict use: self._read_settings_ack(hide)

    def _create_headers(self, headers, stream_id, end_stream=True):
        # TODO: implement max frame size checks and sending in chunks

        flags = frame.Frame.FLAG_END_HEADERS
        if end_stream:
            flags |= frame.Frame.FLAG_END_STREAM

        header_block_fragment = self.encoder.encode(headers)

        frm = frame.HeadersFrame(
            state=self,
            flags=flags,
            stream_id=stream_id,
            header_block_fragment=header_block_fragment)

        if self.dump_frames:  # pragma no cover
            print(frm.human_readable(">>"))

        return [frm.to_bytes()]

    def _create_body(self, body, stream_id):
        if body is None or len(body) == 0:
            return b''

        # TODO: implement max frame size checks and sending in chunks
        # TODO: implement flow-control window

        frm = frame.DataFrame(
            state=self,
            flags=frame.Frame.FLAG_END_STREAM,
            stream_id=stream_id,
            payload=body)

        if self.dump_frames:  # pragma no cover
            print(frm.human_readable(">>"))

        return [frm.to_bytes()]


    def create_request(self, method, path, headers=None, body=None):
        if headers is None:
            headers = []

        authority = self.tcp_handler.sni if self.tcp_handler.sni else self.tcp_handler.address.host
        if self.tcp_handler.address.port != 443:
            authority += ":%d" % self.tcp_handler.address.port

        headers = [
            (b':method', bytes(method)),
            (b':path', bytes(path)),
            (b':scheme', b'https'),
            (b':authority', authority),
        ] + headers

        stream_id = self.next_stream_id()

        return list(itertools.chain(
            self._create_headers(headers, stream_id, end_stream=(body is None)),
            self._create_body(body, stream_id)))

    def read_response(self):
        stream_id_, headers, body = self._receive_transmission()
        return headers[':status'], headers, body

    def read_request(self):
        return self._receive_transmission()

    def _receive_transmission(self):
        body_expected = True

        stream_id = 0
        header_block_fragment = b''
        body = b''

        while True:
            frm = self.read_frame()
            if isinstance(frm, frame.HeadersFrame)\
                    or isinstance(frm, frame.ContinuationFrame):
                stream_id = frm.stream_id
                header_block_fragment += frm.header_block_fragment
                if frm.flags & frame.Frame.FLAG_END_STREAM:
                    body_expected = False
                if frm.flags & frame.Frame.FLAG_END_HEADERS:
                    break

        while body_expected:
            frm = self.read_frame()
            if isinstance(frm, frame.DataFrame):
                body += frm.payload
                if frm.flags & frame.Frame.FLAG_END_STREAM:
                    break
            # TODO: implement window update & flow

        headers = {}
        for header, value in self.decoder.decode(header_block_fragment):
            headers[header] = value

        return stream_id, headers, body

    def create_response(self, code, stream_id=None, headers=None, body=None):
        if headers is None:
            headers = []

        headers = [(b':status', bytes(str(code)))] + headers

        if not stream_id:
            stream_id = self.next_stream_id()

        return list(itertools.chain(
            self._create_headers(headers, stream_id, end_stream=(body is None)),
            self._create_body(body, stream_id),
        ))
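
``HTTP2Protocol`` above works directly against a ``tcp_handler`` object, using its ``rfile``, ``wfile``, ``sni`` and ``address`` attributes. The sketch below assumes an already-connected, ALPN-negotiated handler of that shape; constructing one is outside the scope of the listing and is not shown here.

# Illustrative client flow for HTTP2Protocol above; `tcp_handler` is assumed
# to be a connected netlib-style TCP client that negotiated the 'h2' protocol.
def simple_get(tcp_handler, path='/'):
    protocol = HTTP2Protocol(tcp_handler, is_server=False)
    protocol.check_alpn()                           # verify ALPN selected 'h2'
    protocol.perform_client_connection_preface()    # preface + SETTINGS exchange
    for raw_frame in protocol.create_request('GET', path):
        tcp_handler.wfile.write(raw_frame)
    tcp_handler.wfile.flush()
    status, headers, body = protocol.read_response()
    return status, headers, body
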
Exemple #55
0
    def test_request_examples_without_huffman(self):
        """
        This section shows several consecutive header sets, corresponding to
        HTTP requests, on the same connection.
        """
        d = Decoder()
        first_header_set = [
            (
                ':method',
                'GET',
            ),
            (
                ':scheme',
                'http',
            ),
            (
                ':path',
                '/',
            ),
            (':authority', 'www.example.com'),
        ]
        # The static table contains the ':authority' name (index 1) but not
        # its value, so 'www.example.com' is sent as a literal.
        first_data = b'\x82\x86\x84\x01\x0fwww.example.com'

        assert d.decode(first_data) == first_header_set
        assert list(d.header_table.dynamic_entries) == []

        # This request takes advantage of the differential encoding of header
        # sets.
        second_header_set = [
            (
                ':method',
                'GET',
            ),
            (
                ':scheme',
                'http',
            ),
            (
                ':path',
                '/',
            ),
            (
                ':authority',
                'www.example.com',
            ),
            ('cache-control', 'no-cache'),
        ]
        second_data = (
            b'\x82\x86\x84\x01\x0fwww.example.com\x0f\t\x08no-cache')

        assert d.decode(second_data) == second_header_set
        assert list(d.header_table.dynamic_entries) == []

        third_header_set = [
            (
                ':method',
                'GET',
            ),
            (
                ':scheme',
                'https',
            ),
            (
                ':path',
                '/index.html',
            ),
            (
                ':authority',
                'www.example.com',
            ),
            ('custom-key', 'custom-value'),
        ]
        third_data = (
            b'\x82\x87\x85\x01\x0fwww.example.com@\ncustom-key\x0ccustom-value'
        )

        assert d.decode(third_data) == third_header_set
        # Don't check the header table here, it's just too complex to be
        # reliable. Check its length though.
        assert len(d.header_table.dynamic_entries) == 1
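
The byte strings in the test above map directly onto the RFC 7541 static table (index 1 is ``:authority``, 2 is ``:method: GET``, 4 is ``:path: /``, 6 is ``:scheme: http``). The annotated decode below walks through ``first_data``; the top-level ``from hpack import Decoder`` is an assumption about how the ``Decoder`` used in these tests is normally imported.

# Byte-by-byte reading of the first request above (no Huffman coding).
from hpack import Decoder  # assumed import; the tests construct Decoder() directly

first_data = b'\x82\x86\x84\x01\x0fwww.example.com'
# 0x82 -> indexed header field, static table index 2  (':method', 'GET')
# 0x86 -> indexed header field, static table index 6  (':scheme', 'http')
# 0x84 -> indexed header field, static table index 4  (':path', '/')
# 0x01 -> literal without indexing, name from static index 1 (':authority')
# 0x0f -> value length 15 with the Huffman bit clear, then b'www.example.com'
print(Decoder().decode(first_data))
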
Exemple #56
0
    def test_request_examples_with_huffman(self):
        """
        This section shows the same examples as the previous section, but
        using Huffman encoding for the literal values.
        """
        d = Decoder()

        first_header_set = [
            (
                ':method',
                'GET',
            ),
            (
                ':scheme',
                'http',
            ),
            (
                ':path',
                '/',
            ),
            (':authority', 'www.example.com'),
        ]
        first_data = (
            b'\x82\x86\x84\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff')

        assert d.decode(first_data) == first_header_set
        assert list(d.header_table.dynamic_entries) == []

        second_header_set = [
            (
                ':method',
                'GET',
            ),
            (
                ':scheme',
                'http',
            ),
            (
                ':path',
                '/',
            ),
            (
                ':authority',
                'www.example.com',
            ),
            ('cache-control', 'no-cache'),
        ]
        second_data = (
            b'\x82\x86\x84\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
            b'\x0f\t\x86\xa8\xeb\x10d\x9c\xbf')

        assert d.decode(second_data) == second_header_set
        assert list(d.header_table.dynamic_entries) == []

        third_header_set = [
            (
                ':method',
                'GET',
            ),
            (
                ':scheme',
                'https',
            ),
            (
                ':path',
                '/index.html',
            ),
            (
                ':authority',
                'www.example.com',
            ),
            ('custom-key', 'custom-value'),
        ]
        third_data = (
            b'\x82\x87\x85\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff@'
            b'\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8\xb4\xbf')

        assert d.decode(third_data) == third_header_set
        assert len(d.header_table.dynamic_entries) == 1
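
The only difference from the previous example is that the literal values are Huffman coded: the ``\x8c`` that follows the ``:authority`` name index is the Huffman flag (0x80) plus a coded length of 12 bytes, which expands back to the 15-byte ``www.example.com``. Below is a minimal round-trip sketch, assuming the ``hpack`` package's top-level ``Encoder``/``Decoder`` and the ``huffman`` keyword argument of ``Encoder.encode()``.

# Round-trip the first header set with Huffman-coded literals and check that
# decoding recovers the original headers.
from hpack import Decoder, Encoder  # assumed imports

headers = [
    (':method', 'GET'),
    (':scheme', 'http'),
    (':path', '/'),
    (':authority', 'www.example.com'),
]
wire = Encoder().encode(headers, huffman=True)
assert Decoder().decode(wire) == headers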