def test_qos_2_queues_message(self):
    """
    The WAMP layer calling send_publish will queue a message up for
    sending, and send it next time it has a chance.
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise()
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    # Connect has happened
    events = cp.data_received(t.value())
    t.clear()
    self.assertFalse(t.disconnecting)
    self.assertIsInstance(events[0], ConnACK)

    # WAMP layer calls send_publish, with QoS 2
    p.send_publish(u"hello", 2, b'some bytes', False)

    # Nothing should have been sent yet, it is queued
    self.assertEqual(t.value(), b'')

    # Advance the clock
    r.advance(0.1)

    # We should now get the sent Publish
    expected_publish = Publish(duplicate=False, qos_level=2, retain=False,
                               packet_identifier=1, topic_name=u"hello",
                               payload=b"some bytes")
    events = cp.data_received(t.value())
    t.clear()
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0], expected_publish)

    # We send the PubREC; we should get a PubREL back
    pubrec = PubREC(packet_identifier=1)
    for x in iterbytes(pubrec.serialise()):
        p.dataReceived(x)

    events = cp.data_received(t.value())
    t.clear()
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0], PubREL(packet_identifier=1))

    # We send the PubCOMP, which has no response
    pubcomp = PubCOMP(packet_identifier=1)
    for x in iterbytes(pubcomp.serialise()):
        p.dataReceived(x)

    self.assertFalse(t.disconnecting)

def test_keepalive(self):
    """
    If a client connects with a keepalive, and sends no data in
    keep_alive * 1.5, they will be disconnected.

    Compliance statement MQTT-3.1.2-24
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        # CONNECT, with keepalive of 2
        b"101300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        p.dataReceived(x)

    self.assertEqual(len(r.calls), 1)
    self.assertEqual(r.calls[0].func, p._lose_connection)
    self.assertEqual(r.calls[0].getTime(), 3.0)
    self.assertFalse(t.disconnecting)

    r.advance(2.9)
    self.assertFalse(t.disconnecting)

    r.advance(0.1)
    self.assertTrue(t.disconnecting)

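# Illustrative sketch (not part of the original suite): the raw CONNECT blob
# reused by the keepalive tests is just a CONNECT for client "test123" with a
# keep-alive of 2 and the clean-session flag set. This hypothetical test
# (the name is new) decodes it with the same MQTTParser the parser tests in
# this file use, asserting only attributes the other tests already rely on.
def test_keepalive_connect_blob_decodes(self):
    events = []
    p = MQTTParser()

    data = b"101300044d51545404020002000774657374313233"

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertIsInstance(events[0], Connect)
    self.assertEqual(events[0].client_id, u"test123")
    self.assertEqual(events[0].keep_alive, 2)
    self.assertTrue(events[0].flags.clean_session)
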
def test_non_allowed_qos_not_queued(self):
    """
    A non-QoS 0, 1, or 2 message will be rejected by the publish layer.
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise()
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    # Connect has happened
    events = cp.data_received(t.value())
    t.clear()
    self.assertFalse(t.disconnecting)
    self.assertIsInstance(events[0], ConnACK)

    # WAMP layer calls send_publish w/ invalid QoS
    with self.assertRaises(ValueError):
        p.send_publish(u"hello", 5, b'some bytes', False)

    # Nothing will be sent
    self.assertEqual(t.value(), b'')

    # Advance the clock
    r.advance(0.1)

    # Still nothing
    self.assertEqual(t.value(), b'')

def test_got_sent_packet(self):
    """
    `process_connect` on the handler will get the correct Connect packet.
    """
    got_packets = []

    class SubHandler(BasicHandler):
        def process_connect(self_, event):
            got_packets.append(event)
            return succeed((0, False))

    h = SubHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise()
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    self.assertEqual(len(got_packets), 1)
    self.assertEqual(got_packets[0].client_id, u"test123")
    self.assertEqual(got_packets[0].serialise(), data)

def test_lose_conn_on_unimplemented_packet(self):
    """
    If we get a packet which is valid but not implemented for this role
    (e.g. SubACK, which we only ever send, so receiving it is a protocol
    violation), we will drop the connection.

    Compliance statement: MQTT-4.8.0-1
    """
    # This shouldn't normally happen, but just in case.
    from crossbar.bridge.mqtt import protocol
    protocol.server_packet_handlers[protocol.P_SUBACK] = SubACK
    self.addCleanup(
        lambda: protocol.server_packet_handlers.pop(protocol.P_SUBACK))

    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=False)).serialise() +
        SubACK(1, [1]).serialise()
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(data):
            p.dataReceived(x)

    sent_logs = logs.get_category("MQ402")
    self.assertEqual(len(sent_logs), 1)
    self.assertEqual(sent_logs[0]["log_level"], LogLevel.error)
    self.assertEqual(sent_logs[0]["packet_id"], "SubACK")

    self.assertTrue(t.disconnecting)

def test_connect_not_first(self):
    """
    Sending a packet that is not a CONNECT as the first packet is a
    protocol violation.

    Conformance Statement MQTT-3.1.0-1
    """
    events = []
    p = MQTTParser()

    data = (
        # SUBSCRIBE
        b"820d00010008746573742f31323300"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)

    # Failure event for the protocol violation
    self._assert_event(events.pop(0), Failure,
                       {'reason': "Connect packet was not first"})

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_reserved_packet_0(self):
    """
    Using the reserved packet 0 is a protocol violation.

    No conformance statement, but see "Table 2.1 - Control packet types".
    """
    events = []
    p = MQTTParser()

    data = (
        # CONNECT
        b"101300044d51545404020002000774657374313233"
        # Reserved packet #0
        b"001300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 2)

    # Regular connect, we don't care about it
    self.assertIsInstance(events.pop(0), Connect)

    # Reserved packet
    self._assert_event(events.pop(0), Failure, {
        'reason': "Unimplemented packet type 0"
    })

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_exception_in_connect_drops_connection(self):
    """
    Transient failures (like an exception from
    handler.process_unsubscribe) will cause the connection it happened on
    to be dropped.

    Compliance statement MQTT-4.8.0-2
    """
    class SubHandler(BasicHandler):
        def process_unsubscribe(self, event):
            raise Exception("boom!")

    h = SubHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise() +
        Unsubscribe(packet_identifier=1234, topics=[u"foo"]).serialise()
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(data):
            p.dataReceived(x)

    sent_logs = logs.get_category("MQ502")
    self.assertEqual(len(sent_logs), 1)
    self.assertEqual(sent_logs[0]["log_level"], LogLevel.critical)
    self.assertEqual(sent_logs[0]["log_failure"].value.args[0], "boom!")

    events = cp.data_received(t.value())
    self.assertEqual(len(events), 1)
    self.assertTrue(t.disconnecting)

    # We got the error, we need to flush it so it doesn't make the test
    # error
    self.flushLoggedErrors()

def test_too_large_header(self):
    """
    A packet whose fixed header encodes an over-large remaining length is
    a protocol violation.
    """
    events = []
    p = MQTTParser()

    data = (
        # CONNECT with an over-large remaining length, plus junk we
        # should never read
        b"10ffffffff000000000000000000"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)

    # Failure event for the protocol violation
    self._assert_event(events.pop(0), Failure, {
        'reason': "Too big packet size"
    })

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_connect_reserved_area(self):
    """
    The reserved section in the CONNECT packet must not be used.

    Conformance Statement MQTT-3.1.2-3
    """
    events = []
    p = MQTTParser()

    data = (
        # CONNECT with the reserved lower nibble of the first byte set
        b"111300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)

    # Failure event for the protocol violation
    self._assert_event(events.pop(0), Failure, {
        'reason': "Bad flags in Connect"
    })

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_multiple_connects(self):
    """
    Sending multiple CONNECT packets is a protocol violation.

    Conformance Statement MQTT-3.1.0-2
    """
    events = []
    p = MQTTParser()

    data = (
        # CONNECT
        b"101300044d51545404020002000774657374313233"
        # CONNECT
        b"101300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 2)

    # First, successful connect
    self.assertIsInstance(events.pop(0), Connect)

    # Failure event for the protocol violation
    self._assert_event(events.pop(0), Failure, {
        'reason': "Multiple Connect packets"
    })

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_keepalive_canceled_on_lost_connection(self):
    """
    If a client connects with a keepalive, and then disconnects
    themselves, we will remove the keepalive timeout.
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        # CONNECT, with keepalive of 2
        b"101300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        p.dataReceived(x)

    self.assertEqual(len(r.calls), 1)
    self.assertEqual(r.calls[0].getTime(), 3.0)

    timeout = r.calls[0]

    # Clean connection lost
    p.connectionLost(None)

    self.assertEqual(len(r.calls), 0)
    self.assertTrue(timeout.cancelled)
    self.assertFalse(timeout.called)

def test_correct_connect(self):
    """
    The most basic possible connect -- MQTT 3.1.1, no QoS/username/password
    and compliant with the spec.
    """
    events = []
    p = MQTTParser()

    good = b"\x10\x13\x00\x04MQTT\x04\x02\x00x\x00\x07test123"

    for x in iterbytes(good):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'username': None,
            'password': None,
            'will_message': None,
            'will_topic': None,
            'client_id': u"test123",
            'keep_alive': 120,
            'flags': {
                'username': False,
                'password': False,
                'will': False,
                'will_qos': 0,
                'will_retain': False,
                'clean_session': True,
                'reserved': False
            }
        })

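# Annotation (added for clarity; values taken from the assertions above):
# the `good` bytes in test_correct_connect decode as
#   \x10             -> CONNECT packet type, header flags 0
#   \x13             -> remaining length of 19 bytes
#   \x00\x04MQTT     -> protocol name "MQTT"
#   \x04             -> protocol level 4 (MQTT 3.1.1)
#   \x02             -> connect flags: clean session only
#   \x00x            -> keep-alive of 120 seconds (0x78 is ASCII "x")
#   \x00\x07test123  -> client identifier "test123"
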
def test_lose_conn_on_protocol_violation(self):
    """
    When a protocol violation occurs, the connection to the client will
    be terminated, and an error will be logged.

    Compliance statement MQTT-4.8.0-1
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        # Invalid CONNECT
        b"111300044d51545404020002000774657374313233"
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(unhexlify(data)):
            p.dataReceived(x)

    sent_logs = logs.get_category("MQ401")
    self.assertEqual(len(sent_logs), 1)
    self.assertEqual(sent_logs[0]["log_level"], LogLevel.error)
    self.assertIn("Connect", logs.log_text.getvalue())

    self.assertEqual(t.value(), b'')
    self.assertTrue(t.disconnecting)

def test_exception_in_subscribe_drops_connection(self):
    """
    Transient failures (like an exception from handler.process_subscribe)
    will cause the connection it happened on to be dropped.

    Compliance statement MQTT-4.8.0-2
    """
    class SubHandler(BasicHandler):
        @inlineCallbacks
        def process_subscribe(self, event):
            raise Exception("boom!")

    h = SubHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise() +
        Subscribe(packet_identifier=1234,
                  topic_requests=[SubscriptionTopicRequest(u"a", 0)]).serialise()
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(data):
            p.dataReceived(x)

    sent_logs = logs.get_category("MQ501")
    self.assertEqual(len(sent_logs), 1)
    self.assertEqual(sent_logs[0]["log_level"], LogLevel.critical)
    self.assertEqual(sent_logs[0]["log_failure"].value.args[0], "boom!")

    events = cp.data_received(t.value())
    self.assertEqual(len(events), 1)
    self.assertTrue(t.disconnecting)

    # We got the error, we need to flush it so it doesn't make the test
    # error
    self.flushLoggedErrors()

def test_qos_2_failure_drops_connection(self):
    """
    Transient failures (like an exception from
    handler.process_publish_qos_2) will cause the connection it happened
    on to be dropped.

    Compliance statement MQTT-4.8.0-2
    """
    class PubHandler(BasicHandler):
        def process_publish_qos_2(self, event):
            raise Exception("boom!")

    h = PubHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise() +
        Publish(duplicate=False, qos_level=2, retain=False,
                topic_name=u"foo", packet_identifier=1,
                payload=b"bar").serialise()
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(data):
            p.dataReceived(x)

    sent_logs = logs.get_category("MQ505")
    self.assertEqual(len(sent_logs), 1)
    self.assertEqual(sent_logs[0]["log_level"], LogLevel.critical)
    self.assertEqual(sent_logs[0]["log_failure"].value.args[0], "boom!")

    events = cp.data_received(t.value())
    self.assertEqual(len(events), 1)
    self.assertTrue(t.disconnecting)

    # We got the error, we need to flush it so it doesn't make the test
    # error
    self.flushLoggedErrors()

def test_non_zero_connect_code_must_have_no_present_session(self):
    """
    A non-zero connect code in a CONNACK must be paired with no session
    present.

    Compliance statement MQTT-3.2.2-4
    """
    h = BasicHandler(self.connect_code)
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=False)).serialise()
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    events = cp.data_received(t.value())
    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'return_code': self.connect_code,
            'session_present': False,
        })

def test_send_packet(self):
    """
    On sending a packet, a trace log message is emitted with details of
    the sent packet.
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        # CONNECT
        b"101300044d51545404020002000774657374313233"
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(unhexlify(data)):
            p.dataReceived(x)

    sent_logs = logs.get_category("MQ101")
    self.assertEqual(len(sent_logs), 1)
    self.assertEqual(sent_logs[0]["log_level"], LogLevel.debug)
    self.assertEqual(sent_logs[0]["txaio_trace"], True)
    self.assertIn("ConnACK", logs.log_text.getvalue())

    events = cp.data_received(t.value())
    self.assertEqual(len(events), 1)
    self.assertIsInstance(events[0], ConnACK)

def test_utf8_zwnbsp(self):
    """
    UTF-8 strings containing the sequence 0xEF 0xBB 0xBF must decode to
    U+FEFF.

    Conformance statement MQTT-1.5.3-3
    """
    events = []
    p = MQTTParser()

    bad = b"\x10\x13\x00\x04MQTT\x04\x02\x00x\x00\x07test\xef\xbb\xbf"

    for x in iterbytes(bad):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'username': None,
            'password': None,
            'will_message': None,
            'will_topic': None,
            'client_id': u"test\uFEFF",
            'keep_alive': 120,
            'flags': {
                'username': False,
                'password': False,
                'will': False,
                'will_qos': 0,
                'will_retain': False,
                'clean_session': True,
                'reserved': False
            }
        })

def test_keepalive_requires_full_packet(self):
    """
    If a client connects with a keepalive, and sends no FULL packets in
    keep_alive * 1.5, they will be disconnected.

    Compliance statement MQTT-3.1.2-24
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        # CONNECT, with keepalive of 2
        b"101300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        p.dataReceived(x)

    self.assertEqual(len(r.calls), 1)
    self.assertEqual(r.calls[0].func, p._lose_connection)
    self.assertEqual(r.calls[0].getTime(), 3.0)
    self.assertFalse(t.disconnecting)

    r.advance(2.9)
    self.assertFalse(t.disconnecting)

    data = (
        # PINGREQ header, no body (incomplete packet)
        b"c0"
    )

    for x in iterbytes(unhexlify(data)):
        p.dataReceived(x)

    # Timeout has not changed. If it reset the timeout on data received,
    # the delayed call's trigger time would instead be 2.9 + 3.
    self.assertEqual(len(r.calls), 1)
    self.assertEqual(r.calls[0].func, p._lose_connection)
    self.assertEqual(r.calls[0].getTime(), 3.0)

    r.advance(0.1)
    self.assertTrue(t.disconnecting)

def test_connect_subscribe_unsubscribe(self):
    """
    A connect, then a subscribe and an immediate unsubscribe.
    """
    events = []
    p = MQTTParser()

    data = (
        # CONNECT
        b"101300044d51545404020002000774657374313233"
        # SUBSCRIBE
        b"820d00010008746573742f31323300"
        # UNSUBSCRIBE
        b"a20c00030008746573742f313233"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 3)

    self._assert_event(
        events.pop(0), Connect,
        {
            'username': None,
            'password': None,
            'will_message': None,
            'will_topic': None,
            'client_id': u"test123",
            'keep_alive': 2,
            'flags': {
                'username': False,
                'password': False,
                'will': False,
                'will_qos': 0,
                'will_retain': False,
                'clean_session': True,
                'reserved': False
            }
        })

    self._assert_event(
        events.pop(0), Subscribe,
        {
            'packet_identifier': 1,
            'topic_requests': [{
                'topic_filter': u'test/123',
                'max_qos': 0,
            }]
        })

    self._assert_event(
        events.pop(0), Unsubscribe,
        {
            'packet_identifier': 3,
            'topics': [u'test/123'],
        })

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)

def test_keepalive_full_packet_resets_timeout(self):
    """
    If a client connects with a keepalive, and sends packets in under
    keep_alive * 1.5, the connection will remain, and the timeout will be
    reset.
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        # CONNECT, with keepalive of 2
        b"101300044d51545404020002000774657374313233"
    )

    for x in iterbytes(unhexlify(data)):
        p.dataReceived(x)

    self.assertEqual(len(r.calls), 1)
    self.assertEqual(r.calls[0].func, p._lose_connection)
    self.assertEqual(r.calls[0].getTime(), 3.0)
    self.assertFalse(t.disconnecting)

    r.advance(2.9)
    self.assertFalse(t.disconnecting)

    data = (
        # Full PINGREQ packet
        b"c000"
    )

    for x in iterbytes(unhexlify(data)):
        p.dataReceived(x)

    # Timeout has changed, to be 2.9 (the time the packet was received) + 3
    self.assertEqual(len(r.calls), 1)
    self.assertEqual(r.calls[0].func, p._lose_connection)
    self.assertEqual(r.calls[0].getTime(), 2.9 + 3.0)

    r.advance(0.1)
    self.assertFalse(t.disconnecting)

def test_qos_1_sends_ack(self):
    """
    When a QoS 1 Publish packet is received, we send a PubACK with the
    same packet identifier as the original Publish.

    Compliance statement MQTT-3.3.4-1
    Spec part 3.4
    """
    got_packets = []

    class PubHandler(BasicHandler):
        def process_publish_qos_1(self, event):
            got_packets.append(event)
            return succeed(None)

    h = PubHandler()
    r, t, p, cp = make_test_items(h)

    pub = Publish(duplicate=False, qos_level=1, retain=False,
                  topic_name=u"foo", packet_identifier=1,
                  payload=b"bar").serialise()

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise() +
        pub
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(data):
            p.dataReceived(x)

    events = cp.data_received(t.value())
    self.assertFalse(t.disconnecting)

    # ConnACK + PubACK with the same packet ID
    self.assertEqual(len(events), 2)
    self.assertEqual(events[1], PubACK(packet_identifier=1))

    # The publish handler should have been called
    self.assertEqual(len(got_packets), 1)
    self.assertEqual(got_packets[0].serialise(), pub)

    # We should get a debug message saying we got the publish
    messages = logs.get_category("MQ202")
    self.assertEqual(len(messages), 1)
    self.assertEqual(messages[0]["publish"].serialise(), pub)

def test_unknown_connect_code_must_lose_connection(self):
    """
    A non-zero connect code from the handler that is not in the range 1-5
    must result in a lost connection, and no CONNACK.

    Compliance statements MQTT-3.2.2-4, MQTT-3.2.2-5
    """
    h = BasicHandler(6)
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=False)).serialise()
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    self.assertTrue(t.disconnecting)
    self.assertEqual(t.value(), b'')

def test_invalid_utf8_null(self):
    """
    UTF-8 strings may not contain null bytes.

    Conformance statement MQTT-1.5.3-2
    """
    events = []
    p = MQTTParser()

    bad = b"\x10\x13\x00\x04\x00QTT\x04\x02\x00x\x00\x07test123"

    for x in iterbytes(bad):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'reason': "Invalid UTF-8 string (contains nulls)"
        })
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_connect_ping(self):
    """
    A connect, then a ping.
    """
    events = []
    p = MQTTParser()

    data = (
        # CONNECT
        b"101300044d51545404020002000774657374313233"
        # PINGREQ
        b"c000"
    )

    for x in iterbytes(unhexlify(data)):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 2)

    self._assert_event(
        events.pop(0), Connect,
        {
            'username': None,
            'password': None,
            'will_message': None,
            'will_topic': None,
            'client_id': u"test123",
            'keep_alive': 2,
            'flags': {
                'username': False,
                'password': False,
                'will': False,
                'will_qos': 0,
                'will_retain': False,
                'clean_session': True,
                'reserved': False
            }
        })

    self._assert_event(events.pop(0), PingREQ, {})

    # We want to have consumed all the events
    self.assertEqual(len(events), 0)

def test_qos_0_sends_no_ack(self):
    """
    When a QoS 0 Publish packet is received, we don't send back a PubACK.
    """
    got_packets = []

    class PubHandler(BasicHandler):
        def process_publish_qos_0(self, event):
            got_packets.append(event)
            return succeed(None)

    h = PubHandler()
    r, t, p, cp = make_test_items(h)

    pub = Publish(duplicate=False, qos_level=0, retain=False,
                  topic_name=u"foo", packet_identifier=None,
                  payload=b"bar").serialise()

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise() +
        pub
    )

    with LogCapturer("trace") as logs:
        for x in iterbytes(data):
            p.dataReceived(x)

    events = cp.data_received(t.value())
    self.assertFalse(t.disconnecting)

    # Just the ConnACK, no PubACK.
    self.assertEqual(len(events), 1)

    # The publish handler should have been called
    self.assertEqual(len(got_packets), 1)
    self.assertEqual(got_packets[0].serialise(), pub)

    # We should get a debug message saying we got the publish
    messages = logs.get_category("MQ201")
    self.assertEqual(len(messages), 1)
    self.assertEqual(messages[0]["publish"].serialise(), pub)

def test_malformed_packet(self):
    """
    A parsing failure (e.g. an incorrect length field leads us to read
    off the end) is safely handled.
    """
    events = []
    p = MQTTParser()

    bad = b"\x10\x13\x00\x04MQTT\x04\x02\x00x\x00\x09test123"

    for x in iterbytes(bad):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'reason': ("Corrupt data, fell off the end: Cannot read 72 "
                       "bits, only 56 available.")
        })
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

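# Annotation (added; not in the original test): the error text counts bits.
# The length prefix before the client identifier was changed from \x00\x07 to
# \x00\x09, so the parser tries to read a 9-byte (72-bit) string while only
# the 7 bytes (56 bits) of b"test123" remain in the packet.
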
def test_invalid_utf8_continuation(self):
    """
    Invalid UTF-8 sequences (i.e. those containing UTF-16 surrogate pairs
    encoded in UTF-8) are a protocol violation.

    Conformance statement MQTT-1.5.3-1
    """
    events = []
    p = MQTTParser()

    bad = b"\x10\x13\x00\x04\xed\xbf\xbfT\x04\x02\x00x\x00\x07test123"

    for x in iterbytes(bad):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'reason': "Invalid UTF-8 string (contains surrogates)"
        })
    self.assertEqual(p._state, PROTOCOL_VIOLATION)

def test_transport_paused_while_processing(self):
    """
    The transport is paused whilst the MQTT protocol is parsing/handling
    existing items.
    """
    d = Deferred()
    h = BasicHandler()
    h.process_connect = lambda x: d
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=False)).serialise()
    )

    self.assertEqual(t.producerState, 'producing')

    for x in iterbytes(data):
        p.dataReceived(x)

    self.assertEqual(t.producerState, 'paused')
    d.callback((0, False))
    self.assertEqual(t.producerState, 'producing')

def test_quirks_mode_connect(self):
    """
    Nyamuk sends two extra bytes at the end of the CONNECT packet (that
    cannot mean anything), so we should just cope with it.
    """
    events = []
    p = MQTTParser()

    # 0x15 is the correct remaining length; the two trailing NUL bytes are
    # the quirk ("why???")
    good = b"\x10\x15\x00\x04MQTT\x04\x02\x00x\x00\x07test123\x00\x00"

    for x in iterbytes(good):
        events.extend(p.data_received(x))

    self.assertEqual(len(events), 1)
    self.assertEqual(
        attr.asdict(events[0]),
        {
            'username': None,
            'password': None,
            'will_message': None,
            'will_topic': None,
            'client_id': u"test123",
            'keep_alive': 120,
            'flags': {
                'username': False,
                'password': False,
                'will': False,
                'will_qos': 0,
                'will_retain': False,
                'clean_session': True,
                'reserved': False
            }
        })

    warnings = self.flushWarnings()
    self.assertEqual(len(warnings), 1)
    self.assertEqual(warnings[0]["message"],
                     ("Quirky client CONNECT -- packet length was 152 "
                      "bytes but only had 168 bytes of useful data"))

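# Annotation (added; not in the original test): the 152 and 168 in the
# asserted warning appear to be bit counts rather than byte counts -- the
# CONNECT content proper is 19 bytes (19 * 8 = 152), while the header
# declares, and the client supplies, 21 bytes (21 * 8 = 168); the two
# trailing NUL bytes account for the difference.
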
def test_qos_0_queues_message(self):
    """
    The WAMP layer calling send_publish will queue a message up for
    sending, and send it next time it has a chance.
    """
    h = BasicHandler()
    r, t, p, cp = make_test_items(h)

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise()
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    # Connect has happened
    events = cp.data_received(t.value())
    t.clear()
    self.assertFalse(t.disconnecting)
    self.assertIsInstance(events[0], ConnACK)

    # WAMP layer calls send_publish
    p.send_publish(u"hello", 0, b'some bytes', False)

    # Nothing should have been sent yet, it is queued
    self.assertEqual(t.value(), b'')

    # Advance the clock
    r.advance(0.1)

    # We should now get the sent Publish
    events = cp.data_received(t.value())
    self.assertEqual(len(events), 1)
    self.assertEqual(
        events[0],
        Publish(duplicate=False, qos_level=0, retain=False,
                packet_identifier=None, topic_name=u"hello",
                payload=b"some bytes"))

def test_unsubscription_gets_unsuback_with_same_id(self):
    """
    When an unsubscription is processed, the UnsubACK has the same ID.
    Unsubscriptions are always processed.

    Compliance statements MQTT-3.10.4-4, MQTT-3.10.4-5, MQTT-3.12.4-1
    """
    got_packets = []

    class SubHandler(BasicHandler):
        def process_unsubscribe(self, event):
            got_packets.append(event)
            return succeed(None)

    h = SubHandler()
    r, t, p, cp = make_test_items(h)

    unsub = Unsubscribe(packet_identifier=1234,
                        topics=[u"foo"]).serialise()

    data = (
        Connect(client_id=u"test123",
                flags=ConnectFlags(clean_session=True)).serialise() +
        unsub
    )

    for x in iterbytes(data):
        p.dataReceived(x)

    events = cp.data_received(t.value())
    self.assertEqual(len(events), 2)
    self.assertFalse(t.disconnecting)

    # UnsubACK that has the same ID
    self.assertIsInstance(events[1], UnsubACK)
    self.assertEqual(events[1].packet_identifier, 1234)

    # The unsubscribe handler should have been called
    self.assertEqual(len(got_packets), 1)
    self.assertEqual(got_packets[0].serialise(), unsub)