Example no. 1
def test_deserialize_messages_valid_messages(request_monitoring_message):
    message = MessageSerializer.serialize(request_monitoring_message)
    raw_string = message + "\n" + message

    messages = deserialize_messages(
        data=raw_string, peer_address=request_monitoring_message.sender)
    assert len(messages) == 2
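The transport packs multiple messages into a single payload separated by newlines; deserialize_messages (Example no. 7 below) splits the payload into lines and parses each one independently. A minimal sketch extending the test above (same fixture and imports assumed):

def test_deserialize_messages_three_messages(request_monitoring_message):
    # Three newline-joined copies should come back as three parsed messages.
    message = MessageSerializer.serialize(request_monitoring_message)
    raw_string = "\n".join([message] * 3)

    messages = deserialize_messages(
        data=raw_string, peer_address=request_monitoring_message.sender)
    assert len(messages) == 3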
Example no. 2
def test_message_identical() -> None:
    """ Will fail if the messages changed since the committed version

    If you intend to change the serialized messages, then update the messages
    on disk (see the comment inside the test). This test exists only to prevent
    accidental breaking of compatibility.

    If many values change in unexpected ways, that might have to do with the
    pseudo-random initialization of the messages (see random.seed() above).
    """
    signer = LocalSigner(bytes(range(32)))
    for message in messages:
        # The messages contain only random signatures. We don't want to test
        # only the serialization itself, but also to prevent accidental
        # changes to the signature. To do this, we have to create proper signatures.
        message.sign(signer)

        filename = os.path.join(
            os.path.dirname(__file__), "serialized_messages", message.__class__.__name__ + ".json"
        )

        # Uncomment this for one run if you intentionally changed the message
        # with open(filename, "w") as f:
        #     json_msg = MessageSerializer.serialize(message)
        #     # pretty print for more readable diffs
        #     json_msg = json.dumps(json.loads(json_msg), indent=4, sort_keys=True)
        #     f.write(json_msg)

        with open(filename) as f:
            saved_message_dict = JSONSerializer.deserialize(f.read())

        # The assert output is more readable with dicts than with plain JSON
        message_dict = JSONSerializer.deserialize(MessageSerializer.serialize(message))
        assert message_dict == saved_message_dict
Example no. 3
def make_message_text(sign=True, overwrite_data=None):
    room = Room(None, "!roomID:server")  # type: ignore
    if not overwrite_data:
        data = MessageSerializer.serialize(make_message(sign=sign))
    else:
        data = overwrite_data

    event = dict(
        type="m.room.message", sender=USERID1, content={"msgtype": "m.text", "body": data}
    )
    return room, event
Example no. 4
def test_deserialize_checks_datetimes_in_messages():
    invalid_fee_update = get_fee_update_message(
        updating_participant=PRIVATE_KEY_1_ADDRESS,
        privkey_signer=PRIVATE_KEY_1,
        timestamp=datetime.now(timezone.utc),
    )
    message = MessageSerializer.serialize(invalid_fee_update)

    messages = deserialize_messages(data=message, peer_address=PRIVATE_KEY_1_ADDRESS)
    assert len(messages) == 0

    valid_fee_update = get_fee_update_message(
        updating_participant=PRIVATE_KEY_1_ADDRESS,
        privkey_signer=PRIVATE_KEY_1,
        timestamp=datetime.utcnow(),
    )
    message = MessageSerializer.serialize(valid_fee_update)

    messages = deserialize_messages(data=message, peer_address=PRIVATE_KEY_1_ADDRESS)
    assert len(messages) == 1
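The difference the test exercises is timezone awareness: datetime.now(timezone.utc) yields an offset-aware timestamp whose ISO form carries a +00:00 suffix, while datetime.utcnow() yields a naive one, which is what the deserializer accepts here. A standalone snippet showing the distinction:

from datetime import datetime, timezone

aware = datetime.now(timezone.utc)
naive = datetime.utcnow()

assert aware.tzinfo is not None   # offset-aware, isoformat() ends in "+00:00"
assert naive.tzinfo is None       # naive, isoformat() has no offset suffix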
Example no. 5
def test_retry_queue_does_not_resend_removed_messages(
        mock_matrix: MatrixTransport, retry_interval_initial: float) -> None:
    """
    Ensure the ``RetryQueue`` doesn't unnecessarily re-send messages.

    Messages should only be retried while they are present in the respective Raiden queue.
    Once they have been removed they should not be sent again.

    In the past they could have been sent twice.
    See: https://github.com/raiden-network/raiden/issues/4111
    """
    # Pretend the Transport greenlet is running
    mock_matrix.greenlet = True

    # This is intentionally not using ``MatrixTransport._get_retrier()`` since we don't want the
    # greenlet to run but instead manually call its `_check_and_send()` method.
    retry_queue = _RetryQueue(transport=mock_matrix,
                              receiver=Address(factories.HOP1))

    message = make_message()
    serialized_message = MessageSerializer.serialize(message)
    queue_identifier = QueueIdentifier(
        recipient=Address(factories.HOP1),
        canonical_identifier=CANONICAL_IDENTIFIER_UNORDERED_QUEUE,
    )
    retry_queue.enqueue(queue_identifier, [message])

    # TODO: Fix the code below, the types are not matching.
    mock_matrix._queueids_to_queues[queue_identifier] = [message]  # type: ignore

    with retry_queue._lock:
        retry_queue._check_and_send()

    assert len(mock_matrix.sent_messages) == 1  # type: ignore
    assert (factories.HOP1,
            serialized_message) in mock_matrix.sent_messages  # type: ignore

    mock_matrix._queueids_to_queues[queue_identifier].clear()

    # Make sure the retry interval has elapsed
    gevent.sleep(retry_interval_initial * 5)

    with retry_queue._lock:
        # The message has been removed from the raiden queue and should therefore not be sent again
        retry_queue._check_and_send()

    assert len(mock_matrix.sent_messages) == 1  # type: ignore
Example no. 6
def validate_and_parse_message(data: Any,
                               peer_address: Address) -> List[Message]:
    messages: List[Message] = list()

    if not isinstance(data, str):
        log.warning(
            "Received Message body not a string",
            message_data=data,
            peer_address=to_checksum_address(peer_address),
        )
        return []

    for line in data.splitlines():
        line = line.strip()
        if not line:
            continue
        try:
            message = MessageSerializer.deserialize(line)
        except SerializationError as ex:
            log.warning(
                "Not a valid Message",
                message_data=line,
                peer_address=to_checksum_address(peer_address),
                _exc=ex,
            )
            continue
        if not isinstance(message, SignedMessage):
            log.warning(
                "Message not a SignedMessage!",
                message=message,
                peer_address=to_checksum_address(peer_address),
            )
            continue
        if message.sender != peer_address:
            log.warning(
                "Message not signed by sender!",
                message=message,
                signer=message.sender,
                peer_address=to_checksum_address(peer_address),
            )
            continue
        messages.append(message)

    return messages
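A hedged usage sketch for validate_and_parse_message, with placeholder names: `signed_json` stands for a message correctly signed by `peer`. Invalid lines are skipped with a warning; only properly signed lines from the expected sender are returned.

# Hypothetical payload mixing one valid line with two rejected ones
# (not deserializable / not a SignedMessage from `peer`).
payload = signed_json + "\nnot json at all\n{}"
parsed = validate_and_parse_message(payload, peer)
assert len(parsed) == 1 and parsed[0].sender == peer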
Example no. 7
def deserialize_messages(
        data: str,
        peer_address: Address,
        rate_limiter: Optional[RateLimiter] = None) -> List[SignedMessage]:
    messages: List[SignedMessage] = list()

    if rate_limiter:
        rate_limiter.reset_if_it_is_time()
        # This size includes some bytes of Python overhead, but otherwise we
        # would have to either count characters or decode the whole string
        # before checking the rate limit.
        size = sys.getsizeof(data)
        if not rate_limiter.check_and_count(peer_address, size):
            log.warning("Sender is rate limited", sender=peer_address)
            return []

    for line in data.splitlines():
        line = line.strip()
        if not line:
            continue

        logger = log.bind(peer_address=to_checksum_address(peer_address))
        try:
            message = MessageSerializer.deserialize(line)
        except (SerializationError, ValidationError, KeyError,
                ValueError) as ex:
            logger.warning("Message data JSON is not a valid message",
                           message_data=line,
                           _exc=ex)
            continue

        if not isinstance(message, SignedMessage):
            logger.warning("Received invalid message", message=message)
            continue

        if message.sender != peer_address:
            logger.warning("Message not signed by sender!",
                           message=message,
                           signer=message.sender)
            continue

        messages.append(message)

    return messages
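deserialize_messages only relies on two RateLimiter methods: reset_if_it_is_time() and check_and_count(sender, size). A minimal sketch of a compatible limiter, assuming a fixed byte budget per sender per time window (an illustration, not the Raiden implementation):

import time
from collections import defaultdict
from typing import DefaultDict


class SimpleRateLimiter:
    """Hypothetical drop-in for the RateLimiter interface used above."""

    def __init__(self, allowed_bytes: int = 75_000, reset_interval: float = 60.0):
        self.allowed_bytes = allowed_bytes
        self.reset_interval = reset_interval
        self.next_reset = time.monotonic() + reset_interval
        self.bytes_seen: DefaultDict[bytes, int] = defaultdict(int)

    def reset_if_it_is_time(self) -> None:
        # Forget all per-sender counters once the window has elapsed.
        if time.monotonic() >= self.next_reset:
            self.bytes_seen.clear()
            self.next_reset = time.monotonic() + self.reset_interval

    def check_and_count(self, sender: bytes, added_bytes: int) -> bool:
        # Count the new payload and report whether the sender is still
        # within its byte budget for the current window.
        self.bytes_seen[sender] += added_bytes
        return self.bytes_seen[sender] <= self.allowed_bytes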
Example no. 8
def make_message(sign=True, overwrite_data=None):
    room = Room(None, "!roomID:server")
    if not overwrite_data:
        message = SecretRequest(
            message_identifier=random.randint(0, UINT64_MAX),
            payment_identifier=1,
            secrethash=factories.UNIT_SECRETHASH,
            amount=1,
            expiration=10,
            signature=EMPTY_SIGNATURE,
        )
        if sign:
            message.sign(LocalSigner(factories.HOP1_KEY))
        data = MessageSerializer.serialize(message)
    else:
        data = overwrite_data

    event = dict(
        type="m.room.message", sender=USERID1, content={"msgtype": "m.text", "body": data}
    )
    return room, event
Example no. 9
def test_deserialize_messages_invalid_sender(request_monitoring_message):
    message = MessageSerializer.serialize(request_monitoring_message)

    messages = deserialize_messages(data=message,
                                    peer_address=INVALID_PEER_ADDRESS)
    assert len(messages) == 0
Example no. 10
def test_deserialize_messages_valid_message(request_monitoring_message):
    message = MessageSerializer.serialize(request_monitoring_message)

    messages = deserialize_messages(
        data=message, peer_address=request_monitoring_message.sender)
    assert len(messages) == 1
Example no. 11
def test_transport_does_not_receive_broadcast_rooms_updates(matrix_transports):
    """ Ensure that Matrix server-side filters take effect on sync for broadcast room content.

    The test sets up 3 transports where:
    Transport0 sends a message to the PFS broadcast room.
    Transport1 has an active sync filter ID that filters out broadcast room messages.
    Transport2 has NO active sync filter so it receives everything.

    The test should wait for Transport0 to send a message and then
    verify that Transport2 has received the message while Transport1
    did not.
    """
    raiden_service0 = MockRaidenService(None)
    raiden_service1 = MockRaidenService(None)
    raiden_service2 = MockRaidenService(None)

    transport0, transport1, transport2 = matrix_transports

    received_sync_events: Dict[str, List[Dict[str, Any]]] = {"t1": [], "t2": []}

    def _handle_responses(
        name: str, responses: List[Dict[str, Any]], first_sync: bool = False
    ):  # pylint: disable=unused-argument
        for response in responses:
            joined_rooms = response.get("rooms", {}).get("join", {})
            for joined_room in joined_rooms.values():
                timeline_events = joined_room.get("timeline").get("events", [])
                message_events = [
                    event for event in timeline_events if event["type"] == "m.room.message"
                ]
                received_sync_events[name].extend(message_events)

    # Replace the transport's response handler so we can detect
    # whether a sync delivered a message
    transport1._client._handle_responses = partial(_handle_responses, "t1")
    transport2._client._handle_responses = partial(_handle_responses, "t2")

    transport0.start(raiden_service0, [], None)
    transport1.start(raiden_service1, [], None)
    transport2.start(raiden_service2, [], None)

    pfs_broadcast_room_alias = make_room_alias(transport0.chain_id, PATH_FINDING_BROADCASTING_ROOM)
    pfs_broadcast_room_t0 = transport0._broadcast_rooms[pfs_broadcast_room_alias]

    # Get the sync helper to control flow of asynchronous syncs
    sync_progress1 = transport1._client.sync_progress
    sync_progress2 = transport2._client.sync_progress

    # Reset transport2's sync filter identifier so that
    # it receives broadcast messages
    assert transport2._client._sync_filter_id is not None
    transport2._client._sync_filter_id = None

    # Get the last sync tokens so we can track the processed state later
    last_synced_token1 = sync_progress1.last_synced
    # For transport2 we must make sure the current sync already used the filter reset, hence wait()
    last_synced_token2 = sync_progress2.synced_event.wait()[0]
    # Send a message to the broadcast room; transport1's sync filter
    # should keep it out of transport1's sync entirely
    message = Processed(message_identifier=1, signature=EMPTY_SIGNATURE)
    message_text = MessageSerializer.serialize(message)
    pfs_broadcast_room_t0.send_text(message_text)

    # Wait for the current tokens to be processed plus one additional sync;
    # the message can only appear in a sync after the stored token
    sync_progress1.wait_for_processed(last_synced_token1, 1)
    sync_progress2.wait_for_processed(last_synced_token2, 1)

    # Transport2 should have received the message
    assert received_sync_events["t2"]
    event_body = received_sync_events["t2"][0]["content"]["body"]
    assert message_text == event_body

    # Transport1 used the filter so nothing was received
    assert not received_sync_events["t1"]
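The server-side filtering relies on a Matrix sync filter: per the Matrix client-server API, listing a room in the filter's room.not_rooms excludes its events from /sync responses. A sketch of the kind of filter the test assumes (the room ID is hypothetical):

# Assumed shape of a sync filter that drops broadcast room traffic.
# After uploading it, passing the returned filter ID to /sync means
# events from the excluded room never reach the client.
broadcast_filter = {
    "room": {
        "not_rooms": ["!pfsBroadcastRoom:server"],  # hypothetical room ID
    }
}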
Example no. 12
def test_encoding_and_decoding():
    message_factories = (
        factories.LockedTransferProperties(),
        factories.RefundTransferProperties(),
        factories.LockExpiredProperties(),
        factories.UnlockProperties(),
    )
    messages = [factories.create(factory) for factory in message_factories]

    # TODO Handle these with factories once #5091 is implemented
    messages.append(
        Delivered(
            delivered_message_identifier=factories.make_message_identifier(),
            signature=factories.make_signature(),
        ))
    messages.append(
        Processed(
            message_identifier=factories.make_message_identifier(),
            signature=factories.make_signature(),
        ))
    messages.append(
        RevealSecret(
            message_identifier=factories.make_message_identifier(),
            secret=factories.make_secret(),
            signature=factories.make_signature(),
        ))
    messages.append(
        SecretRequest(
            message_identifier=factories.make_message_identifier(),
            payment_identifier=factories.make_payment_id(),
            secrethash=factories.make_secret_hash(),
            amount=factories.make_token_amount(),
            expiration=factories.make_block_number(),
            signature=factories.make_signature(),
        ))
    messages.append(
        WithdrawRequest(
            message_identifier=factories.make_message_identifier(),
            chain_id=factories.make_chain_id(),
            token_network_address=factories.make_token_network_address(),
            channel_identifier=factories.make_channel_identifier(),
            participant=factories.make_address(),
            total_withdraw=factories.make_token_amount(),
            nonce=factories.make_nonce(),
            expiration=factories.make_block_number(),
            signature=factories.make_signature(),
        ))
    messages.append(
        WithdrawConfirmation(
            message_identifier=factories.make_message_identifier(),
            chain_id=factories.make_chain_id(),
            token_network_address=factories.make_token_network_address(),
            channel_identifier=factories.make_channel_identifier(),
            participant=factories.make_address(),
            total_withdraw=factories.make_token_amount(),
            nonce=factories.make_nonce(),
            expiration=factories.make_block_number(),
            signature=factories.make_signature(),
        ))
    messages.append(
        WithdrawExpired(
            message_identifier=factories.make_message_identifier(),
            chain_id=factories.make_chain_id(),
            token_network_address=factories.make_token_network_address(),
            channel_identifier=factories.make_channel_identifier(),
            participant=factories.make_address(),
            total_withdraw=factories.make_token_amount(),
            nonce=factories.make_nonce(),
            expiration=factories.make_block_number(),
            signature=factories.make_signature(),
        ))
    messages.append(
        PFSCapacityUpdate(
            canonical_identifier=factories.make_canonical_identifier(),
            updating_participant=factories.make_address(),
            other_participant=factories.make_address(),
            updating_nonce=factories.make_nonce(),
            other_nonce=factories.make_nonce(),
            updating_capacity=factories.make_token_amount(),
            other_capacity=factories.make_token_amount(),
            reveal_timeout=factories.make_uint64(),
            signature=factories.make_signature(),
        ))
    messages.append(
        PFSFeeUpdate(
            canonical_identifier=factories.make_canonical_identifier(),
            updating_participant=factories.make_address(),
            fee_schedule=factories.create(
                factories.FeeScheduleStateProperties()),
            timestamp=datetime.now(),
            signature=factories.make_signature(),
        ))
    messages.append(
        RequestMonitoring(
            reward_amount=factories.make_token_amount(),
            balance_proof=SignedBlindedBalanceProof.from_balance_proof_signed_state(
                factories.create(factories.BalanceProofSignedStateProperties())
            ),
            monitoring_service_contract_address=factories.make_address(),
            non_closing_participant=factories.make_address(),
            non_closing_signature=factories.make_signature(),
            signature=factories.make_signature(),
        ))

    for message in messages:
        serialized = MessageSerializer.serialize(message)
        deserialized = MessageSerializer.deserialize(serialized)
        assert deserialized == message
Example no. 13
def test_bad_messages():
    "SerializationErrors should be raised on all kinds of wrong messages"
    for message in ["{}", "[]", '"foo"', "123"]:
        with pytest.raises(SerializationError):
            MessageSerializer.deserialize(message)
Example no. 14
def test_encoding_and_decoding():
    for message in messages:
        serialized = MessageSerializer.serialize(message)
        deserialized = MessageSerializer.deserialize(serialized)
        assert deserialized == message