Example #1
def test_actioninitchain_restore():
    """ ActionInitChain *must* restore the previous pseudo random generator
    state.

    Message identifiers are used for confirmation messages, e.g. delivered and
    processed messages. Each message identifier must not collide with a
    previously used identifier, which is why the PRNG is used.

    Additionally, during restarts the state changes are reapplied, and their
    re-execution must be deterministic, otherwise undefined behavior may
    happen. For this reason the state of the PRNG must be restored.

    If the above is not respected, the message ids generated during restart
    will not match the previous IDs and the message queues won't be properly
    cleared up.
    """
    pseudo_random_generator = random.Random()
    block_number = 577
    our_address = factories.make_address()
    chain_id = 777

    original_obj = state_change.ActionInitChain(
        pseudo_random_generator,
        block_number,
        our_address,
        chain_id,
    )

    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj),
    )

    assert original_obj == decoded_obj
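The determinism that the docstring above depends on comes straight from the PRNG state: saving and later restoring it reproduces the same identifier sequence. A minimal stdlib-only sketch (not Raiden code) of this property:

import random

# Stdlib-only sketch: restoring the saved PRNG state makes the replayed
# identifiers match the ones generated before the "restart".
prng = random.Random()
saved_state = prng.getstate()

original_ids = [prng.randint(0, 2 ** 64 - 1) for _ in range(3)]

# Simulate a restart by restoring the previously saved state and replaying.
prng.setstate(saved_state)
replayed_ids = [prng.randint(0, 2 ** 64 - 1) for _ in range(3)]

assert original_ids == replayed_ids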
Example #2
def test_actioninitchain_restore():
    """ ActionInitChain *must* restore the previous pseudo random generator
    state.

    Message identifiers are used for confirmation messages, e.g. delivered and
    processed messages. Each message identifier must not collide with a
    previously used identifier, which is why the PRNG is used.

    Additionally, during restarts the state changes are reapplied, and their
    re-execution must be deterministic, otherwise undefined behavior may
    happen. For this reason the state of the PRNG must be restored.

    If the above is not respected, the message ids generated during restart
    will not match the previous IDs and the message queues won't be properly
    cleared up.
    """
    pseudo_random_generator = random.Random()
    block_number = 577
    our_address = factories.make_address()
    chain_id = 777

    original_obj = state_change.ActionInitChain(
        pseudo_random_generator=pseudo_random_generator,
        block_number=block_number,
        block_hash=factories.make_block_hash(),
        our_address=our_address,
        chain_id=chain_id,
    )

    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj))

    assert original_obj == decoded_obj
Example #3
    def dispatch(self, state_change: StateChange) -> List[Event]:
        """ Apply the `state_change` in the current machine and return the
        resulting events.

        Args:
            state_change: An object representation of a state
            change.

        Return:
            A list of events produced by the state transition.
            It's the upper layer's responsibility to decide how to handle
            these events.
        """
        assert isinstance(state_change, StateChange)

        # check state change serialization
        try:
            json = JSONSerializer.serialize(state_change)
            restored = JSONSerializer.deserialize(json)

            if state_change != restored:
                log.error('Serialisation failed for: %s',
                          state_change.__class__.__name__)

        except Exception:
            log.error('Serialisation failed for: %s',
                      state_change.__class__.__name__)

        # the state objects must be treated as immutable, so make a copy of the
        # current state and pass the copy to the state machine to be modified.
        next_state = deepcopy(self.current_state)

        # update the current state by applying the change
        iteration = self.state_transition(
            next_state,
            state_change,
        )

        assert isinstance(iteration, TransitionResult)

        self.current_state = iteration.new_state
        events = iteration.events

        # check state serialization
        state = self.current_state
        if state is not None:
            json = JSONSerializer.serialize(state)
            restored = JSONSerializer.deserialize(json)

            if state != restored:
                compare_state_trees(state, restored)

        assert isinstance(self.current_state, (State, type(None)))
        assert all(isinstance(e, Event) for e in events)

        return events
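The contract in the docstring above, copy the previous state, apply a pure transition function, and hand the resulting events back to the caller, can be illustrated with a self-contained sketch. The classes and the transition function below are illustrative only, not part of the Raiden API:

from copy import deepcopy
from typing import Any, Callable, List, NamedTuple


class MiniTransitionResult(NamedTuple):
    new_state: Any
    events: List


class MiniStateManager:
    """ Illustrative-only state machine mirroring the dispatch contract. """

    def __init__(self, state_transition: Callable, current_state=None):
        self.state_transition = state_transition
        self.current_state = current_state

    def dispatch(self, state_change) -> List:
        # Treat the state as immutable: only a copy is handed to the
        # transition function.
        next_state = deepcopy(self.current_state)
        iteration = self.state_transition(next_state, state_change)
        self.current_state = iteration.new_state
        return iteration.events


def count_blocks(state, state_change):
    # A trivial, pure transition function: count 'block' state changes and
    # emit no events.
    state = state or {'blocks_seen': 0}
    if state_change == 'block':
        state['blocks_seen'] += 1
    return MiniTransitionResult(new_state=state, events=[])


manager = MiniStateManager(count_blocks)
assert manager.dispatch('block') == []
assert manager.current_state == {'blocks_seen': 1}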
Example #4
def test_upgrade_manager_restores_backup(tmp_path):
    db_path = tmp_path / Path('v17_log.db')
    upgrade_manager = UpgradeManager(db_filename=db_path)

    old_db_filename = tmp_path / Path('v16_log.db')
    storage = setup_storage(old_db_filename)

    with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=16):
        storage.update_version()

    with patch('raiden.utils.upgrades.older_db_file') as older_db_file:
        older_db_file.return_value = str(old_db_filename)
        upgrade_manager.run()

    # Once restored, the state changes written above should be
    # in the restored database
    storage = SerializedSQLiteStorage(str(db_path), JSONSerializer())
    state_change_record = storage.get_latest_state_change_by_data_field(
        {'_type': 'raiden.transfer.state_change.ActionInitChain'},
    )
    assert state_change_record.data is not None
    assert not old_db_filename.exists()
    assert Path(str(old_db_filename).replace('_log.db',
                                             '_log.backup')).exists()
Example #5
def setup_storage(db_path):
    storage = SerializedSQLiteStorage(str(db_path), JSONSerializer())

    chain_state_data = Path(__file__).parent / 'data/v16_chainstate.json'
    chain_state = chain_state_data.read_text()

    storage.write_state_change(
        ActionInitChain(
            pseudo_random_generator=random.Random(),
            block_number=1,
            block_hash=factories.make_block_hash(),
            our_address=factories.make_address(),
            chain_id=1,
        ),
        datetime.utcnow().isoformat(timespec='milliseconds'),
    )

    cursor = storage.conn.cursor()
    cursor.execute(
        """
        INSERT INTO state_snapshot(identifier, statechange_id, data)
        VALUES(1, 1, ?)
        """,
        (chain_state, ),
    )
    storage.conn.commit()
    return storage
Example #6
def test_chainstate_restore():
    pseudo_random_generator = random.Random()
    block_number = 577
    our_address = factories.make_address()
    chain_id = 777

    original_obj = state.ChainState(
        pseudo_random_generator=pseudo_random_generator,
        block_number=block_number,
        our_address=our_address,
        chain_id=chain_id,
    )

    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj),
    )

    assert original_obj == decoded_obj
Example #7
def test_object_custom_serialization():
    # Simple encode/decode
    original_obj = MockObject(attr1="Hello", attr2="World")
    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj))

    assert original_obj == decoded_obj

    # Encode/Decode with embedded objects
    embedded_obj = MockObject(amount=1, identifier="123")
    original_obj = MockObject(embedded=embedded_obj)
    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj))

    assert original_obj == decoded_obj
    assert decoded_obj.embedded.amount == 1
    assert decoded_obj.embedded.identifier == "123"
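MockObject is a test double defined in the test module itself. The round-trip assertions above only work because it stores its keyword arguments and compares by value; a possible shape for such a helper (an assumption, not the actual class from the test suite) is:

# A possible shape for the MockObject test double used above (an assumption,
# not the actual helper): it stores arbitrary keyword arguments and compares
# by value, which is what makes the round-trip assertions meaningful.
class MockObject:
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def to_dict(self):
        return dict(self.__dict__)

    @classmethod
    def from_dict(cls, data):
        return cls(**data)

    def __eq__(self, other):
        if not isinstance(other, MockObject):
            return NotImplemented
        return self.__dict__ == other.__dict__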
Example #8
def test_chainstate_restore():
    pseudo_random_generator = random.Random()
    block_number = 577
    our_address = factories.make_address()
    chain_id = 777

    original_obj = state.ChainState(
        pseudo_random_generator=pseudo_random_generator,
        block_number=block_number,
        our_address=our_address,
        chain_id=chain_id,
    )

    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj),
    )

    assert original_obj == decoded_obj
Example #9
def test_object_custom_serialization():
    # Simple encode/decode
    original_obj = MockObject(attr1="Hello", attr2="World")
    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj),
    )

    assert original_obj == decoded_obj

    # Encode/Decode with embedded objects
    embedded_obj = MockObject(amount=1, identifier='123')
    original_obj = MockObject(embedded=embedded_obj)
    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj),
    )

    assert original_obj == decoded_obj
    assert decoded_obj.embedded.amount == 1
    assert decoded_obj.embedded.identifier == '123'
Example #10
def test_decode_with_unknown_type():
    test_str = """
{
    "_type": "some.non.existent.package",
    "attr1": "test"
}
"""
    with pytest.raises(TypeError) as m:
        JSONSerializer.deserialize(test_str)
    # m.value holds the raised exception once the block exits
    assert str(m.value) == 'Module some.non.existent.package does not exist'

    test_str = """
{
    "_type": "raiden.tests.unit.test_serialization.NonExistentClass",
    "attr1": "test"
}
"""
    with pytest.raises(TypeError) as m:
        JSONSerializer.deserialize(test_str)
    assert str(m.value) == 'raiden.tests.unit.test_serialization.NonExistentClass'
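These assertions rely on the decoder resolving the dotted path stored in the _type field back to an importable class and raising TypeError when it cannot. A generic sketch of that pattern (not Raiden's actual decoder, and the real error messages may be produced differently):

import importlib
import json

# Generic sketch of the pattern exercised above, not Raiden's actual decoder:
# the payload carries a _type field with the dotted path of the class, and
# decoding resolves it via importlib, raising TypeError (with messages
# mirroring the assertions above) when the module or class cannot be found.
def decode_typed_json(data: str):
    payload = json.loads(data)
    dotted_path = payload.pop('_type')
    module_name, _, class_name = dotted_path.rpartition('.')
    try:
        module = importlib.import_module(module_name)
    except ImportError:
        raise TypeError(f'Module {dotted_path} does not exist')
    klass = getattr(module, class_name, None)
    if klass is None:
        raise TypeError(dotted_path)
    instance = klass.__new__(klass)
    instance.__dict__.update(payload)
    return instance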
Example #11
def setup_storage(db_path):
    storage = SQLiteStorage(str(db_path), JSONSerializer())
    storage.write_state_change(
        ActionInitChain(
            pseudo_random_generator=random.Random(),
            block_number=1,
            our_address=factories.make_address(),
            chain_id=1,
        ),
        datetime.utcnow().isoformat(timespec='milliseconds'),
    )
    return storage
Example #12
def test_decode_with_ref_cache():
    embedded_A = MockObject(channel_id='0x3DE6B821E4fb4599653BF76FF60dC5FaF2e92De8')
    A = MockObject(attr1=10, attr2='123', embedded=embedded_A)

    decoded_A = JSONSerializer.deserialize(
        JSONSerializer.serialize(A),
    )

    assert A == decoded_A

    # Create an exact replica of A
    embedded_B = MockObject(channel_id='0x3DE6B821E4fb4599653BF76FF60dC5FaF2e92De8')
    B = MockObject(attr1=10, attr2='123', embedded=embedded_B)

    decoded_B = JSONSerializer.deserialize(
        JSONSerializer.serialize(B),
    )

    assert B == decoded_B
    assert id(B) != id(decoded_B)
    # Make sure that the original decoded A
    # is returned
    assert id(decoded_A) == id(decoded_B)

    # Make sure no object is cached
    RaidenJSONDecoder.cache_object_references = False
    RaidenJSONDecoder.ref_cache.clear()

    # Decode some object
    decoded_B = JSONSerializer.deserialize(
        JSONSerializer.serialize(B),
    )

    for _, cache_entries in RaidenJSONDecoder.ref_cache._cache.items():
        assert len(cache_entries) == 0
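The final loop relies on RaidenJSONDecoder.ref_cache keeping per-type lists of previously decoded objects, so that decoding an equal payload returns the very first decoded instance. A minimal sketch of that idea (illustrative only, not the real cache class):

# Minimal sketch of the reference-caching idea exercised above, not the
# RaidenJSONDecoder implementation: per-type lists of previously decoded
# objects are kept, and an equal object decoded later is replaced by the
# cached instance so identical sub-objects are shared.
class ReferenceCache:
    def __init__(self):
        self._cache = {}

    def clear(self):
        self._cache.clear()

    def get_or_add(self, obj):
        entries = self._cache.setdefault(type(obj).__name__, [])
        for cached in entries:
            if cached == obj:
                return cached
        entries.append(obj)
        return obj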
Example #13
def test_upgrade_v16_to_v17(tmp_path):
    db_path = tmp_path / Path('test.db')

    old_db_filename = tmp_path / Path('v16_log.db')
    with patch('raiden.utils.upgrades.older_db_file') as older_db_file:
        older_db_file.return_value = str(old_db_filename)
        storage = setup_storage(str(old_db_filename))
        with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=16):
            storage.update_version()

    manager = UpgradeManager(db_filename=str(db_path))
    manager.run()

    storage = SerializedSQLiteStorage(str(db_path), JSONSerializer())
    snapshot = storage.get_latest_state_snapshot()
    assert snapshot is not None
Example #14
    def run(self):
        """
        The `_current_db_filename` is going to hold the filename of the database
        with the new version. However, the previous version's data
        is going to exist in a file whose name contains the old version.
        Therefore, running the migration means copying all data to the
        current version's database and then executing the migration functions.
        """
        old_db_filename = older_db_file(str(self._current_db_filename.parent))

        with get_file_lock(old_db_filename), get_file_lock(
                self._current_db_filename):
            if get_db_version(self._current_db_filename) == RAIDEN_DB_VERSION:
                # The current version has already been created / upgraded.
                return
            else:
                # The version inside the current database was not the expected one.
                # Delete and re-run migration
                self._delete_current_db()

            older_version = get_db_version(old_db_filename)
            if not older_version:
                # There are no older versions to upgrade from.
                return

            self._copy(str(old_db_filename), str(self._current_db_filename))

            storage = SQLiteStorage(str(self._current_db_filename),
                                    JSONSerializer())

            log.debug(f'Upgrading database to v{RAIDEN_DB_VERSION}')

            cursor = storage.conn.cursor()
            with in_transaction(cursor):
                try:
                    for upgrade_func in UPGRADES_LIST:
                        upgrade_func(cursor, older_version, RAIDEN_DB_VERSION)

                    update_version(cursor)
                    # Prevent the upgrade from happening on next restart
                    self._backup_old_db(old_db_filename)
                except RaidenDBUpgradeError:
                    self._delete_current_db()
                    raise
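Each entry of UPGRADES_LIST is invoked as upgrade_func(cursor, older_version, RAIDEN_DB_VERSION) inside the single transaction opened above. A hypothetical migration following that calling convention (the table and column names are made up for illustration):

import sqlite3

# Hypothetical migration following the upgrade_func(cursor, older_version,
# newer_version) calling convention used above.  The table and column names
# are illustrative, not part of the real Raiden schema.
def upgrade_example_add_log_time(
        cursor: sqlite3.Cursor,
        older_version: int,
        newer_version: int,
) -> None:
    if older_version >= 17:
        # The column already exists in databases created at or after v17.
        return

    # Runs inside the transaction opened by UpgradeManager.run().
    cursor.execute('ALTER TABLE state_events ADD COLUMN log_time TEXT')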
Example #15
def test_upgrade_v17_to_v18(tmp_path):
    db_path = tmp_path / Path('test.db')

    old_db_filename = tmp_path / Path('v17_log.db')
    with patch('raiden.utils.upgrades.older_db_file') as older_db_file:
        older_db_file.return_value = str(old_db_filename)
        storage = setup_storage(str(old_db_filename))
        with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=17):
            storage.update_version()

    manager = UpgradeManager(db_filename=str(db_path))
    manager.run()

    storage = SerializedSQLiteStorage(str(db_path), JSONSerializer())
    _, snapshot = storage.get_latest_state_snapshot()

    secrethash = list(snapshot.payment_mapping.secrethashes_to_task.keys())[0]
    mediator_task = snapshot.payment_mapping.secrethashes_to_task[secrethash]
    assert mediator_task.mediator_state.waiting_transfer is not None
    assert mediator_task.mediator_state.routes
Example #16
        def send_delivered_for(message: SignedMessage):
            delivered_message = Delivered(message.message_identifier)
            self._raiden_service.sign(delivered_message)
            self._send_raw(message.sender, JSONSerializer.serialize(delivered_message))