Exemplo n.º 1
0
    def save_checkpoint(self, process, tag=None):
        """Persist a Process instance.

        :param process: :class:`aiida.engine.Process`
        :param tag: optional checkpoint identifier to allow distinguishing multiple checkpoints for the same process
        :raises: :class:`plumpy.PersistenceError` Raised if there was a problem saving the checkpoint
        :return: the bundle that was persisted on the process node
        """
        LOGGER.debug('Persisting process<%d>', process.pid)

        if tag is not None:
            raise NotImplementedError('Checkpoint tags not supported yet')

        try:
            bundle = plumpy.Bundle(
                process, plumpy.LoadSaveContext(loader=get_object_loader()))
        except ImportError as exception:
            # Couldn't create the bundle; chain the original exception (PEP 3134) so the
            # import failure is preserved as the cause of the persistence error
            raise plumpy.PersistenceError(
                f"Failed to create a bundle for '{process}': {traceback.format_exc()}"
            ) from exception

        try:
            # Store the serialized bundle on the process node so it can be recreated later
            process.node.set_checkpoint(serialize.serialize(bundle))
        except Exception as exception:
            raise plumpy.PersistenceError(
                f"Failed to store a checkpoint for '{process}': {traceback.format_exc()}"
            ) from exception

        return bundle
Exemplo n.º 2
0
    def test_serialize_group_round_trip(self):
        """A stored group should survive a serialize/deserialize round trip intact."""
        original = orm.Group(label='test_serialize_group_round_trip').store()
        round_tripped = serialize.deserialize(serialize.serialize(original))

        self.assertEqual(original.uuid, round_tripped.uuid)
        self.assertEqual(original.label, round_tripped.label)
Exemplo n.º 3
0
    def test_serialize_round_trip(self):
        """
        Test the serialization of a dictionary with Nodes in various data structure
        Also make sure that the serialized data is json-serializable
        """
        first_node = orm.Data().store()
        second_node = orm.Data().store()

        original = {
            'test': 1,
            'list': [1, 2, 3, first_node],
            'dict': {
                ('Si', ): second_node,
                'foo': 'bar'
            },
            'baz': 'aar'
        }

        restored = serialize.deserialize(serialize.serialize(original))

        # Compare element by element until a general-purpose helper exists that
        # can properly equate two node instances
        self.assertEqual(original['test'], restored['test'])
        self.assertEqual(original['baz'], restored['baz'])
        self.assertEqual(original['list'][:3], restored['list'][:3])
        self.assertEqual(original['list'][3].uuid, restored['list'][3].uuid)
        self.assertEqual(original['dict'][('Si', )].uuid,
                         restored['dict'][('Si', )].uuid)
Exemplo n.º 4
0
    def test_serialize_computer_round_trip(self):
        """A computer should survive a serialize/deserialize round trip intact."""
        original = self.computer
        round_tripped = serialize.deserialize(serialize.serialize(original))

        # pylint: disable=no-member
        self.assertEqual(original.uuid, round_tripped.uuid)
        self.assertEqual(original.name, round_tripped.name)
Exemplo n.º 5
0
    def encode_input_args(self, inputs: Dict[str, Any]) -> str:  # pylint: disable=no-self-use
        """
        Encode input arguments such that they may be saved in a Bundle

        :param inputs: A mapping of the inputs as passed to the process
        :return: The encoded (serialized) inputs
        """
        encoded = serialize.serialize(inputs)
        return encoded
Exemplo n.º 6
0
    def encode_input_args(self, inputs):
        """
        Encode input arguments such that they may be saved in a Bundle

        :param inputs: A mapping of the inputs as passed to the process
        :return: The encoded (serialized) inputs
        """
        # Imported locally to avoid a module-level import cycle
        from aiida.orm.utils import serialize

        encoded = serialize.serialize(inputs)
        return encoded
Exemplo n.º 7
0
    def test_serialize_group(self):
        """
        Test that serialization and deserialization of Groups works.
        Also make sure that the serialized data is json-serializable
        """
        stored_group = orm.Group(label='groupie').store()

        original = {'group': stored_group}

        restored = serialize.deserialize(serialize.serialize(original))

        self.assertEqual(original['group'].uuid, restored['group'].uuid)
        self.assertEqual(original['group'].label, restored['group'].label)
Exemplo n.º 8
0
    def test_mixed_attribute_normal_dict(self):
        """Regression test for #3092.

        The yaml mapping constructor in `aiida.orm.utils.serialize` was not properly "deeply" reconstructing nested
        mappings, causing a mix of attribute dictionaries and normal dictionaries to lose information in a round-trip.

        If a nested `AttributeDict` contained a normal dictionary, the content of the latter would be lost during the
        deserialization, despite the information being present in the serialized yaml dump.
        """
        from aiida.common.extendeddicts import AttributeDict

        # Build a nested `AttributeDict`: nesting converts inner dicts to `AttributeDict` recursively
        mixed = AttributeDict({'nested': AttributeDict({'dict': 'string', 'value': 1})})

        # Insert a plain dictionary inside the attribute dictionary after construction,
        # so it stays a normal dict rather than being converted
        mixed['nested']['normal'] = {'a': 2}

        round_tripped = serialize.deserialize(serialize.serialize(mixed))

        self.assertEqual(mixed, round_tripped)
Exemplo n.º 9
0
    def test_serialize_unstored_computer(self):
        """Test that you can't serialize an unstored computer"""
        unstored = orm.Computer('test_computer', 'test_host')

        with self.assertRaises(ValueError):
            serialize.serialize(unstored)
Exemplo n.º 10
0
    def test_serialize_unstored_group(self):
        """Test that you can't serialize an unstored group"""
        unstored = orm.Group(label='test_serialize_unstored_group')

        with self.assertRaises(ValueError):
            serialize.serialize(unstored)
Exemplo n.º 11
0
    def test_serialize_unstored_node(self):
        """Test that you can't serialize an unstored node"""
        unstored = orm.Data()

        with self.assertRaises(ValueError):
            serialize.serialize(unstored)
Exemplo n.º 12
0
 def test_serialize_node_round_trip(self):
     """A stored node should survive a serialize/deserialize round trip intact."""
     stored = orm.Data().store()
     round_tripped = serialize.deserialize(serialize.serialize(stored))
     self.assertEqual(stored.uuid, round_tripped.uuid)
Exemplo n.º 13
0
    def _continue(self, communicator, pid, nowait, tag=None):
        """Continue the task.

        Note that the task may already have been completed, as indicated by the corresponding node, in which
        case it is not continued, but the corresponding future is reconstructed and returned. This scenario may
        occur when the Process was already completed by another worker that however failed to send the acknowledgment.

        This is an old-style tornado coroutine: results are returned by raising ``gen.Return`` rather than with a
        plain ``return`` statement.

        :param communicator: the communicator that called this method
        :param pid: the pid of the process to continue
        :param nowait: if True don't wait for the process to finish, just return the pid, otherwise wait and
            return the results
        :param tag: the tag of the checkpoint to continue from
        """
        # Local imports to avoid module-level import cycles with aiida.orm / aiida.engine
        from aiida.common import exceptions
        from aiida.engine.exceptions import PastException
        from aiida.orm import load_node, Data
        from aiida.orm.utils import serialize

        try:
            node = load_node(pk=pid)
        except (exceptions.MultipleObjectsError, exceptions.NotExistent):
            # In this case, the process node corresponding to the process id, cannot be resolved uniquely or does not
            # exist. The latter being the most common case, where someone deleted the node, before the process was
            # properly terminated. Since the node is never coming back and so the process will never be able to continue
            # we raise `Return` instead of `TaskRejected` because the latter would cause the task to be resent and start
            # to ping-pong between RabbitMQ and the daemon workers.
            LOGGER.exception('Cannot continue process<%d>', pid)
            raise gen.Return(False)

        if node.is_terminated:

            LOGGER.info(
                'not continuing process<%d> which is already terminated with state %s',
                pid, node.process_state)

            # Rebuild a future reflecting the terminal state of the node, so the caller receives
            # the same outcome it would have gotten had it continued the process itself
            future = Future()

            if node.is_finished:
                # Finished successfully: the result is the mapping of output nodes keyed by link label
                future.set_result({
                    entry.link_label: entry.node
                    for entry in node.get_outgoing(node_class=Data)
                })
            elif node.is_excepted:
                future.set_exception(PastException(node.exception))
            elif node.is_killed:
                future.set_exception(plumpy.KilledError())

            # `future.result()` re-raises the stored exception for excepted/killed nodes
            raise gen.Return(future.result())

        try:
            result = yield super()._continue(communicator, pid, nowait, tag)
        except ImportError as exception:
            message = 'the class of the process could not be imported.'
            self.handle_continue_exception(node, exception, message)
            raise
        except Exception as exception:
            message = 'failed to recreate the process instance in order to continue it.'
            self.handle_continue_exception(node, exception, message)
            raise

        # Ensure that the result is serialized such that communication thread won't have to do database operations
        try:
            serialized = serialize.serialize(result)
        except Exception:
            LOGGER.exception('failed to serialize the result for process<%d>',
                             pid)
            raise

        raise gen.Return(serialized)