Example #1
    def test_get_incoming(self):
        """Test that `Node.get_incoming` will return stored and cached input links."""
        source_one = Data().store()
        source_two = Data().store()
        target = CalculationNode()

        target.add_incoming(source_one, LinkType.INPUT_CALC, 'link_one')
        target.add_incoming(source_two, LinkType.INPUT_CALC, 'link_two')

        # Without link type
        incoming_nodes = target.get_incoming().all()
        incoming_uuids = sorted(
            [neighbor.node.uuid for neighbor in incoming_nodes])
        self.assertEqual(incoming_uuids,
                         sorted([source_one.uuid, source_two.uuid]))

        # Using a single link type
        incoming_nodes = target.get_incoming(
            link_type=LinkType.INPUT_CALC).all()
        incoming_uuids = sorted(
            [neighbor.node.uuid for neighbor in incoming_nodes])
        self.assertEqual(incoming_uuids,
                         sorted([source_one.uuid, source_two.uuid]))

        # Using a link type tuple
        incoming_nodes = target.get_incoming(
            link_type=(LinkType.INPUT_CALC, LinkType.INPUT_WORK)).all()
        incoming_uuids = sorted(
            [neighbor.node.uuid for neighbor in incoming_nodes])
        self.assertEqual(incoming_uuids,
                         sorted([source_one.uuid, source_two.uuid]))
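
The same `get_incoming` API works outside a test case as well. The following is a minimal standalone sketch, assuming a configured AiiDA 1.x profile; the node and link names are purely illustrative.

from aiida import load_profile
from aiida.orm import Data, CalculationNode
from aiida.common.links import LinkType

load_profile()

source = Data().store()
target = CalculationNode()
target.add_incoming(source, link_type=LinkType.INPUT_CALC, link_label='parameters')
target.store()

# Every entry returned by `.all()` is a triple carrying the neighbour node,
# the link type and the link label.
for triple in target.get_incoming(link_type=LinkType.INPUT_CALC).all():
    print(triple.node.uuid, triple.link_type, triple.link_label)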
Example #2
    def fill_repo(self):
        from aiida.orm import JobCalculation, CalculationFactory, Data, DataFactory, Node

        extra_name = self.__class__.__name__ + "/test_with_subclasses"
        calc_params = {
            'computer': self.computer,
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }

        TemplateReplacerCalc = CalculationFactory(
            'simpleplugins.templatereplacer')
        ParameterData = DataFactory('parameter')

        a1 = JobCalculation(**calc_params).store()
        # To query only these nodes later
        a1.set_extra(extra_name, True)
        a2 = TemplateReplacerCalc(**calc_params).store()
        # To query only these nodes later
        a2.set_extra(extra_name, True)
        a3 = Data().store()
        a3.set_extra(extra_name, True)
        a4 = ParameterData(dict={'a': 'b'}).store()
        a4.set_extra(extra_name, True)
        a5 = Node().store()
        a5.set_extra(extra_name, True)
        # I don't set the extras, just to be sure that the filtering works
        # The filtering is needed because other tests will put stuff in the DB
        a6 = JobCalculation(**calc_params)
        a6.store()
        a7 = Node()
        a7.store()
Example #3
    def test_group_general(self):
        """
        General tests to verify that group addition with the skip_orm=True flag
        works properly
        """
        backend = self.backend

        node_01 = Data().store().backend_entity
        node_02 = Data().store().backend_entity
        node_03 = Data().store().backend_entity
        node_04 = Data().store().backend_entity
        node_05 = Data().store().backend_entity
        nodes = [node_01, node_02, node_03, node_04, node_05]

        simple_user = backend.users.create('*****@*****.**')
        group = backend.groups.create(label='test_adding_nodes',
                                      user=simple_user).store()
        # Single node in a list
        group.add_nodes([node_01], skip_orm=True)
        # List of nodes
        group.add_nodes([node_02, node_03], skip_orm=True)
        # Tuple of nodes
        group.add_nodes((node_04, node_05), skip_orm=True)

        # Check
        self.assertEqual(set(_.pk for _ in nodes),
                         set(_.pk for _ in group.nodes))

        # Try to add a node that is already present: there should be no problem
        group.add_nodes([node_01], skip_orm=True)
        self.assertEqual(set(_.pk for _ in nodes),
                         set(_.pk for _ in group.nodes))
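
The `skip_orm=True` path above goes through the backend group implementation directly. Everyday code usually works with the front-end `Group` class, which exposes the same add/iterate behaviour; a minimal sketch, assuming a configured AiiDA 1.x profile (the group label is chosen arbitrarily):

from aiida import load_profile
from aiida.orm import Data, Group

load_profile()

group = Group(label='test_adding_nodes_frontend').store()
nodes = [Data().store() for _ in range(3)]

# `add_nodes` accepts a single stored node or a list/tuple of stored nodes;
# adding a node that is already in the group is simply a no-op.
group.add_nodes(nodes)
group.add_nodes(nodes[0])

assert {node.pk for node in nodes} == {node.pk for node in group.nodes}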
Example #4
    def test_delete_through_utility_method(self):
        """Test deletion works correctly through the `aiida.backends.utils.delete_nodes_and_connections`."""
        from aiida.common import timezone
        from aiida.backends.utils import delete_nodes_and_connections

        data_one = Data().store()
        data_two = Data().store()
        calculation = CalculationNode()
        calculation.add_incoming(data_one, LinkType.INPUT_CALC, 'input_one')
        calculation.add_incoming(data_two, LinkType.INPUT_CALC, 'input_two')
        calculation.store()

        log_one = Log(timezone.now(), 'test', 'INFO', data_one.pk).store()
        log_two = Log(timezone.now(), 'test', 'INFO', data_two.pk).store()

        assert len(Log.objects.get_logs_for(data_one)) == 1
        assert Log.objects.get_logs_for(data_one)[0].pk == log_one.pk
        assert len(Log.objects.get_logs_for(data_two)) == 1
        assert Log.objects.get_logs_for(data_two)[0].pk == log_two.pk

        delete_nodes_and_connections([data_two.pk])

        assert len(Log.objects.get_logs_for(data_one)) == 1
        assert Log.objects.get_logs_for(data_one)[0].pk == log_one.pk
        assert len(Log.objects.get_logs_for(data_two)) == 0
Example #5
    def test_inputs_parents_relationship(self):
        """
        This test checks that the inputs_q and parents_q relationships and the
        corresponding properties work as expected.
        """
        n1 = Data().store()
        n2 = CalculationNode()
        n3 = Data().store()

        # Create links between these nodes
        n2.add_incoming(n1, link_type=LinkType.INPUT_CALC, link_label='N1')
        n2.store()
        n3.add_incoming(n2, link_type=LinkType.CREATE, link_label='N2')

        # Check that the result of inputs is a list
        self.assertIsInstance(n1.backend_entity.dbmodel.inputs, list,
                              'This is expected to be a list')

        # Check that the result of inputs_q is a query
        from sqlalchemy.orm.dynamic import AppenderQuery
        self.assertIsInstance(n1.backend_entity.dbmodel.inputs_q,
                              AppenderQuery,
                              'This is expected to be an AppenderQuery')

        # Check that the result of inputs is correct
        out = set([_.pk for _ in n3.backend_entity.dbmodel.inputs])
        self.assertEqual(out, set([n2.pk]))
Example #6
    def test_computer_user_immutability(self):
        """Test that computer and user of a node are immutable after storing."""
        node = Data().store()

        with self.assertRaises(exceptions.ModificationNotAllowed):
            node.computer = self.computer

        with self.assertRaises(exceptions.ModificationNotAllowed):
            node.user = self.user
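
The same immutability applies to attributes: once a node is stored they can no longer be modified, whereas extras stay mutable. A minimal sketch of that behaviour, assuming a configured AiiDA 1.x profile:

from aiida import load_profile
from aiida.common import exceptions
from aiida.orm import Data

load_profile()

node = Data()
node.set_attribute('answer', 42)  # attributes can be set freely before storing
node.store()

try:
    node.set_attribute('answer', 43)
except exceptions.ModificationNotAllowed:
    pass  # attributes are frozen once the node is stored

node.set_extra('tag', 'still-mutable')  # extras remain mutable after storing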
Example #7
    def test_with_subclasses(self, computer):

        extra_name = self.__class__.__name__ + "/test_with_subclasses"
        calc_params = {
            'computer': computer,
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }

        TemplateReplacerCalc = CalculationFactory(
            'simpleplugins.templatereplacer')
        ParameterData = DataFactory('parameter')

        a1 = JobCalculation(**calc_params).store()
        # To query only these nodes later
        a1.set_extra(extra_name, True)
        a2 = TemplateReplacerCalc(**calc_params).store()
        # To query only these nodes later
        a2.set_extra(extra_name, True)
        a3 = Data().store()
        a3.set_extra(extra_name, True)
        a4 = ParameterData(dict={'a': 'b'}).store()
        a4.set_extra(extra_name, True)
        a5 = Node().store()
        a5.set_extra(extra_name, True)
        # I don't set the extras, just to be sure that the filtering works
        # The filtering is needed because other tests will put stuff in the DB
        a6 = JobCalculation(**calc_params)
        a6.store()
        a7 = Node()
        a7.store()

        # Query by calculation
        results = list(JobCalculation.query(dbextras__key=extra_name))
        # a3, a4, a5 should not be found because they are not JobCalculations.
        # a6, a7 should not be found because they do not have the extra set.
        self.assertEquals(set([i.pk for i in results]), set([a1.pk, a2.pk]))

        # Same query, but by the generic Node class
        results = list(Node.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]),
                          set([a1.pk, a2.pk, a3.pk, a4.pk, a5.pk]))

        # Same query, but by the Data class
        results = list(Data.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]), set([a3.pk, a4.pk]))

        # Same query, but by the ParameterData subclass
        results = list(ParameterData.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]), set([a4.pk]))

        # Same query, but by the TemplateReplacerCalc subclass
        results = list(TemplateReplacerCalc.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]), set([a2.pk]))
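
The `JobCalculation.query(dbextras__key=...)` calls above use the legacy (pre-1.0) Django-style query interface. In AiiDA 1.x the same class-aware filtering is normally written with the `QueryBuilder`; a rough sketch, assuming a configured profile and an extra key analogous to the `extra_name` used above (the key shown here is illustrative):

from aiida import load_profile
from aiida.orm import Data, QueryBuilder

load_profile()

extra_name = 'QueryTests/test_with_subclasses'  # illustrative key

# `subclassing=True` (the default) also matches subclasses such as Dict,
# mirroring the behaviour of querying by the `Data` base class above.
builder = QueryBuilder().append(
    Data,
    filters={'extras': {'has_key': extra_name}},
    subclassing=True,
)
print(builder.count(), 'matching Data nodes')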
Example #8
def nodes_incorrect_type(get_pseudo_potential_data, request):
    """Dynamic fixture returning nodes of an incorrect type: a plain `Data` node either isolated, in a tuple, or in a list together with a valid pseudo."""
    from aiida.orm import Data

    if request.param == 'single':
        return Data().store()

    if request.param == 'tuple':
        return (Data().store(),)

    return [get_pseudo_potential_data().store(), Data().store()]
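
The function above reads `request.param`, so in its original module it is presumably registered as a parametrized pytest fixture; the decorator is simply not part of the snippet. The general pattern looks roughly like this (names and parameters are illustrative, not taken from the source):

import pytest

@pytest.fixture(params=['single', 'tuple', 'list'])
def wrong_shape(request):
    """Illustrative fixture: build the return value based on `request.param`."""
    if request.param == 'single':
        return 'node'
    if request.param == 'tuple':
        return ('node',)
    return ['pseudo', 'node']

def test_shapes(wrong_shape):
    # The test body runs once for every entry in `params`.
    assert wrong_shape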
Example #9
    def test_validate_outgoing_workflow(self):
        """Verify that attaching an unstored `Data` node with a `RETURN` link from a `WorkflowNode` raises.

        This would for example be the case if a user, inside a workfunction or work chain, creates a new node based on
        its inputs or the outputs returned by another process and tries to attach it as an output. This would cause the
        provenance of that data node to be lost and should be explicitly forbidden by raising.
        """
        source = WorkflowNode()
        target = Data()

        with self.assertRaises(ValueError):
            target.add_incoming(source, LinkType.RETURN, 'link_label')
Example #10
    def test_load_nodes(self):
        """
        Test for load_node() function.
        """
        from aiida.orm import load_node
        from aiida.backends.sqlalchemy import get_scoped_session

        a = Data()
        a.store()

        self.assertEquals(a.pk, load_node(identifier=a.pk).pk)
        self.assertEquals(a.pk, load_node(identifier=a.uuid).pk)
        self.assertEquals(a.pk, load_node(pk=a.pk).pk)
        self.assertEquals(a.pk, load_node(uuid=a.uuid).pk)

        session = get_scoped_session()

        try:
            session.begin_nested()
            with self.assertRaises(ValueError):
                load_node(identifier=a.pk, pk=a.pk)
        finally:
            session.rollback()

        try:
            session.begin_nested()
            with self.assertRaises(ValueError):
                load_node(pk=a.pk, uuid=a.uuid)
        finally:
            session.rollback()

        try:
            session.begin_nested()
            with self.assertRaises(TypeError):
                load_node(pk=a.uuid)
        finally:
            session.rollback()

        try:
            session.begin_nested()
            with self.assertRaises(TypeError):
                load_node(uuid=a.pk)
        finally:
            session.rollback()

        try:
            session.begin_nested()
            with self.assertRaises(ValueError):
                load_node()
        finally:
            session.rollback()
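
Outside the test harness `load_node` is used the same way: it accepts a pk, a UUID, or the generic `identifier` argument from which the type is inferred, and raises when the arguments are ambiguous or of the wrong type. A minimal sketch, assuming a configured AiiDA profile:

from aiida import load_profile
from aiida.orm import Data, load_node

load_profile()

node = Data().store()

assert load_node(node.pk).uuid == node.uuid           # positional identifier (pk)
assert load_node(uuid=node.uuid).pk == node.pk        # explicit uuid
assert load_node(identifier=node.uuid).pk == node.pk  # type inferred from the value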
Example #11
def get_data_node_and_object(store=True):
    """A function to create a simple data node, with an object."""
    data = Data()
    data.set_attribute_many({str(i): i for i in range(10)})
    data.put_object_from_filelike(StringIO('a' * 10000), 'key')
    if store:
        data.store()
    return (), {'node': data}
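
The stored object can be read back through the same 1.x repository interface, e.g. with `list_object_names` and `get_object_content`. A minimal sketch, assuming a configured profile; the file key is illustrative:

from io import StringIO

from aiida import load_profile
from aiida.orm import Data

load_profile()

data = Data()
data.put_object_from_filelike(StringIO('hello repository'), 'greeting.txt')
data.store()

assert data.list_object_names() == ['greeting.txt']
assert data.get_object_content('greeting.txt') == 'hello repository'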
Example #12
    def test_detect_invalid_nodes_unknown_node_type(self):
        """Test `verdi database integrity detect-invalid-nodes` when node type is invalid."""
        result = self.cli_runner.invoke(cmd_database.detect_invalid_nodes, [])
        self.assertEqual(result.exit_code, 0)
        self.assertClickResultNoException(result)

        # Create a node with invalid type: since there are a lot of validation rules that prevent us from creating an
        # invalid node type normally, we have to do it manually on the database model instance before storing
        node = Data()
        node.backend_entity.dbmodel.node_type = '__main__.SubClass.'
        node.store()

        result = self.cli_runner.invoke(cmd_database.detect_invalid_nodes, [])
        self.assertNotEqual(result.exit_code, 0)
        self.assertIsNotNone(result.exception)
Example #13
    def test_detect_invalid_links_calculation_return(self):
        """Test `verdi database integrity detect-invalid-links` outgoing `return` from `calculation`."""
        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertEqual(result.exit_code, 0)
        self.assertClickResultNoException(result)

        # Create an invalid link: outgoing `return` from a calculation
        data = Data().store().backend_entity
        calculation = CalculationNode().store().backend_entity

        data.add_incoming(calculation, link_type=LinkType.RETURN, link_label='return')

        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertNotEqual(result.exit_code, 0)
        self.assertIsNotNone(result.exception)
Example #14
    def test_detect_invalid_links_workflow_create(self):
        """Test `verdi database integrity detect-invalid-links` outgoing `create` from `workflow`."""
        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertEqual(result.exit_code, 0)
        self.assertClickResultNoException(result)

        # Create an invalid link: outgoing `create` from a workflow
        data = Data().store().backend_entity
        workflow = WorkflowNode().store().backend_entity

        data.add_incoming(workflow, link_type=LinkType.CREATE, link_label='create')

        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertNotEqual(result.exit_code, 0)
        self.assertIsNotNone(result.exception)
Example #15
    def test_add_incoming_return(self):
        """Nodes can have any number of incoming RETURN links, as long as the link triple is unique."""
        source_one = WorkflowNode()
        source_two = WorkflowNode()
        target = Data().store()  # Needs to be stored: see `test_validate_outgoing_workflow`

        target.add_incoming(source_one, LinkType.RETURN, 'link_label')

        # Can only have a single incoming RETURN link from each source node if the label is not unique
        with self.assertRaises(ValueError):
            target.validate_incoming(source_one, LinkType.RETURN, 'link_label')

        # From another source node or using another label is fine
        target.validate_incoming(source_one, LinkType.RETURN, 'other_label')
        target.validate_incoming(source_two, LinkType.RETURN, 'link_label')
Example #16
    def test_detect_invalid_links_create_links(self):
        """Test `verdi database integrity detect-invalid-links` when there are multiple incoming `create` links."""
        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertEqual(result.exit_code, 0)
        self.assertClickResultNoException(result)

        # Create an invalid link: two `create` links
        data = Data().store().backend_entity
        calculation = CalculationNode().store().backend_entity

        data.add_incoming(calculation, link_type=LinkType.CREATE, link_label='create')
        data.add_incoming(calculation, link_type=LinkType.CREATE, link_label='create')

        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertNotEqual(result.exit_code, 0)
        self.assertIsNotNone(result.exception)
Example #17
    def test_dynamic_output(self):
        """Test a process spec with dynamic output enabled."""
        node = Node()
        data = Data()
        self.assertIsNotNone(self.spec.outputs.validate({'key': 'foo'}))
        self.assertIsNotNone(self.spec.outputs.validate({'key': 5}))
        self.assertIsNotNone(self.spec.outputs.validate({'key': node}))
        self.assertIsNone(self.spec.outputs.validate({'key': data}))
Example #18
    def test_node_indegree_unique_triple(self):
        """Test that the validation of links with indegree `unique_triple` works correctly

        The example here is a `Data` node that has two incoming RETURN links with the same label, but from different
        source nodes. This is legal and should pass validation.
        """
        return_one = WorkflowNode()
        return_two = WorkflowNode()
        data = Data().store()  # Needs to be stored: see `test_validate_outgoing_workflow`

        # Verify that adding two return links with the same link label but from different source is allowed
        data.add_incoming(return_one, link_type=LinkType.RETURN, link_label='returned')
        data.add_incoming(return_two, link_type=LinkType.RETURN, link_label='returned')

        uuids_incoming = set(node.uuid for node in data.get_incoming().all_nodes())
        uuids_expected = set([return_one.uuid, return_two.uuid])
        self.assertEqual(uuids_incoming, uuids_expected)
Example #19
    def test_repository_garbage_collection(self):
        """Verify that the repository sandbox folder is cleaned after the node instance is garbage collected."""
        node = Data()
        dirpath = node._repository._get_temp_folder().abspath  # pylint: disable=protected-access

        self.assertTrue(os.path.isdir(dirpath))
        del node
        self.assertFalse(os.path.isdir(dirpath))
Example #20
    def setUpClass(cls, *args, **kwargs):
        """
        Create some nodes to test the NodeParamType parameter type for the command line infrastructure.
        We create an initial node with a random label and then on purpose create two nodes with a label
        that matches exactly the pk and UUID, respectively, of the first one. This allows us to test
        the rules implemented to resolve ambiguities that arise when determining the identifier type.
        """
        super().setUpClass(*args, **kwargs)

        cls.param = NodeParamType()
        cls.entity_01 = Data().store()
        cls.entity_02 = Data().store()
        cls.entity_03 = Data().store()

        cls.entity_01.label = 'data_01'
        cls.entity_02.label = str(cls.entity_01.pk)
        cls.entity_03.label = str(cls.entity_01.uuid)
Example #21
    def test_get_node_by_label(self):
        """Test the get_node_by_label() method of the `LinkManager`

        In particular, check both that it returns the correct values and that it raises the expected
        exceptions where appropriate (no link with the given label, or more than one link with the same label)
        """
        data = Data().store()
        calc_one_a = CalculationNode()
        calc_one_b = CalculationNode()
        calc_two = CalculationNode()

        # Two calcs using the data with the same label
        calc_one_a.add_incoming(data, link_type=LinkType.INPUT_CALC, link_label='input')
        calc_one_b.add_incoming(data, link_type=LinkType.INPUT_CALC, link_label='input')
        # A different label
        calc_two.add_incoming(data, link_type=LinkType.INPUT_CALC, link_label='the_input')

        calc_one_a.store()
        calc_one_b.store()
        calc_two.store()

        # Retrieve a link when the label is unique
        output_the_input = data.get_outgoing(link_type=LinkType.INPUT_CALC).get_node_by_label('the_input')
        self.assertEqual(output_the_input.pk, calc_two.pk)

        with self.assertRaises(exceptions.MultipleObjectsError):
            data.get_outgoing(link_type=LinkType.INPUT_CALC).get_node_by_label('input')

        with self.assertRaises(exceptions.NotExistent):
            data.get_outgoing(link_type=LinkType.INPUT_CALC).get_node_by_label('some_weird_label')
Example #22
    def test_add_incoming_input_work(self):
        """Nodes can have any number of incoming INPUT_WORK links, as long as the link pair is unique."""
        source_one = Data()
        source_two = Data()
        target = WorkflowNode()

        target.add_incoming(source_one, LinkType.INPUT_WORK, 'link_label')

        # Can only have a single incoming INPUT_WORK link from each source node if the label is not unique
        with self.assertRaises(ValueError):
            target.validate_incoming(source_one, LinkType.INPUT_WORK, 'link_label')

        # Using another link label is fine
        target.validate_incoming(source_one, LinkType.INPUT_WORK, 'other_label')

        # However, reusing the same link label, even from another source node, is illegal
        with self.assertRaises(ValueError):
            target.validate_incoming(source_two, LinkType.INPUT_WORK, 'link_label')
Example #23
    def test_delete_collection_incoming_link(self):
        """Test deletion through objects collection raises when there are incoming links."""
        data = Data().store()
        calculation = CalculationNode()
        calculation.add_incoming(data, LinkType.INPUT_CALC, 'input')
        calculation.store()

        with pytest.raises(exceptions.InvalidOperation):
            Node.objects.delete(calculation.pk)
Example #24
    def setUpClass(cls, *args, **kwargs):
        super().setUpClass(*args, **kwargs)
        from aiida.orm import Data, Bool, Float, Int

        cls.node_base = Data().store()
        cls.node_bool_true = Bool(True).store()
        cls.node_bool_false = Bool(False).store()
        cls.node_float = Float(1.0).store()
        cls.node_int = Int(1).store()
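
`Bool`, `Float` and `Int` are the AiiDA base type nodes wrapping plain Python scalars; the wrapped value is accessible through `.value`. A minimal sketch, assuming a configured profile:

from aiida import load_profile
from aiida.orm import Bool, Float, Int

load_profile()

flag = Bool(True).store()
pi = Float(3.14).store()
answer = Int(42).store()

assert flag.value is True        # `.value` returns the plain Python object
assert answer.value + 1 == 43
print(pi.value, type(pi.value))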
Example #25
    def test_erase_stored_force(self):
        """
        Test that _repository.erase removes the content of a stored
        Data node when passing force=True.
        """
        node = Data()
        node.put_object_from_tree(self.tempdir, '')
        node.store()

        self.assertEqual(sorted(node.list_object_names()), ['c.txt', 'subdir'])
        self.assertEqual(sorted(node.list_object_names('subdir')),
                         ['a.txt', 'b.txt', 'nested'])

        node._repository.erase(force=True)  # pylint: disable=protected-access
        self.assertEqual(node.list_object_names(), [])
Example #26
    def test_detect_invalid_links_unknown_link_type(self):
        """Test `verdi database integrity detect-invalid-links` when link type is invalid."""
        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertEqual(result.exit_code, 0)
        self.assertClickResultNoException(result)

        class WrongLinkType(enum.Enum):

            WRONG_CREATE = 'wrong_create'

        # Create an invalid link: invalid link type
        data = Data().store().backend_entity
        calculation = CalculationNode().store().backend_entity

        data.add_incoming(calculation, link_type=WrongLinkType.WRONG_CREATE, link_label='create')

        result = self.cli_runner.invoke(cmd_database.detect_invalid_links, [])
        self.assertNotEqual(result.exit_code, 0)
        self.assertIsNotNone(result.exception)
Example #27
    def test_add_incoming_create(self):
        """Nodes can only have a single incoming CREATE link, independent of the source node."""
        source_one = CalculationNode()
        source_two = CalculationNode()
        target = Data()

        target.add_incoming(source_one, LinkType.CREATE, 'link_label')

        # Can only have a single incoming CREATE link
        with self.assertRaises(ValueError):
            target.validate_incoming(source_one, LinkType.CREATE, 'link_label')

        # Even when the source node is different
        with self.assertRaises(ValueError):
            target.validate_incoming(source_two, LinkType.CREATE, 'link_label')

        # Or when the link label is different
        with self.assertRaises(ValueError):
            target.validate_incoming(source_one, LinkType.CREATE, 'other_label')
Example #28
    def test_delete_collection_logs(self):
        """Test deletion works correctly through objects collection."""
        from aiida.common import timezone

        data_one = Data().store()
        data_two = Data().store()

        log_one = Log(timezone.now(), 'test', 'INFO', data_one.pk).store()
        log_two = Log(timezone.now(), 'test', 'INFO', data_two.pk).store()

        assert len(Log.objects.get_logs_for(data_one)) == 1
        assert Log.objects.get_logs_for(data_one)[0].pk == log_one.pk
        assert len(Log.objects.get_logs_for(data_two)) == 1
        assert Log.objects.get_logs_for(data_two)[0].pk == log_two.pk

        Node.objects.delete(data_two.pk)

        assert len(Log.objects.get_logs_for(data_one)) == 1
        assert Log.objects.get_logs_for(data_one)[0].pk == log_one.pk
        assert len(Log.objects.get_logs_for(data_two)) == 0
Example #29
    def setUpClass(cls, *args, **kwargs):
        super().setUpClass()
        from aiida.orm import Data

        cls.node = Data().store()

        # some of the export tests write in the current directory,
        # make sure it is writeable and we don't pollute the current one
        cls.old_cwd = os.getcwd()
        cls.cwd = tempfile.mkdtemp(__name__)
        os.chdir(cls.cwd)
Example #30
    def test_ordering_limits_offsets_sqla(self):
        """Test ordering limits offsets of SQLA query results."""
        # Creating 10 nodes with an attribute that can be ordered
        for i in range(10):
            node = Data()
            node.set_attribute('foo', i)
            node.store()
        q_b = QueryBuilder().append(Node, project='attributes.foo').order_by(
            {Node: {
                'attributes.foo': {
                    'cast': 'i'
                }
            }})
        res = next(zip(*q_b.all()))
        self.assertEqual(res, tuple(range(10)))

        # Now applying an offset:
        q_b.offset(5)
        res = next(zip(*q_b.all()))
        self.assertEqual(res, tuple(range(5, 10)))

        # Now also applying a limit:
        q_b.limit(3)
        res = next(zip(*q_b.all()))
        self.assertEqual(res, tuple(range(5, 8)))
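
Filters compose with the same ordering, limit and offset machinery. A short sketch along the lines of the nodes created above (attribute name `foo` as in the example; the exact output depends on what else is in the database):

from aiida.orm import Data, QueryBuilder

qb = QueryBuilder()
qb.append(Data, filters={'attributes.foo': {'>=': 5}}, project=['attributes.foo'])
qb.order_by({Data: {'attributes.foo': {'cast': 'i'}}})
qb.limit(3)

# With only the ten nodes from the example present, this prints [5, 6, 7].
print([row[0] for row in qb.all()])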