Code example #1
0
File: test_node.py  Project: CasperWA/aiida_core
    def test_delete_through_utility_method(self):
        """Test deletion works correctly through the `aiida.backends.utils.delete_nodes_and_connections`."""
        from aiida.backends.utils import delete_nodes_and_connections
        from aiida.common import timezone

        # Two stored data nodes feeding one stored calculation.
        first_node = Data().store()
        second_node = Data().store()
        calc = CalculationNode()
        calc.add_incoming(first_node, LinkType.INPUT_CALC, 'input_one')
        calc.add_incoming(second_node, LinkType.INPUT_CALC, 'input_two')
        calc.store()

        first_log = Log(timezone.now(), 'test', 'INFO', first_node.pk).store()
        second_log = Log(timezone.now(), 'test', 'INFO', second_node.pk).store()

        # Each node starts with exactly its own single log entry attached.
        for node, log in ((first_node, first_log), (second_node, second_log)):
            entries = Log.objects.get_logs_for(node)
            assert len(entries) == 1
            assert entries[0].pk == log.pk

        delete_nodes_and_connections([second_node.pk])

        # Deleting the node must cascade to its logs while leaving the
        # other node's log untouched.
        remaining = Log.objects.get_logs_for(first_node)
        assert len(remaining) == 1
        assert remaining[0].pk == first_log.pk
        assert len(Log.objects.get_logs_for(second_node)) == 0
Code example #2
0
File: test_logs.py  Project: zhonger/aiida-core
    def test_log_querybuilder(self):
        """ Test querying for logs by joining on nodes in the QueryBuilder """
        from aiida.orm import QueryBuilder

        # One log created through the helper, plus two more on the same node.
        log_1, calc = self.create_log()
        levelname = logging.getLevelName(LOG_LEVEL_REPORT)
        log_2 = Log(now(), 'loggername', levelname, calc.id, 'log message #2')
        log_3 = Log(now(), 'loggername', levelname, calc.id, 'log message #3')

        # Retrieve the node by joining on one specific log ('log_2').
        query = QueryBuilder()
        query.append(Log, tag='log', filters={'id': log_2.id})
        query.append(orm.CalculationNode, with_log='log', project=['uuid'])
        matches = query.all()

        self.assertEqual(len(matches), 1)
        for row in matches:
            self.assertIn(str(row[0]), [calc.uuid])

        # Retrieve all logs for the node by joining on the node itself.
        query = QueryBuilder()
        query.append(orm.CalculationNode, tag='calc', filters={'id': calc.id})
        query.append(Log, with_node='calc', project=['uuid'])
        rows = query.all()

        self.assertEqual(len(rows), 3)
        expected_uuids = [str(log_1.uuid), str(log_2.uuid), str(log_3.uuid)]
        for row in rows:
            self.assertIn(str(row[0]), expected_uuids)
Code example #3
0
File: test_logs.py  Project: CasperWA/aiida_core
 def test_find_limit(self):
     """Test the limit option of log.find."""
     node = orm.Data().store()
     limit = 2
     # Create twice as many log records as the limit we will request.
     for _ in range(2 * limit):
         self.log_record['dbnode_id'] = node.id
         Log(**self.log_record)
     # `find` must cap the number of returned entries at `limit`.
     found = Log.objects.find(limit=limit)
     self.assertEqual(len(found), limit)
Code example #4
0
File: test_logs.py  Project: CasperWA/aiida_core
    def test_objects_find(self):
        """Put logs in and find them.

        Stores ten log records on a freshly created node and verifies that
        `Log.objects.all` returns all of them as `Log` instances.
        """
        node = orm.Data().store()
        for _ in range(10):
            # Shallow-copy the template record: the original code aliased
            # `self.log_record` directly, so setting `dbnode_id` mutated the
            # shared class-level fixture and leaked state into other tests.
            record = dict(self.log_record)
            record['dbnode_id'] = node.id
            Log(**record)

        entries = Log.objects.all()
        self.assertEqual(10, len(entries))
        self.assertIsInstance(entries[0], Log)
Code example #5
0
File: test_node.py  Project: CasperWA/aiida_core
    def test_delete_collection_logs(self):
        """Test deletion works correctly through objects collection."""
        from aiida.common import timezone

        node_a = Data().store()
        node_b = Data().store()

        log_a = Log(timezone.now(), 'test', 'INFO', node_a.pk).store()
        log_b = Log(timezone.now(), 'test', 'INFO', node_b.pk).store()

        # Each node initially carries exactly its own log entry.
        for node, log in ((node_a, log_a), (node_b, log_b)):
            entries = Log.objects.get_logs_for(node)
            assert len(entries) == 1
            assert entries[0].pk == log.pk

        Node.objects.delete(node_b.pk)

        # Deleting through the collection must cascade to the deleted
        # node's logs only.
        surviving = Log.objects.get_logs_for(node_a)
        assert len(surviving) == 1
        assert surviving[0].pk == log_a.pk
        assert len(Log.objects.get_logs_for(node_b)) == 0
Code example #6
0
    def setUpClass(cls, *args, **kwargs):  # pylint: disable=too-many-locals, too-many-statements
        """
        Besides the standard setup we need to add a few more objects in the
        database to be able to explore different requests/filters/orderings etc.
        """
        # call parent setUpClass method
        super(RESTApiTestCase, cls).setUpClass()

        # Connect the app and the api.
        # Init the api by connecting it to the app (N.B. respect the following
        # order, api.__init__). Use a dedicated local name instead of
        # rebinding the ``kwargs`` parameter, which silently discarded any
        # keyword arguments passed by the caller.
        api_config = dict(PREFIX=cls._url_prefix,
                          PERPAGE_DEFAULT=cls._PERPAGE_DEFAULT,
                          LIMIT_DEFAULT=cls._LIMIT_DEFAULT)

        cls.app = App(__name__)
        cls.app.config['TESTING'] = True
        AiidaApi(cls.app, **api_config)

        # create test inputs: a simple cubic structure with one Ba atom
        cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))
        structure = orm.StructureData(cell=cell)
        structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
        structure.store()
        structure.add_comment('This is test comment.')
        structure.add_comment('Add another comment.')

        # CIF derived from the structure so CIF endpoints have data
        cif = orm.CifData(ase=structure.get_ase())
        cif.store()

        parameter1 = orm.Dict(dict={'a': 1, 'b': 2})
        parameter1.store()

        parameter2 = orm.Dict(dict={'c': 3, 'd': 4})
        parameter2.store()

        kpoint = orm.KpointsData()
        kpoint.set_kpoints_mesh([4, 4, 4])
        kpoint.store()

        resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}

        calcfunc = orm.CalcFunctionNode(computer=cls.computer)
        calcfunc.store()

        # Calc job node with attributes, extras and two inputs, used by the
        # node/attribute/extra filter tests
        calc = orm.CalcJobNode(computer=cls.computer)
        calc.set_option('resources', resources)
        calc.set_attribute('attr1', 'OK')
        calc.set_attribute('attr2', 'OK')
        calc.set_extra('extra1', False)
        calc.set_extra('extra2', 'extra_info')

        calc.add_incoming(structure,
                          link_type=LinkType.INPUT_CALC,
                          link_label='link_structure')
        calc.add_incoming(parameter1,
                          link_type=LinkType.INPUT_CALC,
                          link_label='link_parameter')

        aiida_in = 'The input file\nof the CalcJob node'
        # Add the calcjob_inputs folder with the aiida.in file to the CalcJobNode repository
        with tempfile.NamedTemporaryFile(mode='w+') as handle:
            handle.write(aiida_in)
            handle.flush()
            handle.seek(0)
            calc.put_object_from_filelike(handle,
                                          key='calcjob_inputs/aiida.in',
                                          force=True)
        calc.store()

        # create log message for calcjob
        import logging
        from aiida.common.log import LOG_LEVEL_REPORT
        from aiida.common.timezone import now
        from aiida.orm import Log

        log_record = {
            'time': now(),
            'loggername': 'loggername',
            'levelname': logging.getLevelName(LOG_LEVEL_REPORT),
            'dbnode_id': calc.id,
            'message': 'This is a template record message',
            'metadata': {
                'content': 'test'
            },
        }
        Log(**log_record)

        aiida_out = 'The output file\nof the CalcJob node'
        retrieved_outputs = orm.FolderData()
        # Add the calcjob_outputs folder with the aiida.out file to the FolderData node
        with tempfile.NamedTemporaryFile(mode='w+') as handle:
            handle.write(aiida_out)
            handle.flush()
            handle.seek(0)
            retrieved_outputs.put_object_from_filelike(
                handle, key='calcjob_outputs/aiida.out', force=True)
        retrieved_outputs.store()
        retrieved_outputs.add_incoming(calc,
                                       link_type=LinkType.CREATE,
                                       link_label='retrieved')

        kpoint.add_incoming(calc,
                            link_type=LinkType.CREATE,
                            link_label='create')

        # A second, bare calc job node (no attributes/extras/links)
        calc1 = orm.CalcJobNode(computer=cls.computer)
        calc1.set_option('resources', resources)
        calc1.store()

        # Extra computers covering different transports and schedulers, so
        # computer-list filters/orderings have variety to work with
        dummy_computers = [{
            'name': 'test1',
            'hostname': 'test1.epfl.ch',
            'transport_type': 'ssh',
            'scheduler_type': 'pbspro',
        }, {
            'name': 'test2',
            'hostname': 'test2.epfl.ch',
            'transport_type': 'ssh',
            'scheduler_type': 'torque',
        }, {
            'name': 'test3',
            'hostname': 'test3.epfl.ch',
            'transport_type': 'local',
            'scheduler_type': 'slurm',
        }, {
            'name': 'test4',
            'hostname': 'test4.epfl.ch',
            'transport_type': 'ssh',
            'scheduler_type': 'slurm',
        }]

        for dummy_computer in dummy_computers:
            computer = orm.Computer(**dummy_computer)
            computer.store()

        # Prepare typical REST responses
        cls.process_dummy_data()
Code example #7
0
File: test_logs.py  Project: CasperWA/aiida_core
 def create_log(self):
     """Store a new calculation node and build a log record pointing at it.

     :return: tuple of the created `Log` and the stored calculation node
     """
     stored_node = orm.CalculationNode().store()
     log_record = self.log_record
     log_record['dbnode_id'] = stored_node.id
     return Log(**log_record), stored_node
Code example #8
0
File: test_logs.py  Project: CasperWA/aiida_core
    def test_raise_wrong_metadata_type_error(self):
        """
        Test a TypeError exception is thrown with string metadata.
        Also test that metadata is correctly created.
        """
        from aiida.common import json

        # Create CalculationNode
        calc = orm.CalculationNode().store()

        # Metadata as a plain dict: the accepted format
        correct_metadata_format = {
            'msg': 'Life is like riding a bicycle.',
            'args': '()',
            'name': 'aiida.orm.node.process.calculation.CalculationNode'
        }

        # The same metadata stringified: the rejected format
        wrong_metadata_format = str(correct_metadata_format)

        # The same metadata after a JSON serialize/deserialize round-trip
        json_metadata_format = json.loads(json.dumps(correct_metadata_format))

        def build_log(metadata):
            # Construct a Log on `calc` with the given metadata; every other
            # argument is held constant so only the metadata type varies.
            return Log(now(),
                       'loggername',
                       logging.getLevelName(LOG_LEVEL_REPORT),
                       calc.id,
                       'To keep your balance, you must keep moving',
                       metadata=metadata)

        # String metadata must be rejected with a TypeError
        with self.assertRaises(TypeError):
            build_log(wrong_metadata_format)

        # A plain dict is accepted and stored verbatim
        correct_metadata_log = build_log(correct_metadata_format)
        self.assertEqual(correct_metadata_log.metadata,
                         correct_metadata_format)

        # A JSON-round-tripped dict is accepted as well
        json_metadata_log = build_log(json_metadata_format)
        self.assertEqual(json_metadata_log.metadata, json_metadata_format)

        # Omitting metadata entirely must not raise...
        no_metadata_log = build_log(None)

        # ...and results in an empty metadata dict
        self.assertEqual(no_metadata_log.metadata, {})