def test_addremovenodes(self):
    """Test the `group addnodes` and `group removenodes` commands."""
    from aiida.orm.calculation import Calculation

    # Create and store a calculation node to add to / remove from the group
    calc = Calculation()
    calc._set_attr("attr1", "OK")  # pylint: disable=protected-access
    calc._set_attr("attr2", "OK")  # pylint: disable=protected-access
    calc.store()

    result = self.runner.invoke(group_addnodes, ["--force", "--group=dummygroup1", calc.uuid])
    self.assertIsNone(result.exception)

    # Check that the node was added to the group using the group show command
    result = self.runner.invoke(group_show, ["dummygroup1"])
    self.assertIsNone(result.exception)
    self.assertIn("Calculation", result.output)
    self.assertIn(str(calc.pk), result.output)

    # Remove the same node again
    result = self.runner.invoke(group_removenodes, ["--force", "--group=dummygroup1", calc.uuid])
    self.assertIsNone(result.exception)

    # Check that the node was removed from the group using the group show command
    result = self.runner.invoke(group_show, ["dummygroup1"])
    self.assertIsNone(result.exception)
    self.assertNotIn("Calculation", result.output)
    self.assertNotIn(str(calc.pk), result.output)
def test_calculation_updatable_not_copied(self):
    """Verify that copying a stored Calculation does not carry over its updatable attributes."""
    original = Calculation()
    original._set_attr('state', self.stateval)
    original.store()

    clone = original.copy()

    # The updatable 'state' attribute must be absent on the copy
    with self.assertRaises(AttributeError):
        clone.get_attr('state')
def test_calculation_updatable_attribute(self):
    """
    Check that updatable attributes, and only those, can be mutated for a stored
    but unsealed Calculation, and that sealing makes even those immutable.
    """
    a = Calculation()
    attrs_to_set = {
        'bool': self.boolval,
        'integer': self.intval,
        'float': self.floatval,
        'string': self.stringval,
        'dict': self.dictval,
        'list': self.listval,
        'state': self.stateval
    }

    # `.items()` is equivalent for iteration and works on both Python 2 and 3,
    # unlike the Python-2-only `iteritems()`
    for k, v in attrs_to_set.items():
        a._set_attr(k, v)

    # Check before storing
    a._set_attr(Calculation.PROCESS_STATE_KEY, self.stateval)
    self.assertEqual(a.get_attr(Calculation.PROCESS_STATE_KEY), self.stateval)

    a.store()

    # Check after storing
    self.assertEqual(a.get_attr(Calculation.PROCESS_STATE_KEY), self.stateval)

    # I should be able to mutate the updatable attribute but not the others
    a._set_attr(Calculation.PROCESS_STATE_KEY, 'FINISHED')
    a._del_attr(Calculation.PROCESS_STATE_KEY)

    # Deleting a non-existing attribute should raise an AttributeError
    with self.assertRaises(AttributeError):
        a._del_attr(Calculation.PROCESS_STATE_KEY)

    # Regular (non-updatable) attributes are immutable once the node is stored
    with self.assertRaises(ModificationNotAllowed):
        a._set_attr('bool', False)

    with self.assertRaises(ModificationNotAllowed):
        a._del_attr('bool')

    a.seal()

    # After sealing, even updatable attributes should be immutable
    with self.assertRaises(ModificationNotAllowed):
        a._set_attr(Calculation.PROCESS_STATE_KEY, 'FINISHED')

    with self.assertRaises(ModificationNotAllowed):
        a._del_attr(Calculation.PROCESS_STATE_KEY)
def test_db_log_handler(self):
    """
    Verify that the database log handler is attached correctly by firing a log
    message through the standard logging module attached to a calculation node.
    """
    message = 'Testing logging of critical failure'
    node = Calculation()

    # Logging through an unstored node should not write anything to the database
    node.logger.critical(message)
    stored_logs = self._backend.log.find()
    self.assertEquals(len(stored_logs), 0)

    # Once the node is stored, messages above the log level should be persisted
    node.store()
    node.logger.critical(message)
    stored_logs = self._backend.log.find()
    self.assertEquals(len(stored_logs), 1)
    self.assertEquals(stored_logs[0].message, message)
def test_db_log_handler(self):
    """
    Verify that the db log handler is attached correctly by firing a log
    message through the regular logging module attached to a calculation node
    """
    message = 'Testing logging of critical failure'
    calc = Calculation()

    # Make sure that global logging is not accidentally disabled
    logging.disable(logging.NOTSET)

    # Firing a log for an unstored node should not end up in the database
    calc.logger.critical(message)

    logs = self._backend.log.find()
    self.assertEqual(len(logs), 0)

    # After storing the node, logs above the log level should be stored
    calc.store()
    calc.logger.critical(message)

    logs = self._backend.log.find()
    self.assertEqual(len(logs), 1)
    self.assertEqual(logs[0].message, message)
def setUpClass(cls): """ Basides the standard setup we need to add few more objects in the database to be able to explore different requests/filters/orderings etc. """ # call parent setUpClass method super(RESTApiTestCase, cls).setUpClass() # connect the app and the api # Init the api by connecting it the the app (N.B. respect the following # order, api.__init__) kwargs = dict(PREFIX=cls._url_prefix, PERPAGE_DEFAULT=cls._PERPAGE_DEFAULT, LIMIT_DEFAULT=cls._LIMIT_DEFAULT) cls.app = App(__name__) cls.app.config['TESTING'] = True api = AiidaApi(cls.app, **kwargs) # create test inputs cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)) structure = StructureData(cell=cell) structure.append_atom(position=(0., 0., 0.), symbols=['Ba']) structure.store() cif = CifData(ase=structure.get_ase()) cif.store() parameter1 = ParameterData(dict={"a": 1, "b": 2}) parameter1.store() parameter2 = ParameterData(dict={"c": 3, "d": 4}) parameter2.store() kpoint = KpointsData() kpoint.set_kpoints_mesh([4, 4, 4]) kpoint.store() calc = Calculation() calc._set_attr("attr1", "OK") calc._set_attr("attr2", "OK") calc.store() calc.add_link_from(structure) calc.add_link_from(parameter1) kpoint.add_link_from(calc, link_type=LinkType.CREATE) calc1 = Calculation() calc1.store() from aiida.orm.computer import Computer dummy_computers = [{ "name": "test1", "hostname": "test1.epfl.ch", "transport_type": "ssh", "scheduler_type": "pbspro", }, { "name": "test2", "hostname": "test2.epfl.ch", "transport_type": "ssh", "scheduler_type": "torque", }, { "name": "test3", "hostname": "test3.epfl.ch", "transport_type": "local", "scheduler_type": "slurm", }, { "name": "test4", "hostname": "test4.epfl.ch", "transport_type": "ssh", "scheduler_type": "slurm", }] for dummy_computer in dummy_computers: computer = Computer(**dummy_computer) computer.store() # Prepare typical REST responses cls.process_dummy_data()
def setUpClass(cls): """ Basides the standard setup we need to add few more objects in the database to be able to explore different requests/filters/orderings etc. """ # call parent setUpClass method super(RESTApiTestCase, cls).setUpClass() # create test inputs cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)) structure = StructureData(cell=cell) structure.append_atom(position=(0., 0., 0.), symbols=['Ba']) structure.store() parameter1 = ParameterData(dict={"a": 1, "b": 2}) parameter1.store() parameter2 = ParameterData(dict={"c": 3, "d": 4}) parameter2.store() kpoint = KpointsData() kpoint.set_kpoints_mesh([4, 4, 4]) kpoint.store() calc = Calculation() calc._set_attr("attr1", "OK") calc._set_attr("attr2", "OK") calc.store() calc.add_link_from(structure) calc.add_link_from(parameter1) kpoint.add_link_from(calc, link_type=LinkType.CREATE) calc1 = Calculation() calc1.store() from aiida.orm.computer import Computer dummy_computers = [{ "name": "test1", "hostname": "test1.epfl.ch", "transport_type": "ssh", "scheduler_type": "pbspro", }, { "name": "test2", "hostname": "test2.epfl.ch", "transport_type": "ssh", "scheduler_type": "torque", }, { "name": "test3", "hostname": "test3.epfl.ch", "transport_type": "local", "scheduler_type": "slurm", }, { "name": "test4", "hostname": "test4.epfl.ch", "transport_type": "ssh", "scheduler_type": "slurm", }] for dummy_computer in dummy_computers: computer = Computer(**dummy_computer) computer.store() # Prepare typical REST responses cls.process_dummy_data()