def test_deletion(self):
    from aiida.orm.computer import Computer
    from aiida.orm import delete_computer, JobCalculation
    from aiida.common.exceptions import InvalidOperation

    newcomputer = Computer(name="testdeletioncomputer", hostname='localhost',
                           transport_type='local', scheduler_type='pbspro',
                           workdir='/tmp/aiida').store()

    # This should be possible, because nothing is using this computer
    delete_computer(newcomputer)

    calc_params = {
        'computer': self.computer,
        'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}
    }

    _ = JobCalculation(**calc_params).store()

    # This should fail, because there is at least a calculation
    # using this computer (the one created just above)
    with self.assertRaises(InvalidOperation):
        delete_computer(self.computer)
def test_1(self):
    from aiida.orm import delete_computer

    # Creating a folder for the import/export files
    temp_folder = tempfile.mkdtemp()
    try:
        StructureData = DataFactory('structure')
        sd = StructureData()
        sd.store()

        calc = JobCalculation()
        calc.set_computer(self.computer)
        calc.set_resources({"num_machines": 1,
                            "num_mpiprocs_per_machine": 1})
        calc.store()

        calc.add_link_from(sd)

        pks = [sd.pk, calc.pk]

        # Record all attributes of both nodes, keyed by UUID, so they can be
        # compared after the export/import round trip
        attrs = {}
        for pk in pks:
            node = load_node(pk)
            attrs[node.uuid] = dict()
            for k in node.attrs():
                attrs[node.uuid][k] = node.get_attr(k)

        filename = os.path.join(temp_folder, "export.tar.gz")

        export([calc.dbnode], outfile=filename, silent=True)

        # Reset the database and remove the computer, so that the archive is
        # imported into a clean profile
        self.tearDownClass()
        self.setUpClass()
        delete_computer(self.computer)

        # NOTE: it is better to load new nodes by uuid, rather than assuming
        # that they will have the first 3 pks. In fact, a recommended policy
        # in databases is that pks always increment, even if you've deleted
        # elements
        import_data(filename, silent=True)

        # Check that the attributes of the re-imported nodes match the ones
        # recorded before the export
        for uuid in attrs.keys():
            node = load_node(uuid)
            for k in node.attrs():
                self.assertEquals(attrs[uuid][k], node.get_attr(k))
    finally:
        # Deleting the created temporary folder
        shutil.rmtree(temp_folder, ignore_errors=True)