def populate_restapi_database(clear_database_before_test):
    """Populates the database with a considerable set of nodes to test the restAPI"""
    # pylint: disable=unused-argument
    from aiida import orm

    # Store three structures, keeping a handle on the first so that a
    # CifData node can be derived from its ASE representation below.
    reference_structure = orm.StructureData().store()
    for _ in range(2):
        orm.StructureData().store()

    # A couple of plain data nodes of assorted types.
    for _ in range(2):
        orm.Dict().store()

    orm.CifData(ase=reference_structure.get_ase()).store()
    orm.KpointsData().store()
    orm.FolderData().store()

    # Process nodes of each flavour: one calcfunction, two calcjobs,
    # two workfunctions and one workchain.
    orm.CalcFunctionNode().store()
    for _ in range(2):
        orm.CalcJobNode().store()
    for _ in range(2):
        orm.WorkFunctionNode().store()
    orm.WorkChainNode().store()
def test_get_process_function_report(self):
    """Test the `get_process_function_report` utility."""
    from aiida.cmdline.utils.common import get_process_function_report

    message = 'You have been warned'

    node = orm.CalcFunctionNode()
    node.store()

    # Emit a warning through the node's logger and check that it shows
    # up in the generated report.
    node.logger.warning(message)
    self.assertIn(message, get_process_function_report(node))
def create_provenance(self):
    """Create an example provenance graph.

    Builds a small graph of data and process nodes linked by input/create/
    call/return links, and returns all of them in an ``AttributeDict`` keyed
    by their labels.

    NOTE(review): the ordering of ``store()`` vs ``add_incoming()`` calls is
    deliberate — presumably to respect AiiDA's rules on which link types may
    be added to stored/unstored nodes; do not reorder without checking.
    """
    # Two input Dict nodes shared by the workchain and the first calcjob.
    pd0 = orm.Dict()
    pd0.label = 'pd0'
    pd0.store()

    pd1 = orm.Dict()
    pd1.label = 'pd1'
    pd1.store()

    # Top-level workchain, left in the RUNNING state.
    wc1 = orm.WorkChainNode()
    wc1.set_process_state(ProcessState.RUNNING)
    wc1.add_incoming(pd0, link_type=LinkType.INPUT_WORK, link_label='input1')
    wc1.add_incoming(pd1, link_type=LinkType.INPUT_WORK, link_label='input2')
    wc1.store()

    # Calcjob called by the workchain; finished successfully (exit status 0).
    calc1 = orm.CalcJobNode()
    calc1.computer = self.computer
    calc1.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    calc1.label = 'calc1'
    calc1.set_process_state(ProcessState.FINISHED)
    calc1.set_exit_status(0)
    calc1.add_incoming(pd0, link_type=LinkType.INPUT_CALC, link_label='input1')
    calc1.add_incoming(pd1, link_type=LinkType.INPUT_CALC, link_label='input2')
    calc1.add_incoming(wc1, link_type=LinkType.CALL_CALC, link_label='call1')
    calc1.store()

    # RemoteData created by calc1 (CREATE link added after storing).
    rd1 = orm.RemoteData()
    rd1.label = 'rd1'
    rd1.set_remote_path('/x/y.py')
    rd1.computer = self.computer
    rd1.store()
    rd1.add_incoming(calc1, link_type=LinkType.CREATE, link_label='output')

    # Extra input Dict for the calcfunction.
    pd2 = orm.Dict()
    pd2.label = 'pd2'
    pd2.store()

    # Calcfunction called by the workchain; finished with a non-zero
    # exit status (200), i.e. a failed process.
    calcf1 = orm.CalcFunctionNode()
    calcf1.label = 'calcf1'
    calcf1.set_process_state(ProcessState.FINISHED)
    calcf1.set_exit_status(200)
    calcf1.add_incoming(rd1, link_type=LinkType.INPUT_CALC, link_label='input1')
    calcf1.add_incoming(pd2, link_type=LinkType.INPUT_CALC, link_label='input2')
    calcf1.add_incoming(wc1, link_type=LinkType.CALL_CALC, link_label='call2')
    calcf1.store()

    # Outputs of the calcfunction, also returned by the workchain.
    pd3 = orm.Dict()
    pd3.label = 'pd3'

    fd1 = orm.FolderData()
    fd1.label = 'fd1'

    pd3.add_incoming(calcf1, link_type=LinkType.CREATE, link_label='output1')
    pd3.store()
    fd1.add_incoming(calcf1, link_type=LinkType.CREATE, link_label='output2')
    fd1.store()

    # RETURN links from the workchain are added after the targets are stored.
    pd3.add_incoming(wc1, link_type=LinkType.RETURN, link_label='output1')
    fd1.add_incoming(wc1, link_type=LinkType.RETURN, link_label='output2')

    return AttributeDict({
        'pd0': pd0,
        'pd1': pd1,
        'calc1': calc1,
        'rd1': rd1,
        'pd2': pd2,
        'calcf1': calcf1,
        'pd3': pd3,
        'fd1': fd1,
        'wc1': wc1
    })
def setUpClass(cls, *args, **kwargs):  # pylint: disable=too-many-locals, too-many-statements
    """
    Besides the standard setup we need to add few more objects in the
    database to be able to explore different requests/filters/orderings etc.
    """
    # call parent setUpClass method
    super(RESTApiTestCase, cls).setUpClass()

    # connect the app and the api
    # Init the api by connecting it to the app (N.B. respect the following
    # order, api.__init__)
    kwargs = dict(PREFIX=cls._url_prefix, PERPAGE_DEFAULT=cls._PERPAGE_DEFAULT, LIMIT_DEFAULT=cls._LIMIT_DEFAULT)
    cls.app = App(__name__)
    cls.app.config['TESTING'] = True
    AiidaApi(cls.app, **kwargs)

    # create test inputs
    cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
    structure.store()
    # Two comments on the structure, for testing the comments endpoint.
    structure.add_comment('This is test comment.')
    structure.add_comment('Add another comment.')

    # CifData derived from the structure's ASE representation.
    cif = orm.CifData(ase=structure.get_ase())
    cif.store()

    parameter1 = orm.Dict(dict={'a': 1, 'b': 2})
    parameter1.store()

    parameter2 = orm.Dict(dict={'c': 3, 'd': 4})
    parameter2.store()

    kpoint = orm.KpointsData()
    kpoint.set_kpoints_mesh([4, 4, 4])
    kpoint.store()

    resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}

    calcfunc = orm.CalcFunctionNode(computer=cls.computer)
    calcfunc.store()

    # Main calcjob, decorated with attributes and extras so that
    # attribute/extra filter queries have something to match.
    calc = orm.CalcJobNode(computer=cls.computer)
    calc.set_option('resources', resources)
    calc.set_attribute('attr1', 'OK')
    calc.set_attribute('attr2', 'OK')
    calc.set_extra('extra1', False)
    calc.set_extra('extra2', 'extra_info')

    # Input links are added before the calcjob is stored.
    calc.add_incoming(structure, link_type=LinkType.INPUT_CALC, link_label='link_structure')
    calc.add_incoming(parameter1, link_type=LinkType.INPUT_CALC, link_label='link_parameter')

    aiida_in = 'The input file\nof the CalcJob node'
    # Add the calcjob_inputs folder with the aiida.in file to the CalcJobNode repository
    with tempfile.NamedTemporaryFile(mode='w+') as handle:
        handle.write(aiida_in)
        handle.flush()
        handle.seek(0)
        calc.put_object_from_filelike(handle, key='calcjob_inputs/aiida.in', force=True)
    calc.store()

    # create log message for calcjob
    import logging
    from aiida.common.log import LOG_LEVEL_REPORT
    from aiida.common.timezone import now
    from aiida.orm import Log

    log_record = {
        'time': now(),
        'loggername': 'loggername',
        'levelname': logging.getLevelName(LOG_LEVEL_REPORT),
        'dbnode_id': calc.id,
        'message': 'This is a template record message',
        'metadata': {
            'content': 'test'
        },
    }
    # NOTE(review): the Log instance is not stored explicitly here —
    # presumably the constructor persists it; confirm against the Log API.
    Log(**log_record)

    aiida_out = 'The output file\nof the CalcJob node'
    retrieved_outputs = orm.FolderData()
    # Add the calcjob_outputs folder with the aiida.out file to the FolderData node
    with tempfile.NamedTemporaryFile(mode='w+') as handle:
        handle.write(aiida_out)
        handle.flush()
        handle.seek(0)
        retrieved_outputs.put_object_from_filelike(handle, key='calcjob_outputs/aiida.out', force=True)
    retrieved_outputs.store()
    # CREATE links are added after the creating calcjob is stored.
    retrieved_outputs.add_incoming(calc, link_type=LinkType.CREATE, link_label='retrieved')

    kpoint.add_incoming(calc, link_type=LinkType.CREATE, link_label='create')

    # A second, bare calcjob with only the mandatory resources option.
    calc1 = orm.CalcJobNode(computer=cls.computer)
    calc1.set_option('resources', resources)
    calc1.store()

    # Extra computers covering different transport/scheduler combinations,
    # for testing computer list filters and orderings.
    dummy_computers = [{
        'name': 'test1',
        'hostname': 'test1.epfl.ch',
        'transport_type': 'ssh',
        'scheduler_type': 'pbspro',
    }, {
        'name': 'test2',
        'hostname': 'test2.epfl.ch',
        'transport_type': 'ssh',
        'scheduler_type': 'torque',
    }, {
        'name': 'test3',
        'hostname': 'test3.epfl.ch',
        'transport_type': 'local',
        'scheduler_type': 'slurm',
    }, {
        'name': 'test4',
        'hostname': 'test4.epfl.ch',
        'transport_type': 'ssh',
        'scheduler_type': 'slurm',
    }]

    for dummy_computer in dummy_computers:
        computer = orm.Computer(**dummy_computer)
        computer.store()

    # Prepare typical REST responses
    cls.process_dummy_data()