Example #1
    def test_inline_export(self):
        from aiida.tools.dbexporters.tcod import export_values
        import tempfile

        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write('''
                data_test
                _cell_length_a    10
                _cell_length_b    10
                _cell_length_c    10
                _cell_angle_alpha 90
                _cell_angle_beta  90
                _cell_angle_gamma 90
                loop_
                _atom_site_label
                _atom_site_fract_x
                _atom_site_fract_y
                _atom_site_fract_z
                C 0 0 0
                O 0.5 0.5 0.5
            ''')
            tmpf.flush()
            a = orm.CifData(filepath=tmpf.name)

        s = a.get_structure(store=True)
        val = export_values(s)
        script = val.first_block()['_tcod_file_contents'][1]
        function = '_get_aiida_structure_pymatgen_inline'
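        # find() and rfind() return the first and last occurrence, respectively:
        # the assertion holds only if the inline-function name appears more than
        # once in the exported file contents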
        self.assertNotEqual(script.find(function), script.rfind(function))
Example #2
def populate_restapi_database(clear_database_before_test):
    """Populates the database with a considerable set of nodes to test the restAPI"""
    # pylint: disable=unused-argument
    from aiida import orm

    struct_forcif = orm.StructureData().store()
    orm.StructureData().store()
    orm.StructureData().store()

    orm.Dict().store()
    orm.Dict().store()

    orm.CifData(ase=struct_forcif.get_ase()).store()

    orm.KpointsData().store()

    orm.FolderData().store()

    orm.CalcFunctionNode().store()
    orm.CalcJobNode().store()
    orm.CalcJobNode().store()

    orm.WorkFunctionNode().store()
    orm.WorkFunctionNode().store()
    orm.WorkChainNode().store()
Example #3
    def generate_class_instance(data_class):
        """Generate a dummy `Data` instance for the given sub class."""
        dirpath_fixtures = os.path.abspath(
            os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                         'fixtures'))
        if data_class is orm.CifData:
            instance = data_class(
                file=os.path.join(dirpath_fixtures, 'data', 'Si.cif'))
            return instance

        if data_class is orm.UpfData:
            filename = os.path.join(
                dirpath_fixtures, 'pseudos',
                'Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF')
            instance = data_class(file=filename)
            return instance

        if data_class is orm.StructureData:
            instance = orm.CifData(file=os.path.join(
                dirpath_fixtures, 'data', 'Si.cif')).get_structure()
            return instance

        if data_class is orm.BandsData:
            kpoints = orm.KpointsData()
            kpoints.set_cell([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
            kpoints.set_kpoints([[0., 0., 0.], [0.1, 0.1, 0.1]])
            instance = data_class()
            instance.set_kpointsdata(kpoints)
            instance.set_bands([[1.0, 2.0], [3.0, 4.0]])
            return instance

        if data_class is orm.TrajectoryData:
            instance = data_class()
            stepids = numpy.array([60])
            times = stepids * 0.01
            cells = numpy.array([[[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]]])
            positions = numpy.array([[[0., 0., 0.]]])
            instance.set_trajectory(stepids=stepids,
                                    cells=cells,
                                    symbols=['H'],
                                    positions=positions,
                                    times=times)
            return instance

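        # NOTE: this branch is unreachable, because `orm.UpfData` is already
        # handled by the earlier branch above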
        if data_class is orm.UpfData:
            filepath_base = os.path.abspath(
                os.path.join(__file__, os.pardir, os.pardir, os.pardir,
                             'fixtures', 'pseudos'))
            filepath_carbon = os.path.join(filepath_base,
                                           'C_pbe_v1.2.uspp.F.UPF')
            instance = data_class(file=filepath_carbon)
            return instance

        raise RuntimeError(
            'no instance generator implemented for class `{}`. If you have added a `_prepare_*` method '
            'for this data class, add a generator of a dummy instance here'.format(data_class))
Example #4
    def test_cif_structure_roundtrip(self):
        from aiida.tools.dbexporters.tcod import export_cif, export_values
        from aiida.common.folders import SandboxFolder
        import tempfile

        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write('''
                data_test
                _cell_length_a    10
                _cell_length_b    10
                _cell_length_c    10
                _cell_angle_alpha 90
                _cell_angle_beta  90
                _cell_angle_gamma 90
                loop_
                _atom_site_label
                _atom_site_fract_x
                _atom_site_fract_y
                _atom_site_fract_z
                C 0 0 0
                O 0.5 0.5 0.5
            ''')
            tmpf.flush()
            a = orm.CifData(filepath=tmpf.name)

        c = a.get_structure()
        c.store()
        pd = orm.Dict()

        code = orm.Code(local_executable='test.sh')
        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write("#/bin/bash\n\necho test run\n")
            tmpf.flush()
            code.put_object_from_filelike(tmpf, 'test.sh')

        code.store()

        calc = orm.CalcJobNode(computer=self.computer)
        calc.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        calc.add_incoming(code, LinkType.INPUT_CALC, "code")
        calc.set_option('environment_variables', {
            'PATH': '/dev/null',
            'USER': 'unknown'
        })

        with tempfile.NamedTemporaryFile(mode='w+', prefix="Fe") as tmpf:
            tmpf.write("<UPF version=\"2.0.1\">\nelement=\"Fe\"\n")
            tmpf.flush()
            upf = orm.UpfData(filepath=tmpf.name)
            upf.store()
            calc.add_incoming(upf, LinkType.INPUT_CALC, "upf")

        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write("data_test")
            tmpf.flush()
            cif = orm.CifData(filepath=tmpf.name)
            cif.store()
            calc.add_incoming(cif, LinkType.INPUT_CALC, "cif")

        with SandboxFolder() as fhandle:
            calc.put_object_from_tree(fhandle.abspath)
        calc.store()

        fd = orm.FolderData()
        with fd.open('_scheduler-stdout.txt', 'w') as fhandle:
            fhandle.write(u"standard output")

        with fd.open('_scheduler-stderr.txt', 'w') as fhandle:
            fhandle.write(u"standard error")

        fd.store()
        fd.add_incoming(calc, LinkType.CREATE, calc.link_label_retrieved)

        pd.add_incoming(calc, LinkType.CREATE, "create1")
        pd.store()

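        # exporting with parameters is expected to fail while the structure `c`
        # has no CREATE link from the calculation that also created `pd`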
        with self.assertRaises(ValueError):
            export_cif(c, parameters=pd)

        c.add_incoming(calc, LinkType.CREATE, "create2")
        export_cif(c, parameters=pd)

        values = export_values(c, parameters=pd)
        values = values['0']

        self.assertEqual(values['_tcod_computation_environment'],
                         ['PATH=/dev/null\nUSER=unknown'])
        self.assertEqual(values['_tcod_computation_command'],
                         ['cd 1; ./_aiidasubmit.sh'])
Example #5
    @classmethod
    def setUpClass(cls, *args, **kwargs):  # pylint: disable=too-many-locals, too-many-statements
        """
        Besides the standard setup we need to add a few more objects to the
        database to be able to explore different requests/filters/orderings etc.
        """
        # call parent setUpClass method
        super(RESTApiTestCase, cls).setUpClass()

        # connect the app and the api
        # Init the api by connecting it to the app (N.B. respect the following
        # order, api.__init__)
        kwargs = dict(PREFIX=cls._url_prefix,
                      PERPAGE_DEFAULT=cls._PERPAGE_DEFAULT,
                      LIMIT_DEFAULT=cls._LIMIT_DEFAULT)

        cls.app = App(__name__)
        cls.app.config['TESTING'] = True
        AiidaApi(cls.app, **kwargs)

        # create test inputs
        cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.))
        structure = orm.StructureData(cell=cell)
        structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
        structure.store()
        structure.add_comment('This is test comment.')
        structure.add_comment('Add another comment.')

        cif = orm.CifData(ase=structure.get_ase())
        cif.store()

        parameter1 = orm.Dict(dict={'a': 1, 'b': 2})
        parameter1.store()

        parameter2 = orm.Dict(dict={'c': 3, 'd': 4})
        parameter2.store()

        kpoint = orm.KpointsData()
        kpoint.set_kpoints_mesh([4, 4, 4])
        kpoint.store()

        resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}

        calcfunc = orm.CalcFunctionNode(computer=cls.computer)
        calcfunc.store()

        calc = orm.CalcJobNode(computer=cls.computer)
        calc.set_option('resources', resources)
        calc.set_attribute('attr1', 'OK')
        calc.set_attribute('attr2', 'OK')
        calc.set_extra('extra1', False)
        calc.set_extra('extra2', 'extra_info')

        calc.add_incoming(structure,
                          link_type=LinkType.INPUT_CALC,
                          link_label='link_structure')
        calc.add_incoming(parameter1,
                          link_type=LinkType.INPUT_CALC,
                          link_label='link_parameter')

        aiida_in = 'The input file\nof the CalcJob node'
        # Add the calcjob_inputs folder with the aiida.in file to the CalcJobNode repository
        with tempfile.NamedTemporaryFile(mode='w+') as handle:
            handle.write(aiida_in)
            handle.flush()
            handle.seek(0)
            calc.put_object_from_filelike(handle,
                                          key='calcjob_inputs/aiida.in',
                                          force=True)
        calc.store()

        # create log message for calcjob
        import logging
        from aiida.common.log import LOG_LEVEL_REPORT
        from aiida.common.timezone import now
        from aiida.orm import Log

        log_record = {
            'time': now(),
            'loggername': 'loggername',
            'levelname': logging.getLevelName(LOG_LEVEL_REPORT),
            'dbnode_id': calc.id,
            'message': 'This is a template record message',
            'metadata': {
                'content': 'test'
            },
        }
        Log(**log_record)

        aiida_out = 'The output file\nof the CalcJob node'
        retrieved_outputs = orm.FolderData()
        # Add the calcjob_outputs folder with the aiida.out file to the FolderData node
        with tempfile.NamedTemporaryFile(mode='w+') as handle:
            handle.write(aiida_out)
            handle.flush()
            handle.seek(0)
            retrieved_outputs.put_object_from_filelike(
                handle, key='calcjob_outputs/aiida.out', force=True)
        retrieved_outputs.store()
        retrieved_outputs.add_incoming(calc,
                                       link_type=LinkType.CREATE,
                                       link_label='retrieved')

        kpoint.add_incoming(calc,
                            link_type=LinkType.CREATE,
                            link_label='create')

        calc1 = orm.CalcJobNode(computer=cls.computer)
        calc1.set_option('resources', resources)
        calc1.store()

        dummy_computers = [{
            'name': 'test1',
            'hostname': 'test1.epfl.ch',
            'transport_type': 'ssh',
            'scheduler_type': 'pbspro',
        }, {
            'name': 'test2',
            'hostname': 'test2.epfl.ch',
            'transport_type': 'ssh',
            'scheduler_type': 'torque',
        }, {
            'name': 'test3',
            'hostname': 'test3.epfl.ch',
            'transport_type': 'local',
            'scheduler_type': 'slurm',
        }, {
            'name': 'test4',
            'hostname': 'test4.epfl.ch',
            'transport_type': 'ssh',
            'scheduler_type': 'slurm',
        }]

        for dummy_computer in dummy_computers:
            computer = orm.Computer(**dummy_computer)
            computer.store()

        # Prepare typical REST responses
        cls.process_dummy_data()