Example #1
    def prepare_bands_calculation(self):
        """Prepare all the neccessary input links to run the calculation"""
        self.report("prepare calculation 1")
        self.ctx.inputs = AttributeDict({
            'code': self.inputs.code,
            'structure': self.ctx.structure,
            '_options': self.ctx.options,
        })


        # Conduction band
        cond_band = add_condband(self.ctx.structure)
        self.report("number of states")
        self.report(cond_band)
        self.ctx.parameters['FORCE_EVAL']['DFT']['SCF']['ADDED_MOS'] = cond_band



        # k-point path and high-symmetry point coordinates generated by seekpath
        path = self.ctx.kpoints.dict['path']
        point_coord = self.ctx.kpoints.dict['point_coords']

        kpath = get_kpoints_path_cp2k(point_coord, path)
        self.ctx.parameters['FORCE_EVAL']['DFT']['PRINT']['BAND_STRUCTURE']['KPOINT_SET'] = kpath
        #self.ctx.parameters['FORCE_EVAL']['DFT']['PRINT']['KPOINT_SET']['NPOINTS'] = 14

        # use the new parameters
        p = ParameterData(dict=self.ctx.parameters)
        p.store()
        self.ctx.inputs['parameters'] = p
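The step above builds self.ctx.inputs as an AttributeDict, adjusts the plain dictionary held in self.ctx.parameters, and only at the end wraps it in a stored ParameterData node; the same pattern recurs in the next two examples. A minimal standalone sketch of that final wrap-and-store step, assuming the legacy aiida.orm.data.parameter API used here (the CP2K keys are illustrative only):

from aiida.orm.data.parameter import ParameterData

# plain Python dict holding the (illustrative) CP2K input tree
parameters = {
    'FORCE_EVAL': {
        'DFT': {'SCF': {'ADDED_MOS': 10, 'SCF_GUESS': 'ATOMIC'}},
    },
}

# wrap the dict in a node and persist it; a stored node is immutable,
# so later changes require building a new ParameterData instance
p = ParameterData(dict=parameters)
p.store()

# the stored content can be read back as a regular dict
added_mos = p.get_dict()['FORCE_EVAL']['DFT']['SCF']['ADDED_MOS']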
Example #2
    def prepare_calculation(self):
        """Prepare all the neccessary input links to run the calculation"""
        self.ctx.inputs = AttributeDict({
            'code': self.inputs.code,
            'structure': self.ctx.structure,
            '_options': self.ctx.options,
        })

        if self.ctx.restart_calc is not None:
            self.ctx.inputs['retrieved_parent_folder'] = self.ctx.restart_calc

        # use the new parameters
        p = ParameterData(dict=self.ctx.parameters)
        p.store()
        self.ctx.inputs['parameters'] = p
Example #3
    def prepare_calculation(self):
        """Prepare all the neccessary input links to run the calculation"""
        self.ctx.inputs = AttributeDict({
            'code': self.inputs.code,
            'structure': self.ctx.structure,
            '_options': self.ctx.options,
        })

        # restart from the previous calculation only if the necessary data are
        # provided
        if self.ctx.restart_calc:
            self.ctx.inputs['parent_folder'] = self.ctx.restart_calc
            self.ctx.parameters['FORCE_EVAL']['DFT']['SCF'][
                'SCF_GUESS'] = 'RESTART'
        else:
            self.ctx.parameters['FORCE_EVAL']['DFT']['SCF'][
                'SCF_GUESS'] = 'ATOMIC'

        # use the new parameters
        p = ParameterData(dict=self.ctx.parameters)
        p.store()
        self.ctx.inputs['parameters'] = p
Example #4
File: query.py Project: zooks97/aiida_core
    def test_subclassing(self):
        from aiida.orm.data.structure import StructureData
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm import Node, Data
        from aiida.orm.querybuilder import QueryBuilder
        s = StructureData()
        s._set_attr('cat', 'miau')
        s.store()

        d = Data()
        d._set_attr('cat', 'miau')
        d.store()

        p = ParameterData(dict=dict(cat='miau'))
        p.store()

        n = Node()
        n._set_attr('cat', 'miau')
        n.store()

        # Asking for a Node with attributes.cat == 'miau' should give 4 results:
        qb = QueryBuilder().append(Node, filters={'attributes.cat': 'miau'})
        self.assertEqual(qb.count(), 4)

        qb = QueryBuilder().append(Data, filters={'attributes.cat': 'miau'})
        self.assertEqual(qb.count(), 3)

        # Asking for the specific lowest subclass should give one result
        for cls in (StructureData, ParameterData):
            qb = QueryBuilder().append(cls, filters={'attributes.cat': 'miau'})
            self.assertEqual(qb.count(), 1)

        # Now disallow subclassing, which should give 1 result for each class
        for cls in (StructureData, ParameterData, Node, Data):
            qb = QueryBuilder().append(cls,
                                       filters={'attributes.cat': 'miau'},
                                       subclassing=False)
            self.assertEqual(qb.count(), 1)
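The test above relies on QueryBuilder.append matching subclasses by default, which is why a query on Data also returns the StructureData and ParameterData nodes, while passing subclassing=False restricts the match to the exact class. A minimal sketch of the same query pattern, using only calls that appear in the test:

from aiida.orm.data.parameter import ParameterData
from aiida.orm.querybuilder import QueryBuilder

# count only ParameterData nodes whose stored dict contains cat == 'miau'
qb = QueryBuilder().append(ParameterData,
                           filters={'attributes.cat': 'miau'},
                           subclassing=False)
n_exact = qb.count()   # number of matching nodes of exactly this class
rows = qb.all()        # list of [node] rows for further processing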
Example #5
class TestVerdiDataParameter(AiidaTestCase):
    """
    Testing verdi data parameter
    """
    @classmethod
    def setUpClass(cls):
        super(TestVerdiDataParameter, cls).setUpClass()

    def setUp(self):
        self.p = ParameterData()
        self.p.set_dict({'a': 1, 'b': 2})
        self.p.store()

        self.cli_runner = CliRunner()

    def test_parametershowhelp(self):
        output = sp.check_output(
            ['verdi', 'data', 'parameter', 'show', '--help'])
        self.assertIn('Usage:', output,
                      "Sub-command verdi data parameter show --help failed.")

    def test_parametershow(self):
        supported_formats = ['json_date']
        for format in supported_formats:
            options = [str(self.p.id)]
            res = self.cli_runner.invoke(cmd_parameter.show,
                                         options,
                                         catch_exceptions=False)
            self.assertEquals(
                res.exit_code, 0,
                "The command verdi data parameter show did not"
                " finish correctly")
        self.assertIn(
            '"a": 1', res.output_bytes,
            'The string "a": 1 was not found in the output'
            ' of verdi data parameter show')
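The two tests exercise the same sub-command in different ways: test_parametershowhelp spawns a real verdi subprocess, while test_parametershow calls the click command in-process through CliRunner, which is faster and exposes the exit code and output directly. A minimal sketch of the in-process pattern, assuming cmd_parameter is imported as in the test module and pk is the pk of a stored ParameterData node:

from click.testing import CliRunner

runner = CliRunner()
# run the click command in-process; catch_exceptions=False lets any
# unexpected exception surface as a normal traceback in the test
result = runner.invoke(cmd_parameter.show, [str(pk)], catch_exceptions=False)

assert result.exit_code == 0        # the command finished without error
assert '"a": 1' in result.output    # the stored dict is printed as JSON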
Example #6
    def test_cif_structure_roundtrip(self):
        from aiida.tools.dbexporters.tcod import export_cif, export_values
        from aiida.orm import Code
        from aiida.orm import JobCalculation
        from aiida.orm.data.cif import CifData
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.data.upf import UpfData
        from aiida.orm.data.folder import FolderData
        from aiida.common.folders import SandboxFolder
        from aiida.common.datastructures import calc_states
        from aiida.common.links import LinkType
        import tempfile

        with tempfile.NamedTemporaryFile() as f:
            f.write('''
                data_test
                _cell_length_a    10
                _cell_length_b    10
                _cell_length_c    10
                _cell_angle_alpha 90
                _cell_angle_beta  90
                _cell_angle_gamma 90
                loop_
                _atom_site_label
                _atom_site_fract_x
                _atom_site_fract_y
                _atom_site_fract_z
                C 0 0 0
                O 0.5 0.5 0.5
            ''')
            f.flush()
            a = CifData(file=f.name)

        c = a._get_aiida_structure()
        c.store()
        pd = ParameterData()

        code = Code(local_executable='test.sh')
        with tempfile.NamedTemporaryFile() as f:
            f.write("#/bin/bash\n\necho test run\n")
            f.flush()
            code.add_path(f.name, 'test.sh')

        code.store()

        calc = JobCalculation(computer=self.computer)
        calc.set_resources({'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        calc.add_link_from(code, "code")
        calc.set_environment_variables({
            'PATH': '/dev/null',
            'USER': '******'
        })

        with tempfile.NamedTemporaryFile(prefix="Fe") as f:
            f.write("<UPF version=\"2.0.1\">\nelement=\"Fe\"\n")
            f.flush()
            upf = UpfData(file=f.name)
            upf.store()
            calc.add_link_from(upf, "upf")

        with tempfile.NamedTemporaryFile() as f:
            f.write("data_test")
            f.flush()
            cif = CifData(file=f.name)
            cif.store()
            calc.add_link_from(cif, "cif")

        calc.store()
        calc._set_state(calc_states.SUBMITTING)
        with SandboxFolder() as f:
            calc._store_raw_input_folder(f.abspath)

        fd = FolderData()
        with open(
                fd._get_folder_pathsubfolder.get_abs_path(
                    calc._SCHED_OUTPUT_FILE), 'w') as f:
            f.write("standard output")
            f.flush()

        with open(
                fd._get_folder_pathsubfolder.get_abs_path(
                    calc._SCHED_ERROR_FILE), 'w') as f:
            f.write("standard error")
            f.flush()

        fd.store()
        fd.add_link_from(calc, calc._get_linkname_retrieved(), LinkType.CREATE)

        pd.add_link_from(calc, "calc", LinkType.CREATE)
        pd.store()

        with self.assertRaises(ValueError):
            export_cif(c, parameters=pd)

        c.add_link_from(calc, "calc", LinkType.CREATE)
        export_cif(c, parameters=pd)

        values = export_values(c, parameters=pd)
        values = values['0']

        self.assertEquals(values['_tcod_computation_environment'],
                          ['PATH=/dev/null\nUSER=unknown'])
        self.assertEquals(values['_tcod_computation_command'],
                          ['cd 1; ./_aiidasubmit.sh'])
Example #7
    def test_complex_graph_import_export(self):
        """
        This test checks that a small but somewhat complex graph can be
        correctly exported and imported.

        It will create the graph, store it in the database, export it to a
        file and import it again. At the end it checks that the initial nodes
        are present in the imported graph.
        """
        import tempfile
        import shutil
        import os

        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.data.folder import FolderData
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.data.remote import RemoteData
        from aiida.common.links import LinkType
        from aiida.orm.importexport import export, import_data
        from aiida.orm.utils import load_node
        from aiida.common.exceptions import NotExistent

        temp_folder = tempfile.mkdtemp()
        try:
            calc1 = JobCalculation()
            calc1.set_computer(self.computer)
            calc1.set_resources({
                "num_machines": 1,
                "num_mpiprocs_per_machine": 1
            })
            calc1.label = "calc1"
            calc1.store()
            calc1._set_state(u'RETRIEVING')

            pd1 = ParameterData()
            pd1.label = "pd1"
            pd1.store()

            pd2 = ParameterData()
            pd2.label = "pd2"
            pd2.store()

            rd1 = RemoteData()
            rd1.label = "rd1"
            rd1.set_remote_path("/x/y.py")
            rd1.set_computer(self.computer)
            rd1.store()
            rd1.add_link_from(calc1, link_type=LinkType.CREATE)

            calc2 = JobCalculation()
            calc2.set_computer(self.computer)
            calc2.set_resources({
                "num_machines": 1,
                "num_mpiprocs_per_machine": 1
            })
            calc2.label = "calc2"
            calc2.store()
            calc2.add_link_from(pd1, link_type=LinkType.INPUT)
            calc2.add_link_from(pd2, link_type=LinkType.INPUT)
            calc2.add_link_from(rd1, link_type=LinkType.INPUT)
            calc2._set_state(u'SUBMITTING')

            fd1 = FolderData()
            fd1.label = "fd1"
            fd1.store()
            fd1.add_link_from(calc2, link_type=LinkType.CREATE)

            node_uuids_labels = {
                calc1.uuid: calc1.label,
                pd1.uuid: pd1.label,
                pd2.uuid: pd2.label,
                rd1.uuid: rd1.label,
                calc2.uuid: calc2.label,
                fd1.uuid: fd1.label
            }

            filename = os.path.join(temp_folder, "export.tar.gz")
            export([fd1.dbnode], outfile=filename, silent=True)

            self.clean_db()

            import_data(filename, silent=True, ignore_unknown_nodes=True)

            for uuid, label in node_uuids_labels.iteritems():
                try:
                    load_node(uuid)
                except NotExistent:
                    self.fail("Node with UUID {} and label {} was not "
                              "found.".format(uuid, label))

        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)
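Stripped of the graph construction, the round trip in Example #7 reduces to three calls: export a list of nodes to an archive, clean or switch the database, and import the archive again. A minimal sketch using the same functions, assuming node is any stored AiiDA node and the legacy aiida.orm.importexport module shown above:

import os
import tempfile

from aiida.orm.importexport import export, import_data
from aiida.orm.utils import load_node

archive = os.path.join(tempfile.mkdtemp(), "export.tar.gz")

# write the node, together with everything needed to reconstruct its
# provenance, into a compressed archive file
export([node.dbnode], outfile=archive, silent=True)

# ... after cleaning the database or switching to another profile ...
import_data(archive, silent=True)

# the imported node can be loaded again through its UUID
reloaded = load_node(node.uuid)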
Example #8
# -*- coding: utf-8 -*-

from aiida.backends.utils import load_dbenv, is_dbenv_loaded

__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved."
__license__ = "MIT license, see LICENSE.txt file."
__authors__ = "The AiiDA team."
__version__ = "0.7.0"

if not is_dbenv_loaded():
    load_dbenv()

from aiida.orm.data.simple import Int
from aiida.workflows2.run import run, asyncd

from aiida.tutorial.simple_wf import SimpleWF
from aiida.orm.data.parameter import ParameterData


p = ParameterData(dict=dict(number=12))
p.store()
asyncd(SimpleWF, params=p)


def submit_stress_tensor(**kwargs):
    # get code
    #code = Code.get(label='pw.sirius.x', computername='piz_daint', useremail='*****@*****.**')
    code = test_and_get_code('pw.sirius.x', expected_code_type='quantumespresso.pw')
    #code.set_prepend_text(prep_text)

    # calculation should always belong to some group, otherwise things get messy after some time
    stress_tensor_grp, created = Group.get_or_create(name=kwargs['group'])
    
    # create parameters
    params = create_calculation_parameters(code,
                                           kwargs.get('partition', 'cpu'),
                                           kwargs.get('num_ranks_per_node', 36),
                                           kwargs.get('num_ranks_kp', 1),
                                           kwargs.get('num_ranks_diag', 1))
    # load structure
    structure_id = kwargs['structure_pk']
    structure = load_node(structure_id)
    
    # generate k-points
    params['kpoints'] = KpointsData()
    params['kpoints'].set_kpoints_mesh(kwargs.get('kmesh', [24, 24, 24]), offset=(0.0, 0.0, 0.0))
    params['atomic_files'] = kwargs['atomic_files']
    params['calculation_wallclock_seconds'] = kwargs.get('time_limit', 3600)
    params['structure'] = structure
    params['num_points'] = 5
    params['group'] = kwargs['group']
    params['kpoints'].store()
    params['calculation_parameters'].store()
    params['calculation_settings'].store()

    stress_tensor_dict = {}
    stress_tensor_dict['label'] = 'stress_tensor_' + structure.get_formula() + '_' + code.label
    stress_tensor_dict['description'] = "Stress tensor for structure with PK=%i"%structure.pk
    stress_tensor_dict['calc_pk'] = []
    stress_tensor_dict['num_points'] = params['num_points']
    stress_tensor_dict['structure_pk'] = structure.pk
    stress_tensor_dict['code_pk'] = code.pk
    stress_tensor_dict['job_tag'] = params['job_tag']

    # lattice scale factors (around 1.0) and strain amplitudes for the distorted structures
    scales = np.linspace(0.992, 1.008, num=params['num_points']).tolist()
    eps = np.linspace(-0.008, 0.008, num=params['num_points']).tolist()

    use_symmetry = False

    if use_symmetry:
        SGN = get_space_group_number(structure_id=structure_id)
    else:
        SGN = 1

    LC = get_Laue_dict(space_group_number=SGN)

    def_list = get_Lagrange_distorted_index(structure_id=structure_id, LC=LC)

    SCs = len(def_list)

    alat_steps = params['num_points']

    distorted_structure_index = []
    eps_index = 0
    for i in def_list:
        for a in eps:
            eps_index += 1
            distorted_structure_index.append(eps_index)

    for ii in distorted_structure_index:

        a = eps[ii % alat_steps - 1]
        i = def_list[int((ii - 1) / alat_steps)]

        M_Lagrange_eps = get_Lagrange_strain_matrix(eps=a, def_mtx_index=i)

        structure_new = get_Lagrange_distorted_structure(structure_id=structure_id, M_Lagrange_eps=M_Lagrange_eps)

        structure_new.store()
        
        calc_label = 'gs_' + structure.get_formula() + '_' + code.label
        calc_desc = params['job_tag']
    
        # create calculation
        calc = create_calculation(structure_new, params, calc_label, calc_desc)
        calc.store()
        print "created calculation with uuid='{}' and PK={}".format(calc.uuid, calc.pk)
        stress_tensor_grp.add_nodes([calc])
        calc.submit()
        stress_tensor_dict['calc_pk'].append(calc.pk)
    
    stress_tensor_node = ParameterData(dict=stress_tensor_dict)
    stress_tensor_node.store()
    stress_tensor_grp.add_nodes([stress_tensor_node])
    print "created stress tensor node with uuid='{}' and PK={}".format(stress_tensor_node.uuid, stress_tensor_node.pk)