    def get_lapw_calculation(self, lapw_structure, lapw_parameters, lapw_kpoint):

        params = self.get_parameters()

        lapw_codename = params['lapw_codename']
        num_machines = params['num_machines']
        max_wallclock_seconds = params['max_wallclock_seconds']
        lapwbasis_family = params['lapwbasis_family']

        code = Code.get_from_string(lapw_codename)
        computer = code.get_remote_computer()

        LAPWCalc = CalculationFactory('exciting.exciting')

        calc = LAPWCalc(computer=computer)
        calc.set_max_wallclock_seconds(max_wallclock_seconds)
        calc.set_resources({"num_machines": num_machines})
        calc.store()

        calc.use_code(code)

        calc.use_structure(lapw_structure)
        calc.use_lapwbasis_from_family(lapwbasis_family)
        calc.use_parameters(lapw_parameters)
        calc.use_kpoints(lapw_kpoint)

        return calc

    def get_pw_calculation(self, pw_structure, pw_parameters, pw_kpoint):

        params = self.get_parameters()

        pw_codename = params['pw_codename']
        num_machines = params['num_machines']
        max_wallclock_seconds = params['max_wallclock_seconds']
        pseudo_family = params['pseudo_family']

        code = Code.get_from_string(pw_codename)
        computer = code.get_remote_computer()

        QECalc = CalculationFactory('quantumespresso.pw')

        calc = QECalc(computer=computer)
        calc.set_max_wallclock_seconds(max_wallclock_seconds)
        calc.set_resources({"num_machines": num_machines})
        calc.store()

        calc.use_code(code)

        calc.use_structure(pw_structure)
        calc.use_pseudos_from_family(pseudo_family)
        calc.use_parameters(pw_parameters)
        calc.use_kpoints(pw_kpoint)

        return calc
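Both helpers read their settings from self.get_parameters(). A minimal sketch of the parameter dictionary they expect is given below; every value (code labels, family names, resources) is an illustrative assumption, not something defined in these snippets.

# Hypothetical parameter dictionary consumed by the two helpers above;
# adjust the labels and families to match your own AiiDA profile.
params = {
    'pw_codename': 'pw@localhost',          # assumed Quantum ESPRESSO pw.x code label
    'lapw_codename': 'exciting@localhost',  # assumed exciting code label
    'num_machines': 1,
    'max_wallclock_seconds': 3600,
    'pseudo_family': 'SSSP',                # assumed pseudopotential family name
    'lapwbasis_family': 'default',          # assumed LAPW basis-set family name
}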
Example 3
    def test_autogroup_filter_class(self):  # pylint: disable=too-many-locals
        """Check if the autogroup is properly generated but filtered classes are skipped."""
        from aiida.orm import Code, QueryBuilder, Node, AutoGroup, load_node

        script_content = textwrap.dedent("""\
            import sys
            from aiida.orm import Computer, Int, ArrayData, KpointsData, CalculationNode, WorkflowNode
            from aiida.plugins import CalculationFactory
            from aiida.engine import run_get_node
            ArithmeticAdd = CalculationFactory('arithmetic.add')

            computer = Computer(
                label='localhost-example-{}'.format(sys.argv[1]),
                hostname='localhost',
                description='my computer',
                transport_type='local',
                scheduler_type='direct',
                workdir='/tmp'
            ).store()
            computer.configure()

            code = Code(
                input_plugin_name='arithmetic.add',
                remote_computer_exec=[computer, '/bin/true']).store()
            inputs = {
                'x': Int(1),
                'y': Int(2),
                'code': code,
                'metadata': {
                    'options': {
                        'resources': {
                            'num_machines': 1,
                            'num_mpiprocs_per_machine': 1
                        }
                    }
                }
            }

            node1 = KpointsData().store()
            node2 = ArrayData().store()
            node3 = Int(3).store()
            node4 = CalculationNode().store()
            node5 = WorkflowNode().store()
            _, node6 = run_get_node(ArithmeticAdd, **inputs)
            print(node1.pk)
            print(node2.pk)
            print(node3.pk)
            print(node4.pk)
            print(node5.pk)
            print(node6.pk)
            """)

        for idx, (
                flags,
                kptdata_in_autogroup,
                arraydata_in_autogroup,
                int_in_autogroup,
                calc_in_autogroup,
                wf_in_autogroup,
                calcarithmetic_in_autogroup,
        ) in enumerate([
            [['--exclude', 'aiida.data:array.kpoints'], False, True, True,
             True, True, True],
                # Check if % works anywhere - both 'int' and 'array.kpoints' contain an 'i'
            [['--exclude', 'aiida.data:%i%'], False, True, False, True, True,
             True],
            [['--exclude', 'aiida.data:int'], True, True, False, True, True,
             True],
            [['--exclude', 'aiida.data:%'], False, False, False, True, True,
             True],
            [['--exclude', 'aiida.data:array', 'aiida.data:array.%'], False,
             False, True, True, True, True],
            [[
                '--exclude', 'aiida.data:array', 'aiida.data:array.%',
                'aiida.data:int'
            ], False, False, False, True, True, True],
            [['--exclude', 'aiida.calculations:arithmetic.add'], True, True,
             True, True, True, False],
            [
                ['--include', 'aiida.node:process.calculation'
                 ],  # Base type, no specific plugin
                False,
                False,
                False,
                True,
                False,
                False
            ],
            [
                ['--include', 'aiida.node:process.workflow'
                 ],  # Base type, no specific plugin
                False,
                False,
                False,
                False,
                True,
                False
            ],
            [[], True, True, True, True, True, True],
        ]):
            with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
                fhandle.write(script_content)
                fhandle.flush()

                options = ['--auto-group'] + flags + [
                    '--', fhandle.name, str(idx)
                ]
                result = self.cli_runner.invoke(cmd_run.run, options)
                self.assertClickResultNoException(result)

                # Parse the six PKs printed by the script
                pk1, pk2, pk3, pk4, pk5, pk6 = [
                    int(pk_str) for pk_str in result.output.split()
                ]
                for pk in (pk1, pk2, pk3, pk4, pk5, pk6):
                    load_node(pk)  # Check that the node can be loaded

                def get_all_auto_groups(pk):
                    """Return all AutoGroups that contain the node with the given pk."""
                    queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
                    queryb.append(AutoGroup, with_node='node', project='*')
                    return queryb.all()

                all_auto_groups_kptdata = get_all_auto_groups(pk1)
                all_auto_groups_arraydata = get_all_auto_groups(pk2)
                all_auto_groups_int = get_all_auto_groups(pk3)
                all_auto_groups_calc = get_all_auto_groups(pk4)
                all_auto_groups_wf = get_all_auto_groups(pk5)
                all_auto_groups_calcarithmetic = get_all_auto_groups(pk6)

                self.assertEqual(
                    len(all_auto_groups_kptdata),
                    1 if kptdata_in_autogroup else 0,
                    'Wrong number of nodes in autogroup associated with the KpointsData node '
                    "just created with flags '{}'".format(' '.join(flags)))
                self.assertEqual(
                    len(all_auto_groups_arraydata),
                    1 if arraydata_in_autogroup else 0,
                    'Wrong number of nodes in autogroup associated with the ArrayData node '
                    "just created with flags '{}'".format(' '.join(flags)))
                self.assertEqual(
                    len(all_auto_groups_int), 1 if int_in_autogroup else 0,
                    'Wrong number of nodes in autogroup associated with the Int node '
                    "just created with flags '{}'".format(' '.join(flags)))
                self.assertEqual(
                    len(all_auto_groups_calc), 1 if calc_in_autogroup else 0,
                    'Wrong number of nodes in autogroup associated with the CalculationNode '
                    "just created with flags '{}'".format(' '.join(flags)))
                self.assertEqual(
                    len(all_auto_groups_wf), 1 if wf_in_autogroup else 0,
                    'Wrong number of nodes in autogroup associated with the WorkflowNode '
                    "just created with flags '{}'".format(' '.join(flags)))
                self.assertEqual(
                    len(all_auto_groups_calcarithmetic),
                    1 if calcarithmetic_in_autogroup else 0,
                    'Wrong number of nodes in autogroup associated with the ArithmeticAdd CalcJobNode '
                    "just created with flags '{}'".format(' '.join(flags)))
Example 4
VaspCalculation = CalculationFactory('vasp.vasp')

options = {
    'resources': {
        'num_machines': 1,
        'tot_num_mpiprocs': 1,
    },
    'max_wallclock_seconds': 1800,
}

kpoints = KpointsData()
kpoints.set_kpoints_mesh([1, 1, 1])

inputs = {
    'code': Code.get_from_string('VASP.5.4.4@Raichu'),
    'structure': load_node(888),
    'kpoints': kpoints,
    'parameters': ParameterData(dict={}),
    'settings': ParameterData(dict={}),
    'pseudo_family': Str('vasp-pbe'),
    'options': ParameterData(dict={
        'max_wallclock_seconds': 3600,
        'max_memory_kb': 10000000,
        'resources': {'num_machines': 1},
    }),
    'max_iterations': Int(1),
}

process = VaspCalculation.process()
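The snippet stops after obtaining the process class. As a hedged sketch of how such a pre-1.0 calculation was then launched (the import location of submit varied across 0.x releases, so treat it as an assumption):

from aiida.work.launch import submit  # assumed 0.x-era location of `submit`

# Launch the calculation with the inputs assembled above
running = submit(process, **inputs)
print('Submitted VASP calculation with pk={}'.format(running.pk))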
Example 5
    def test_code_local(self):
        """Test local code."""
        import tempfile

        from aiida.orm import Code
        from aiida.common.exceptions import ValidationError

        code = Code(local_executable='test.sh')
        with self.assertRaises(ValidationError):
            # No file with name test.sh
            code.store()

        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
            fhandle.write('#!/bin/bash\n\necho test run\n')
            fhandle.flush()
            code.put_object_from_filelike(fhandle, 'test.sh')

        code.store()
        self.assertTrue(code.can_run_on(self.computer))
        self.assertEqual(code.get_local_executable(), 'test.sh')
        self.assertEqual(code.get_execname(), './test.sh')
                    type=float,
                    help='time in minutes')

args = parser.parse_args()

sirius_config_fname = 'sirius.json'

assert os.path.exists(sirius_config_fname)
assert os.path.exists('nlcg.yaml')
# convert sirius.json into an AiiDA provenance input
with open(sirius_config_fname, 'r') as fhandle:
    sirius_json = json.load(fhandle)

# get code
computer = helpers.get_computer('localhost')
# code = helpers.get_code(entry_point='sirius.py.nlcg', computer=computer)
code = Code.get_from_string('sirius.py.nlcg@' + computer.get_name())

####################
# # Prepare inputs #
####################
SiriusParameters = DataFactory('sirius.scf')
StructureData = DataFactory('structure')
KpointsData = DataFactory('array.kpoints')
Dict = DataFactory('dict')
SinglefileData = DataFactory('singlefile')

NLCGParameters = DataFactory('sirius.py.nlcg')
parameters = SiriusParameters(sirius_json)
with open('nlcg.yaml', 'r') as fhandle:
    nlcgconfig = yaml.safe_load(fhandle)
nlcgconfig = {'System': nlcgconfig['System'], 'CG': nlcgconfig['CG']}
def main(zeopp_code_label, raspa_code_label):
    """
    Prepare inputs and submit the Isotherm workchain.
    Usage: verdi run run_HTSEvWorkChain_KAXQIL_2comp.py zeopp@teslin raspa37@teslin
    """

    builder = HTSEvWorkChain.get_builder()

    builder.metadata.label = "test_ev"

    builder.structure = CifData(
        file=os.path.abspath('../aiida_matdis/data/KAXQIL_clean_P1.cif'),
        label="kaxqil")

    builder.mixture = Dict(
        dict={
            'comp1': {
                'name': 'xenon',
                'molfraction': 0.20
            },
            'comp2': {
                'name': 'krypton',
                'molfraction': 0.80
            }
        })

    builder.ev_output = load_node(21064)

    builder.zeopp.code = Code.get_from_string(zeopp_code_label)
    builder.zeopp.atomic_radii = SinglefileData(
        file=os.path.abspath('../aiida_matdis/data/UFF.rad'))

    builder.raspa_base.raspa.code = Code.get_from_string(raspa_code_label)

    options = {
        "resources": {
            "num_machines": 1,
            "tot_num_mpiprocs": 1,
        },
        "max_wallclock_seconds": 1 * 60 * 60,
        "withmpi": False,
    }
    builder.raspa_base.raspa.metadata.options = options
    builder.zeopp.metadata.options = options

    builder.parameters = Dict(
        dict={
            'ff_framework': 'UFF',  # Default: UFF
            'ff_cutoff': 12.5,
            'temperature': 298,  # (K) Note: a higher temperature adsorbs less and runs faster
            'ff_tail_corrections': False,
            'zeopp_volpo_samples': 1000,  # Default: 1e5. NOTE: the default is good for standard real cases!
            'zeopp_sa_samples': 1000,  # Default: 1e5. NOTE: the default is good for standard real cases!
            'zeopp_block_samples': 100,  # Default: 100
            'raspa_widom_cycles': 500,  # Default: 1e5
            'raspa_gcmc_init_cycles': 500,  # Default: 1e3
            'raspa_gcmc_prod_cycles': 500,  # Default: 1e4
            'pressure_list': [0.1, 1.0],
            'probe_based': True,
        })

    run(builder)
def main(cp2k_code_string, ddec_code_string, ddec_atdens_path):
    """Example usage:
    ATDENS_PATH='/home/daniele/aiida-lsmo-codes/data/chargemol/atomic_densities/'
    verdi run run_Cp2kMultistageDdecWorkChain_h2o.py cp2k@localhost ddec@localhost $ATDENS_PATH
    """
    print('Testing CP2K-Multistage calculation + DDEC on H2O...')

    cp2k_code = Code.get_from_string(cp2k_code_string)
    ddec_code = Code.get_from_string(ddec_code_string)

    atoms = ase.build.molecule('H2O')
    atoms.center(vacuum=2.0)
    structure = StructureData(ase=atoms)

    cp2k_options = {
        'resources': {
            'num_machines': 1
        },
        'max_wallclock_seconds': 10 * 60,
        'withmpi': True,
    }

    ddec_options = {
        'resources': {
            'num_machines': 1
        },
        'max_wallclock_seconds': 10 * 60,
        'withmpi': False,
    }

    ddec_params = Dict(
        dict={
            'net charge': 0.0,
            'charge type': 'DDEC6',
            'periodicity along A, B, and C vectors': [True, True, True],
            'compute BOs': False,
            'atomic densities directory complete path': ddec_atdens_path,
            'input filename': 'valence_density',
        })

    inputs = {
        'structure': structure,
        'metadata': {
            'label': 'test-h2o'
        },
        'protocol_tag': Str('test'),
        'cp2k_base': {
            'cp2k': {
                'code': cp2k_code,
                'metadata': {
                    'options': cp2k_options,
                }
            }
        },
        'ddec': {
            'parameters': ddec_params,
            'code': ddec_code,
            'metadata': {
                'options': ddec_options,
            }
        }
    }

    run(MultistageDdecWorkChain, **inputs)
Example 9
    def test_scf_wc_Cu_simple(self):
        """
        simple Cu noSOC, FP, lmax2 full example using scf workflow
        """
        from aiida.orm import Code, load_node, DataFactory
        from aiida.work import run
        from aiida_kkr.tools.kkr_params import kkrparams
        from aiida_kkr.workflows.kkr_scf import kkr_scf_wc
        from pprint import pprint
        from numpy import array

        ParameterData = DataFactory('parameter')
        StructureData = DataFactory('structure')

        alat = 6.83  # in a_Bohr
        abohr = 0.52917721067  # conversion factor to Angstroem units
        # bravais vectors
        bravais = array([[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]])

        a = 0.5 * alat * abohr
        Cu = StructureData(cell=[[a, a, 0.0], [a, 0.0, a], [0.0, a, a]])
        Cu.append_atom(position=[0.0, 0.0, 0.0], symbols='Cu')

        Cu.store()
        print(Cu)

        # here we create a parameter node for the workflow input (workflow specific parameter) and adjust the convergence criterion.
        wfd = kkr_scf_wc.get_wf_defaults()

        wfd['convergence_criterion'] = 10**-4
        wfd['check_dos'] = False
        wfd['kkr_runmax'] = 5
        wfd['nsteps'] = 50
        wfd['queue_name'] = ''
        wfd['resources']['num_machines'] = 1
        wfd['use_mpi'] = False  #True

        wfd['num_rerun'] = 2
        wfd['natom_in_cls_min'] = 20

        KKRscf_wf_parameters = ParameterData(dict=wfd)

        # The scf-workflow also needs the voronoi and KKR codes to be able to run the calculations
        VoroCode = Code.get_from_string('voronoi@my_mac')
        KKRCode = Code.get_from_string('KKRcode@my_mac')

        # Finally, we use the kkrparams class to prepare a valid set of KKR parameters, stored as a ParameterData object for use in AiiDA
        ParaNode = ParameterData(dict=kkrparams(
            LMAX=2, RMAX=7, GMAX=65, NSPIN=1, RCLUSTZ=1.9).get_dict())

        label = 'KKR-scf for Cu bulk'
        descr = 'KKR self-consistency workflow for Cu bulk'
        try:
            out = run(kkr_scf_wc,
                      structure=Cu,
                      calc_parameters=ParaNode,
                      voronoi=VoroCode,
                      kkr=KKRCode,
                      wf_parameters=KKRscf_wf_parameters,
                      _label=label,
                      _description=descr)
        except Exception:
            print('some error occurred in the run of kkr_scf_wc')

        # load node of workflow
        print(out)
        n = load_node(out[1])

        print('\noutputs of workflow\n-------------------------------------------------')
        pprint(n.get_outputs_dict())

        # get output dictionary
        n = n.get_outputs()[-1]
        out = n.get_dict()
        print('\n\noutput dictionary:\n-------------------------------------------------')
        pprint(out)

        # finally check some output
        print('\n\ncheck values ...\n-------------------------------------------------')

        print('voronoi_step_success', out['voronoi_step_success'])
        assert out['voronoi_step_success']

        print('kkr_step_success', out['kkr_step_success'])
        assert out['kkr_step_success']

        print('successful', out['successful'])
        assert out['successful']

        print('error', out['errors'])
        assert out['errors'] == []

        print('warning', out['warnings'])
        assert out['warnings'] == []

        print('convergence_reached', out['convergence_reached'])
        assert out['convergence_reached']

        print('convergence_value', out['convergence_value'])
        assert out['convergence_value'] < 10**-4

        print('charge_neutrality', abs(out['charge_neutrality']))
        assert abs(out['charge_neutrality']) < 5 * 10**-4

        print('used_higher_accuracy', out['used_higher_accuracy'])
        assert out['used_higher_accuracy']

        print('\ndone with checks\n')
Example 10
    def setUp(self):
        self.calc_cls = CalculationFactory('vasp.scf')
        self.code = Code()
        self.code.set_computer(self.computer)
        self.code.set_remote_computer_exec((self.computer, '/bin/foo'))
        Common.import_paw()
def main(cp2k_code_string, ddec_code_string, ddec_atdens_path):
    """Example usage:
    ATDENS_PATH='/home/daniele/Programs/aiida-database/data/chargemol_09_26_2017/atomic_densities/'
    verdi run run_cp2k_ddec_MOF-74.py cp2k@localhost ddec@localhost $ATDENS_PATH
    """
    print('Testing CP2K ENERGY calculation + DDEC on Zn-MOF-74...')

    cp2k_code = Code.get_from_string(cp2k_code_string)
    ddec_code = Code.get_from_string(ddec_code_string)

    thisdir = os.path.dirname(os.path.abspath(__file__))
    structure = StructureData(
        ase=ase.io.read(os.path.join(thisdir, 'data/Zn-MOF-74.cif')))

    cp2k_options = {
        'resources': {
            'num_machines': 1
        },
        'max_wallclock_seconds': 10 * 60,
        'withmpi': True,
    }

    ddec_options = {
        'resources': {
            'num_machines': 1
        },
        'max_wallclock_seconds': 10 * 60,
        'withmpi': False,
    }

    cp2k_params = Dict(
        dict={
            'FORCE_EVAL': {
                'METHOD': 'Quickstep',
                'DFT': {
                    'BASIS_SET_FILE_NAME': 'BASIS_MOLOPT',
                    'MGRID': {
                        'NGRIDS': 4,
                        'CUTOFF': 280,
                        'REL_CUTOFF': 30,
                    },
                    'SCF': {
                        'MAX_SCF': 3,  # limited for testing purpose
                    },
                    'XC': {
                        'XC_FUNCTIONAL': {
                            '_': 'PBE',
                        },
                    },
                },
                'SUBSYS': {
                    'KIND': [
                        {
                            '_': 'H',
                            'BASIS_SET': 'SZV-MOLOPT-SR-GTH',
                            'POTENTIAL': 'GTH-PBE'
                        },
                        {
                            '_': 'C',
                            'BASIS_SET': 'SZV-MOLOPT-SR-GTH',
                            'POTENTIAL': 'GTH-PBE'
                        },
                        {
                            '_': 'O',
                            'BASIS_SET': 'SZV-MOLOPT-SR-GTH',
                            'POTENTIAL': 'GTH-PBE'
                        },
                        {
                            '_': 'Zn',
                            'BASIS_SET': 'SZV-MOLOPT-SR-GTH',
                            'POTENTIAL': 'GTH-PBE'
                        },
                    ],
                },
            }
        })

    ddec_params = Dict(
        dict={
            'net charge': 0.0,
            'charge type': 'DDEC6',
            'periodicity along A, B, and C vectors': [True, True, True],
            'compute BOs': False,
            'atomic densities directory complete path': ddec_atdens_path,
            'input filename': 'valence_density',
        })

    inputs = {
        'metadata': {
            'label': 'test-MOF-74'
        },
        'cp2k_base': {
            'cp2k': {
                'structure': structure,
                'parameters': cp2k_params,
                'code': cp2k_code,
                'metadata': {
                    'options': cp2k_options,
                }
            }
        },
        'ddec': {
            'parameters': ddec_params,
            'code': ddec_code,
            'metadata': {
                'options': ddec_options,
            }
        }
    }

    run(Cp2kDdecWorkChain, **inputs)
Example 12
def test_process():
    """Test running a calculation
    note this does not test that the expected outputs are created of output parsing"""
    from aiida.plugins import DataFactory, CalculationFactory
    from aiida.engine import run
    from aiida.orm import Code, SinglefileData, Int, Float, Str, Bool, List, Dict, ArrayData, XyData, FolderData, RemoteData
    import numpy as np
    import aiida
    import os
    aiida.load_profile()
    #pre-prepared files
    dmdata = SinglefileData(
        file=os.path.join(os.getcwd(), "input_files", 'dmdata'))
    jij = SinglefileData(
        file=os.path.join(os.getcwd(), "input_files", 'jij'))
    momfile = SinglefileData(
        file=os.path.join(os.getcwd(), "input_files", 'momfile'))
    posfile = SinglefileData(
        file=os.path.join(os.getcwd(), "input_files", 'posfile'))
    qfile = SinglefileData(
        file=os.path.join(os.getcwd(), "input_files", 'qfile'))
    # inpsd.dat file selection
    simid = Str('SCsurf_T')

    ncell = ArrayData()
    ncell.set_array('matrix', np.array([128, 128, 1]))

    BC = Str('P         P         0 ')

    cell = ArrayData()
    cell.set_array('matrix', np.array([[1.00000, 0.00000, 0.00000], [
                   0.00000, 1.00000, 0.00000], [0.00000, 0.00000, 1.00000]]))

    do_prnstruct = Int(2)
    maptype = Int(2)
    SDEalgh = Int(1)
    Initmag = Int(3)
    ip_mode = Str('Q')
    qm_svec = ArrayData()
    qm_svec.set_array('matrix', np.array([1, -1, 0]))

    qm_nvec = ArrayData()
    qm_nvec.set_array('matrix', np.array([0, 0, 1]))

    mode = Str('S')
    temp = Float(0.000)
    damping = Float(0.500)
    Nstep = Int(5000)
    timestep = Str('1.000d-15')
    qpoints = Str('F')
    plotenergy = Int(1)
    do_avrg = Str('Y')

    code = Code.get_from_string('uppasd_dev@uppasd_local')
    
    r_l = List(list=[
        f'coord.{simid.value}.out',
        f'qm_minima.{simid.value}.out',
        f'qm_sweep.{simid.value}.out',
        'qpoints.out',
        f'totenergy.{simid.value}.out',
        f'averages.{simid.value}.out',
        'fort.2000',
        'inp.SCsurf_T.yaml',
        'qm_restart.SCsurf_T.out',
        'restart.SCsurf_T.out',
    ])
    # set up calculation
    inputs = {
        'code': code,
        'dmdata': dmdata,
        'jij': jij,
        'momfile': momfile,
        'posfile': posfile,
        'qfile': qfile,
        'simid': simid,
        'ncell': ncell,
        'BC': BC,
        'cell': cell,
        'do_prnstruct': do_prnstruct,
        'maptype': maptype,
        'SDEalgh': SDEalgh,
        'Initmag': Initmag,
        'ip_mode': ip_mode,
        'qm_svec': qm_svec,
        'qm_nvec': qm_nvec,
        'mode': mode,
        'temp': temp,
        'damping': damping,
        'Nstep': Nstep,
        'timestep': timestep,
        'qpoints': qpoints,
        'plotenergy': plotenergy,
        'do_avrg': do_avrg,
        'retrieve_list_name': r_l,
        'metadata': {
            'options': {
                'max_wallclock_seconds': 60,
                'resources': {'num_machines': 1},
                'input_filename': 'inpsd.dat',
                'parser_name': 'UppASD_core_parsers',
            },
        },
    }

    result = run(CalculationFactory('UppASD_core_calculations'), **inputs)
    computed_diff = result['UppASD_core_calculations'].get_content()

    assert 'content1' in computed_diff
    assert 'content2' in computed_diff
Example 13
def main(codelabel):
    """Test structure roundtrip precision ase->aiida->cp2k->aiida->ase"""
    try:
        code = Code.get_from_string(codelabel)
    except NotExistent:
        print("The code '{}' does not exist".format(codelabel))
        sys.exit(1)

    print(
        "Testing structure roundtrip precision ase->aiida->cp2k->aiida->ase..."
    )

    # structure
    epsilon = 1e-10  # expected precision in Angstrom
    dist = 0.74 + epsilon
    positions = [(0, 0, 0), (0, 0, dist)]
    cell = np.diag([4, -4, 4 + epsilon])
    atoms = ase.Atoms('H2', positions=positions, cell=cell)
    structure = StructureData(ase=atoms)

    # parameters
    parameters = Dict(
        dict={
            'GLOBAL': {
                'RUN_TYPE': 'MD',
            },
            'MOTION': {
                'MD': {
                    'TIMESTEP': 0.0,  # do not move atoms
                    'STEPS': 1,
                },
            },
            'FORCE_EVAL': {
                'METHOD': 'Quickstep',
                'DFT': {
                    'BASIS_SET_FILE_NAME': 'BASIS_MOLOPT',
                    'SCF': {
                        'MAX_SCF': 1,
                    },
                    'XC': {
                        'XC_FUNCTIONAL': {
                            '_': 'LDA',
                        },
                    },
                },
                'SUBSYS': {
                    'KIND': {
                        '_': 'DEFAULT',
                        'BASIS_SET': 'DZVP-MOLOPT-SR-GTH',
                        'POTENTIAL': 'GTH-LDA',
                    },
                },
            },
        })

    # resources
    options = {
        "resources": {
            "num_machines": 1,
            "num_mpiprocs_per_machine": 1,
        },
        "max_wallclock_seconds": 1 * 60 * 60,
    }

    inputs = {
        'structure': structure,
        'parameters': parameters,
        'code': code,
        'metadata': {
            'options': options,
        }
    }

    print("submitted calculation...")
    calc = run(Cp2kCalculation, **inputs)

    # check structure preservation
    atoms2 = calc['output_structure'].get_ase()

    # zeros should be preserved exactly
    if np.all(atoms2.positions[0] == 0.0):
        print("OK, zeros in structure were preserved exactly")
    else:
        print("ERROR!")
        print("Zeros in structure changed: ", atoms2.positions[0])
        sys.exit(3)

    # other values should be preserved with epsilon precision
    dist2 = atoms2.get_distance(0, 1)
    if abs(dist2 - dist) < epsilon:
        print("OK, structure preserved with %.1e Angstrom precision" % epsilon)
    else:
        print("ERROR!")
        print("Structure changed by %e Angstrom" % abs(dist - dist2))
        sys.exit(3)

    # check cell preservation
    cell_diff = np.amax(np.abs(atoms2.cell - cell))
    if cell_diff < epsilon:
        print("OK, cell preserved with %.1e Angstrom precision" % epsilon)
    else:
        print("ERROR!")
        print("Cell changed by %e Angstrom" % cell_diff)
        sys.exit(3)

    sys.exit(0)
Example 14
                argkey, argval = sys.argv.pop(0).split('=', 1)
            else:
                argkey = sys.argv.pop(0)
                argval = True
            if argkey not in options.keys():
                options[argkey] = []
            options[argkey].append(argval)
        else:
            files.append(arg)

    expected_code_type = "codtools.cifcoddeposit"

    try:
        if codename is None:
            raise ValueError
        code = Code.get(codename)
        if code.get_input_plugin_name() != expected_code_type:
            raise ValueError
    except (NotExistent, ValueError):
        valid_code_labels = [c.label for c in Code.query(
            dbattributes__key="input_plugin",
            dbattributes__tval=expected_code_type)]
        if valid_code_labels:
            print >> sys.stderr, "Pass as further parameter a valid code label."
            print >> sys.stderr, "Valid labels with a {} executable are:".format(expected_code_type)
            for l in valid_code_labels:
                print >> sys.stderr, "*", l
        else:
            print >> sys.stderr, "Code not valid, and no valid codes for {}. Configure at least one first using".format(
                expected_code_type)
            print >> sys.stderr, "    verdi code setup"
Example 15
from aiida.plugins import DataFactory, WorkflowFactory
Qecp = WorkflowFactory('qecpworkchain.cp')
StructureData = DataFactory('structure')

# some node where you have a structure
aiida_structure = aiida.orm.load_node(42)

# optionally replicate the structure
supercell = aiida_structure.get_pymatgen_structure()
supercell.make_supercell([2, 2, 2])
aiida_structure = aiida.orm.StructureData(pymatgen_structure=supercell)

build = Qecp.get_builder()

#define computing resources
pw = Code.get_from_string('qe-6.5@ulysses')
cp = Code.get_from_string('cp-6.5@ulysses')
resources = {
    'resources': {
        'num_machines': 1,
        'num_mpiprocs_per_machine': 20,
    },
    'wallclock': 3600 * 3,
    'queue': 'regular1',
}
resourcespw = {
    'resources': {
        'num_machines': 1,
        'num_mpiprocs_per_machine': 20,
    },
    'wallclock': 600,
Example 16
from __future__ import absolute_import
from __future__ import print_function
import os
from aiida.orm import Code
from aiida.common.extendeddicts import AttributeDict
from aiida.plugins import DataFactory, CalculationFactory
from aiida.engine import run
from aiida import load_profile
load_profile()

from aiida_premod.tests import TEST_DIR  # pylint: disable=wrong-import-position

# get code
code_string = 'premod@localhost'
code = Code.get_from_string(code_string)

# Set input parameters
parameters = AttributeDict()
parameters.MODE_SIM = 'PPT'
parameters.MODE_ENTITY = 'SD'
parameters.MODE_SD = 'EULERIAN'
parameters.FILE_OUT_PPT = 'PreModRun.out'
parameters.OUTPUT_TIMES = [1800.00, 3600.00, 14400.00, 61200.00, 277200.00]
parameters.MODE_IO = 'TXT'
parameters.FILE_SOLVER = 'solver.txt'
parameters.FILE_ALLOY = 'alloy.txt'
parameters.FILE_PROCESS = 'temperature.txt'
parameters.FILE_PHASES = 'libphases.txt'
parameters.FILE_PPTLIB = 'models.txt'
parameters.FILE_PPTSIM = 'libmodel.txt'
Example 17
inputs['settings'] = ParameterData(dict={'cmdline': ['-nk', str(num_pools)]})

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='NSCF calculation.')
    parser.add_argument('--code',
                        type=str,
                        dest='codename',
                        required=True,
                        help='The pw codename to use')
    parser.add_argument('--pseudo',
                        type=str,
                        dest='pseudo',
                        required=True,
                        help='The pseudo family to use')
    parser.add_argument('--parent',
                        type=int,
                        dest='parent',
                        required=True,
                        help='The pk of the parent calculation to use')
    args = parser.parse_args()
    code = Code.get_from_string(args.codename)
    structure = load_node(args.parent).inp.structure
    inputs['structure'] = structure
    inputs['pseudo'] = get_pseudos_from_structure(structure, args.pseudo)
    inputs['code'] = code
    inputs['parent_folder'] = load_node(args.parent).out.remote_folder
    process = PwCalculation.process()
    running = submit(process, **inputs)
    print "Created calculation; with pid={}".format(running.pid)
def create_builder_from_file(input_folder,
                             input_file_name,
                             code,
                             metadata,
                             pseudo_folder_path=None,
                             use_first=False):
    """Create a populated process builder for a `PwCalculation` from a standard QE input file and pseudo (upf) files.

    :param input_folder: the folder containing the input file
    :type input_folder: aiida.common.folders.Folder or str
    :param input_file_name: the name of the input file
    :type input_file_name: str
    :param code: the code associated with the calculation
    :type code: aiida.orm.Code or str
    :param metadata: metadata values for the calculation (e.g. resources)
    :type metadata: dict
    :param pseudo_folder_path: the folder containing the upf files (if None, then input_folder is used)
    :type pseudo_folder_path: aiida.common.folders.Folder or str or None
    :param use_first: passed to UpfData.get_or_create
    :type use_first: bool
    :raises NotImplementedError: if the structure is not ibrav=0
    :return: a builder instance for PwCalculation
    """
    PwCalculation = CalculationFactory('quantumespresso.pw')

    builder = PwCalculation.get_builder()
    builder.metadata = metadata

    if isinstance(code, str):
        code = Code.get_from_string(code)
    builder.code = code

    # read input_file
    if isinstance(input_folder, str):
        input_folder = Folder(input_folder)

    with input_folder.open(input_file_name) as input_file:
        parsed_file = PwInputFile(input_file)

    builder.structure = parsed_file.get_structuredata()
    builder.kpoints = parsed_file.get_kpointsdata()

    if parsed_file.namelists['SYSTEM']['ibrav'] != 0:
        raise NotImplementedError(
            'Found ibrav != 0: `aiida-quantumespresso` currently only supports ibrav = 0.'
        )

    # Then, strip the namelist items that the plugin doesn't allow or sets later.
    # NOTE: If any of the position or cell units are in alat or crystal
    # units, that will be taken care of by the input parsing tools, and
    # we are safe to fake that they were never there in the first place.
    parameters_dict = copy.deepcopy(parsed_file.namelists)
    for namelist, blocked_key in PwCalculation._blocked_keywords:  # pylint: disable=protected-access
        for key in list(parameters_dict[namelist].keys()):
            # take into account that celldm and celldm(*) must be blocked
            if re.sub('[(0-9)]', '', key) == blocked_key:
                parameters_dict[namelist].pop(key, None)
    builder.parameters = Dict(dict=parameters_dict)

    # Get or create a UpfData node for the pseudopotentials used for the calculation.
    pseudos_map = {}
    if pseudo_folder_path is None:
        pseudo_folder_path = input_folder
    if isinstance(pseudo_folder_path, str):
        pseudo_folder_path = Folder(pseudo_folder_path)
    names = parsed_file.atomic_species['names']
    pseudo_file_names = parsed_file.atomic_species['pseudo_file_names']
    pseudo_file_map = {}
    for name, fname in zip(names, pseudo_file_names):
        if fname not in pseudo_file_map:
            local_path = pseudo_folder_path.get_abs_path(fname)
            upf_node, _ = UpfData.get_or_create(local_path,
                                                use_first=use_first,
                                                store_upf=False)
            pseudo_file_map[fname] = upf_node
        pseudos_map[name] = pseudo_file_map[fname]
    builder.pseudos = pseudos_map

    settings_dict = {}
    if parsed_file.k_points['type'] == 'gamma':
        settings_dict['gamma_only'] = True

    # If there are any fixed coordinates (i.e. force modification) present in the input file, specify in settings
    fixed_coords = parsed_file.atomic_positions['fixed_coords']
    # Function ``any()`` only works for 1-dimensional lists so we have to call it twice manually.
    if any((any(fc_xyz) for fc_xyz in fixed_coords)):
        settings_dict['FIXED_COORDS'] = fixed_coords

    if settings_dict:
        builder.settings = settings_dict

    return builder
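A hedged usage sketch for create_builder_from_file; the folder, file name, code label, and resource values below are illustrative assumptions:

metadata = {
    'options': {
        'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
        'max_wallclock_seconds': 1800,
    },
}
# Build a PwCalculation builder from an existing pw.x input file plus UPF files
builder = create_builder_from_file(
    input_folder='/path/to/input_dir',  # assumed folder holding the input and UPF files
    input_file_name='pw.in',            # assumed input file name
    code='pw@localhost',                # assumed code label
    metadata=metadata,
)
# The populated builder can then be launched, e.g. with aiida.engine.submit(builder)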
Example 19
    def _fixture_code(entry_point_name):
        from aiida.orm import Code
        return Code(input_plugin_name=entry_point_name,
                    remote_computer_exec=[fixture_localhost, '/bin/true'])
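In aiida-core-style test suites this factory is typically the inner function of a pytest fixture. A hedged sketch of how it might be wrapped and used (the fixture and entry point names are assumptions):

import pytest

@pytest.fixture
def fixture_code(fixture_localhost):  # assumed outer fixture returning the factory above
    def _fixture_code(entry_point_name):
        from aiida.orm import Code
        return Code(input_plugin_name=entry_point_name,
                    remote_computer_exec=[fixture_localhost, '/bin/true'])
    return _fixture_code


def test_code_creation(fixture_code):
    code = fixture_code('arithmetic.add')  # assumed entry point name
    assert code.get_input_plugin_name() == 'arithmetic.add'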
Example 20
    def get_builder(self,
                    structure,
                    calc_engines,
                    protocol,
                    relaxation_type,
                    threshold_forces=None,
                    threshold_stress=None,
                    previous_workchain=None,
                    **kwargs):
        """Return a process builder for the corresponding workchain class with inputs set according to the protocol.

        :param structure: the structure to be relaxed
        :param calc_engines: ...
        :param protocol: the protocol to use when determining the workchain inputs
        :param relaxation_type: the type of relaxation to perform, instance of `RelaxType`
        :param threshold_forces: target threshold for the forces in eV/Å.
        :param threshold_stress: target threshold for the stress in eV/Å^3.
        :param kwargs: any inputs that are specific to the plugin.
        :return: a `aiida.engine.processes.ProcessBuilder` instance ready to be submitted.
        """
        # pylint: disable=too-many-locals

        super().get_builder(structure, calc_engines, protocol, relaxation_type,
                            threshold_forces, threshold_stress,
                            previous_workchain, **kwargs)

        # Get the protocol that we want to use
        if protocol is None:
            protocol = self._default_protocol
        protocol = self.get_protocol(protocol)

        # Set the builder
        builder = self.process_class.get_builder()

        # Set code
        builder.code = Code.get_from_string(calc_engines['relax']['code'])

        # Set structure
        builder.structure = structure

        # Set options
        builder.options = DataFactory('dict')(
            dict=calc_engines['relax']['options'])

        # Set settings
        # Make sure we add forces and stress for the VASP parser
        settings = AttributeDict()
        settings.update(
            {'parser_settings': {
                'add_forces': True,
                'add_stress': True
            }})
        builder.settings = DataFactory('dict')(dict=settings)

        # Set workchain related inputs, in this case, give more explicit output to report
        builder.verbose = DataFactory('bool')(True)

        # Set parameters
        builder.parameters = DataFactory('dict')(dict=protocol['parameters'])

        # Set potentials and their mapping
        builder.potential_family = DataFactory('str')(
            protocol['potential_family'])
        builder.potential_mapping = DataFactory('dict')(
            dict=self._potential_mapping[protocol['potential_mapping']])

        # Set the kpoint grid from the density in the protocol
        kpoints = DataFactory('array.kpoints')()
        kpoints.set_kpoints_mesh([1, 1, 1])
        kpoints.set_cell_from_structure(structure)
        rec_cell = kpoints.reciprocal_cell
        kpoints.set_kpoints_mesh(
            fetch_k_grid(rec_cell, protocol['kpoint_distance']))
        builder.kpoints = kpoints

        # Here we set the protocols fast, moderate and precise. These currently have no formal meaning.
        # After a while these will be set in the VASP workchain entry points using the convergence workchain etc.
        # However, for now we rely on default plane-wave cutoffs and a set k-point density for the chosen protocol.
        relax = AttributeDict()
        relax.perform = DataFactory('bool')(True)
        relax.algo = DataFactory('str')(protocol['relax']['algo'])

        if relaxation_type == RelaxType.ATOMS:
            relax.positions = DataFactory('bool')(True)
            relax.shape = DataFactory('bool')(False)
            relax.volume = DataFactory('bool')(False)
        elif relaxation_type == RelaxType.CELL:
            relax.positions = DataFactory('bool')(False)
            relax.shape = DataFactory('bool')(True)
            relax.volume = DataFactory('bool')(True)
        elif relaxation_type == RelaxType.ATOMS_CELL:
            relax.positions = DataFactory('bool')(True)
            relax.shape = DataFactory('bool')(True)
            relax.volume = DataFactory('bool')(True)
        else:
            raise ValueError('relaxation type `{}` is not supported'.format(
                relaxation_type.value))

        if threshold_forces is not None:
            threshold = threshold_forces
        else:
            threshold = protocol['relax']['threshold_forces']
        relax.force_cutoff = DataFactory('float')(threshold)

        if threshold_stress is not None:
            raise ValueError(
                'Using a stress threshold is not directly available in VASP during relaxation.'
            )

        builder.relax = relax

        return builder
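For reference, a hedged sketch of the calc_engines dictionary consumed by get_builder above; the code label and option values are assumptions:

calc_engines = {
    'relax': {
        'code': 'vasp@mycluster',  # assumed VASP code label
        'options': {
            'resources': {'num_machines': 1},
            'max_wallclock_seconds': 3600,
        },
    },
}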
Example 21
def main():
    expected_results_calculations = {}
    expected_results_workchains = {}
    code = Code.get_from_string(codename)

    # Submitting the Calculations the new way directly through the launchers
    print(
        'Submitting {} calculations to the daemon'.format(number_calculations))
    for counter in range(1, number_calculations + 1):
        inputval = counter
        calc, expected_result = launch_calculation(code=code,
                                                   counter=counter,
                                                   inputval=inputval)
        expected_results_calculations[calc.pk] = expected_result

    # Submitting the Workchains
    print('Submitting {} workchains to the daemon'.format(number_workchains))
    for index in range(number_workchains):
        inp = Int(index)
        result, node = run_get_node(NestedWorkChain, inp=inp)
        expected_results_workchains[node.pk] = index

    print("Submitting a workchain with 'submit'.")
    builder = NestedWorkChain.get_builder()
    input_val = 4
    builder.inp = Int(input_val)
    proc = submit(builder)
    expected_results_workchains[proc.pk] = input_val

    print('Submitting a workchain with a nested input namespace.')
    value = Int(-12)
    pk = submit(NestedInputNamespace, foo={'bar': {'baz': value}}).pk
    expected_results_workchains[pk] = value

    print('Submitting a workchain with a dynamic non-db input.')
    value = [4, 2, 3]
    pk = submit(DynamicNonDbInput, namespace={'input': value}).pk
    expected_results_workchains[pk] = value

    print('Submitting a workchain with a dynamic db input.')
    value = 9
    pk = submit(DynamicDbInput, namespace={'input': Int(value)}).pk
    expected_results_workchains[pk] = value

    print('Submitting a workchain with a mixed (db / non-db) dynamic input.')
    value_non_db = 3
    value_db = Int(2)
    pk = submit(DynamicMixedInput,
                namespace={
                    'inputs': {
                        'input_non_db': value_non_db,
                        'input_db': value_db
                    }
                }).pk
    expected_results_workchains[pk] = value_non_db + value_db

    print('Submitting the serializing workchain')
    pk = submit(SerializeWorkChain, test=Int).pk
    expected_results_workchains[pk] = ObjectLoader().identify_object(Int)

    print('Submitting the ListEcho workchain.')
    list_value = List()
    list_value.extend([1, 2, 3])
    pk = submit(ListEcho, list=list_value).pk
    expected_results_workchains[pk] = list_value

    print('Submitting a WorkChain which contains a workfunction.')
    value = Str('workfunction test string')
    pk = submit(WorkFunctionRunnerWorkChain, input=value).pk
    expected_results_workchains[pk] = value

    print('Submitting a WorkChain which contains a calcfunction.')
    value = Int(1)
    pk = submit(CalcFunctionRunnerWorkChain, input=value).pk
    expected_results_workchains[pk] = Int(2)

    calculation_pks = sorted(expected_results_calculations.keys())
    workchains_pks = sorted(expected_results_workchains.keys())
    pks = calculation_pks + workchains_pks

    print('Waiting for end of execution...')
    start_time = time.time()
    exited_with_timeout = True
    while time.time() - start_time < timeout_secs:
        time.sleep(15)  # Wait a few seconds

        # Print some debug info, both for debugging reasons and to avoid
        # that the test machine is shut down because there is no output

        print('#' * 78)
        print('####### TIME ELAPSED: {} s'.format(time.time() - start_time))
        print('#' * 78)
        print("Output of 'verdi process list -a':")
        try:
            print(
                subprocess.check_output(
                    ['verdi', 'process', 'list', '-a'],
                    stderr=subprocess.STDOUT,
                ))
        except subprocess.CalledProcessError as e:
            print('Note: the command failed, message: {}'.format(e))

        print("Output of 'verdi daemon status':")
        try:
            print(
                subprocess.check_output(
                    ['verdi', 'daemon', 'status'],
                    stderr=subprocess.STDOUT,
                ))
        except subprocess.CalledProcessError as e:
            print('Note: the command failed, message: {}'.format(e))

        if jobs_have_finished(pks):
            print('Calculation terminated its execution')
            exited_with_timeout = False
            break

    if exited_with_timeout:
        print_daemon_log()
        print('')
        print('Timeout!! Calculation did not complete after {} seconds'.format(
            timeout_secs))
        sys.exit(2)
    else:
        # Launch the same calculations but with caching enabled -- these should be FINISHED immediately
        cached_calcs = []
        with enable_caching(identifier='aiida.calculations:templatereplacer'):
            for counter in range(1, number_calculations + 1):
                inputval = counter
                calc, expected_result = run_calculation(code=code,
                                                        counter=counter,
                                                        inputval=inputval)
                cached_calcs.append(calc)
                expected_results_calculations[calc.pk] = expected_result

        if (validate_calculations(expected_results_calculations)
                and validate_workchains(expected_results_workchains)
                and validate_cached(cached_calcs)):
            print_daemon_log()
            print('')
            print('OK, all calculations have the expected parsed result')
            sys.exit(0)
        else:
            print_daemon_log()
            print('')
            print(
                'ERROR! Some return values are different from the expected value'
            )
            sys.exit(3)
Example 22
load_profile()
with open(sys.argv[1]) as fhandle:
    structure = fhandle.read()
assert detect_format(structure) == 'cif'
ase_obj, error = cif_to_ase(structure)
assert not error, error
assert 'disordered' not in ase_obj.info

try:
    symprec = float(sys.argv[2])
except (IndexError, ValueError):
    symprec = 3E-02  # NB needs tuning
print('symprec = %s' % symprec)

label = get_formula(ase_obj) + "/" + spglib.get_spacegroup(ase_obj, symprec=symprec)

ase_obj, error = refine(ase_obj, accuracy=symprec, conventional_cell=True)
assert not error, error

calc_setup = get_template()
calc_setup['parameters']['crystal']['title'] = label

inputs = MPDSCrystalWorkchain.get_builder()
inputs.crystal_code = Code.get_from_string('{}@{}'.format(calc_setup['codes'][0], calc_setup['cluster']))
inputs.crystal_parameters = DataFactory('dict')(dict=calc_setup['parameters']['crystal'])
inputs.basis_family, _ = DataFactory('crystal_dft.basis_family').get_or_create(calc_setup['basis_family'])
inputs.options = DataFactory('dict')(dict=calc_setup['options'])

inputs.metadata = dict(label=label)
inputs.mpds_query = DataFactory('dict')()  # FIXME: add support for submitting structures directly
inputs.struct_in = DataFactory('structure')(ase=ase_obj)
wc = submit(MPDSCrystalWorkchain, **inputs)
print("Submitted WorkChain %s" % wc.pk)
Example 23
    def test_cif_structure_roundtrip(self):
        from aiida.tools.dbexporters.tcod import export_cif, export_values
        from aiida.orm import Code
        from aiida.orm import JobCalculation
        from aiida.orm.data.cif import CifData
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.data.upf import UpfData
        from aiida.orm.data.folder import FolderData
        from aiida.common.folders import SandboxFolder
        from aiida.common.datastructures import calc_states
        import tempfile

        with tempfile.NamedTemporaryFile() as f:
            f.write('''
                data_test
                _cell_length_a    10
                _cell_length_b    10
                _cell_length_c    10
                _cell_angle_alpha 90
                _cell_angle_beta  90
                _cell_angle_gamma 90
                loop_
                _atom_site_label
                _atom_site_fract_x
                _atom_site_fract_y
                _atom_site_fract_z
                C 0 0 0
                O 0.5 0.5 0.5
            ''')
            f.flush()
            a = CifData(file=f.name)

        c = a._get_aiida_structure()
        c.store()
        pd = ParameterData()

        code = Code(local_executable='test.sh')
        with tempfile.NamedTemporaryFile() as f:
            f.write("#/bin/bash\n\necho test run\n")
            f.flush()
            code.add_path(f.name, 'test.sh')

        code.store()

        calc = JobCalculation(computer=self.computer)
        calc.set_resources({'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        calc.add_link_from(code, "code")
        calc.set_environment_variables({
            'PATH': '/dev/null',
            'USER': 'unknown'
        })

        with tempfile.NamedTemporaryFile(prefix="Fe") as f:
            f.write("<UPF version=\"2.0.1\">\nelement=\"Fe\"\n")
            f.flush()
            upf = UpfData(file=f.name)
            upf.store()
            calc.add_link_from(upf, "upf")

        with tempfile.NamedTemporaryFile() as f:
            f.write("data_test")
            f.flush()
            cif = CifData(file=f.name)
            cif.store()
            calc.add_link_from(cif, "cif")

        calc.store()
        calc._set_state(calc_states.SUBMITTING)
        with SandboxFolder() as f:
            calc._store_raw_input_folder(f.abspath)

        fd = FolderData()
        with open(
                fd._get_folder_pathsubfolder.get_abs_path(
                    calc._SCHED_OUTPUT_FILE), 'w') as f:
            f.write("standard output")
            f.flush()

        with open(
                fd._get_folder_pathsubfolder.get_abs_path(
                    calc._SCHED_ERROR_FILE), 'w') as f:
            f.write("standard error")
            f.flush()

        fd.store()
        fd.add_link_from(calc, calc._get_linkname_retrieved(), LinkType.CREATE)

        pd.add_link_from(calc, "calc", LinkType.CREATE)
        pd.store()

        with self.assertRaises(ValueError):
            export_cif(c, parameters=pd)

        c.add_link_from(calc, "calc", LinkType.CREATE)
        export_cif(c, parameters=pd)

        values = export_values(c, parameters=pd)
        values = values['0']

        self.assertEquals(values['_tcod_computation_environment'],
                          ['PATH=/dev/null\nUSER=unknown'])
        self.assertEquals(values['_tcod_computation_command'],
                          ['cd 1; ./_aiidasubmit.sh'])
import aiida
from aiida.orm import Code, Str, Dict
import pytest

import deliver
from deliver import deliver_stage, stage_solution

aiida.load_profile("<profile>")

deliver.GENERAL_INPUTS = {
    "code": Code.get_from_string('<code>'),
    "pseudo_family": Str("<pseudo-family>"),
    "options": Dict(dict={
        'withmpi': False,
        'max_wallclock_seconds': 3600 * 2
    }),
    "parameters": Dict(),
}


@pytest.mark.parametrize("stage", [1, 2, 3, 4])
def test_solution(stage):
    deliver_stage(stage, stage_solution(stage).deliverable)
Example 25
    def test_pw_translation(self):
        from aiida.tools.dbexporters.tcod \
            import translate_calculation_specific_values
        # from aiida.tools.dbexporters.tcod_plugins.pw \
        #     import PwTcodtranslator as PWT
        # from aiida.tools.dbexporters.tcod_plugins.cp \
        #     import CpTcodtranslator as CPT
        from aiida.orm.code import Code
        from aiida.orm.data.array import ArrayData
        from aiida.orm.data.array.kpoints import KpointsData
        from aiida.orm.data.parameter import ParameterData
        import numpy
        from aiida.common.pluginloader import get_plugin
        PWT = get_plugin('tools.dbexporters.tcod_plugins',
                         'quantumespresso.pw')
        CPT = get_plugin('tools.dbexporters.tcod_plugins',
                         'quantumespresso.cp')

        code = Code()
        code._set_attr('remote_exec_path', '/test')

        kpoints = KpointsData()
        kpoints.set_kpoints_mesh([2, 3, 4], offset=[0.25, 0.5, 0.75])
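        # The mesh and offset translate into the '_dft_BZ_integration_grid_*'
        # and '_dft_BZ_integration_grid_shift_*' tags asserted below.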

        def empty_list():
            return []

        calc = FakeObject({
            "inp": {
                "parameters": ParameterData(dict={}),
                "kpoints": kpoints,
                "code": code
            },
            "out": {
                "output_parameters": ParameterData(dict={})
            },
            "get_inputs": empty_list
        })

        res = translate_calculation_specific_values(calc, PWT)
        self.assertEquals(
            res, {
                '_dft_BZ_integration_grid_X': 2,
                '_dft_BZ_integration_grid_Y': 3,
                '_dft_BZ_integration_grid_Z': 4,
                '_dft_BZ_integration_grid_shift_X': 0.25,
                '_dft_BZ_integration_grid_shift_Y': 0.5,
                '_dft_BZ_integration_grid_shift_Z': 0.75,
                '_dft_pseudopotential_atom_type': [],
                '_dft_pseudopotential_type': [],
                '_dft_pseudopotential_type_other_name': [],
                '_tcod_software_package': 'Quantum ESPRESSO',
                '_tcod_software_executable_path': '/test',
            })

        calc = FakeObject({
            "inp": {
                "parameters":
                ParameterData(dict={
                    'SYSTEM': {
                        'ecutwfc': 40,
                        'occupations': 'smearing'
                    }
                })
            },
            "out": {
                "output_parameters":
                ParameterData(dict={
                    'number_of_electrons': 10,
                })
            },
            "get_inputs": empty_list
        })
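        # 'ecutwfc' is given in Ry while the TCOD tags use eV: 40 Ry is about
        # 544.228 eV, and the charge-density cutoff defaults to 4 * ecutwfc
        # (about 2176.91 eV).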
        res = translate_calculation_specific_values(calc, PWT)
        self.assertEquals(
            res, {
                '_dft_cell_valence_electrons': 10,
                '_tcod_software_package': 'Quantum ESPRESSO',
                '_dft_BZ_integration_smearing_method': 'Gaussian',
                '_dft_pseudopotential_atom_type': [],
                '_dft_pseudopotential_type': [],
                '_dft_pseudopotential_type_other_name': [],
                '_dft_kinetic_energy_cutoff_EEX': 2176.910676048,
                '_dft_kinetic_energy_cutoff_charge_density': 2176.910676048,
                '_dft_kinetic_energy_cutoff_wavefunctions': 544.227669012,
            })

        calc = FakeObject({
            "inp": {
                "parameters": ParameterData(dict={})
            },
            "out": {
                "output_parameters": ParameterData(dict={
                    'energy_xc': 5,
                })
            },
            "get_inputs": empty_list
        })
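        # An energy is reported without a matching '*_units' key, so the
        # translation must fail.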
        with self.assertRaises(ValueError):
            translate_calculation_specific_values(calc, PWT)

        calc = FakeObject({
            "inp": {
                "parameters": ParameterData(dict={})
            },
            "out": {
                "output_parameters":
                ParameterData(dict={
                    'energy_xc': 5,
                    'energy_xc_units': 'meV'
                })
            },
            "get_inputs": empty_list
        })
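        # Energy units other than eV (here 'meV') are likewise rejected.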
        with self.assertRaises(ValueError):
            translate_calculation_specific_values(calc, PWT)

        energies = {
            'energy': -3701.7004199449257,
            'energy_one_electron': -984.0078459766,
            'energy_xc': -706.6986753641559,
            'energy_ewald': -2822.6335103043157,
            'energy_hartree': 811.6396117001462,
            'fermi_energy': 10.25208617898623,
        }
        # 'dct' aliases 'energies'; iterate over a copy of the keys so that
        # the '*_units' entries can be added while looping.
        dct = energies
        for key in list(energies.keys()):
            dct["{}_units".format(key)] = 'eV'
        calc = FakeObject({
            "inp": {
                "parameters":
                ParameterData(dict={'SYSTEM': {
                    'smearing': 'mp'
                }})
            },
            "out": {
                "output_parameters": ParameterData(dict=dct)
            },
            "get_inputs": empty_list
        })
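        # 'mp' smearing maps to Methfessel-Paxton, with the MP order
        # defaulting to 1 (checked below).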
        res = translate_calculation_specific_values(calc, PWT)
        self.assertEquals(
            res, {
                '_tcod_total_energy': energies['energy'],
                '_dft_1e_energy': energies['energy_one_electron'],
                '_dft_correlation_energy': energies['energy_xc'],
                '_dft_ewald_energy': energies['energy_ewald'],
                '_dft_hartree_energy': energies['energy_hartree'],
                '_dft_fermi_energy': energies['fermi_energy'],
                '_tcod_software_package': 'Quantum ESPRESSO',
                '_dft_BZ_integration_smearing_method': 'Methfessel-Paxton',
                '_dft_BZ_integration_MP_order': 1,
                '_dft_pseudopotential_atom_type': [],
                '_dft_pseudopotential_type': [],
                '_dft_pseudopotential_type_other_name': [],
            })
        dct = energies
        dct['number_of_electrons'] = 10
        for key in list(energies.keys()):
            dct["{}_units".format(key)] = 'eV'
        calc = FakeObject({
            "inp": {
                "parameters":
                ParameterData(dict={'SYSTEM': {
                    'smearing': 'unknown-method'
                }})
            },
            "out": {
                "output_parameters": ParameterData(dict=dct)
            },
            "get_inputs": empty_list
        })
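        # The same kind of data passed through the CP translator yields a much
        # smaller tag set, as the following assertion shows.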
        res = translate_calculation_specific_values(calc, CPT)
        self.assertEquals(
            res, {
                '_dft_cell_valence_electrons': 10,
                '_tcod_software_package': 'Quantum ESPRESSO'
            })

        ad = ArrayData()
        ad.set_array("forces", numpy.array([[[1, 2, 3], [4, 5, 6]]]))
        calc = FakeObject({
            "inp": {
                "parameters":
                ParameterData(dict={'SYSTEM': {
                    'smearing': 'unknown-method'
                }})
            },
            "out": {
                "output_parameters": ParameterData(dict={}),
                "output_array": ad
            },
            "get_inputs": empty_list
        })
        res = translate_calculation_specific_values(calc, PWT)
        self.assertEquals(
            res,
            {
                '_tcod_software_package': 'Quantum ESPRESSO',
                '_dft_BZ_integration_smearing_method': 'other',
                '_dft_BZ_integration_smearing_method_other': 'unknown-method',
                '_dft_pseudopotential_atom_type': [],
                '_dft_pseudopotential_type': [],
                '_dft_pseudopotential_type_other_name': [],
                ## Residual forces are no longer produced, as they should
                ## be in the same CIF loop with coordinates -- to be
                ## implemented later, since it's not yet clear how.
                # '_tcod_atom_site_resid_force_Cartn_x': [1,4],
                # '_tcod_atom_site_resid_force_Cartn_y': [2,5],
                # '_tcod_atom_site_resid_force_Cartn_z': [3,6],
            })
Example no. 26
    def test_remote(self):
        """Test remote code."""
        import tempfile

        from aiida.orm import Code
        from aiida.common.exceptions import ValidationError

        with self.assertRaises(ValueError):
            # remote_computer_exec has length 2 but is not a list or tuple
            Code(remote_computer_exec='ab')

        # invalid code path
        with self.assertRaises(ValueError):
            Code(remote_computer_exec=(self.computer, ''))

        # Relative path is invalid for remote code
        with self.assertRaises(ValueError):
            Code(remote_computer_exec=(self.computer, 'subdir/run.exe'))

        # first argument should be a computer, not a string
        with self.assertRaises(TypeError):
            Code(remote_computer_exec=('localhost', '/bin/ls'))

        code = Code(remote_computer_exec=(self.computer, '/bin/ls'))
        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
            fhandle.write('#!/bin/bash\n\necho test run\n')
            fhandle.flush()
            code.put_object_from_filelike(fhandle, 'test.sh')

        with self.assertRaises(ValidationError):
            # There are files inside
            code.store()

        # If there are no files, I can store
        code.delete_object('test.sh')
        code.store()

        self.assertEqual(code.get_remote_computer().pk, self.computer.pk)  # pylint: disable=no-member
        self.assertEqual(code.get_remote_exec_path(), '/bin/ls')
        self.assertEqual(code.get_execname(), '/bin/ls')

        self.assertTrue(code.can_run_on(self.computer))
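        # A remote code is bound to one specific computer; an otherwise
        # identical computer stored under another label does not qualify.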
        othercomputer = orm.Computer(
            label='another_localhost',
            hostname='localhost',
            transport_type='local',
            scheduler_type='pbspro',
            workdir='/tmp/aiida'
        ).store()
        self.assertFalse(code.can_run_on(othercomputer))
Example no. 27
from aiida import is_dbenv_loaded, load_dbenv

if not is_dbenv_loaded():
    load_dbenv(profile='aiida_test')
from aiida.orm import Code, DataFactory
from aiida.orm import load_node
#from aiida.work.run import run
from fleur_workflows.band import fleur_band_wc

StructureData = DataFactory('structure')
ParameterData = DataFactory('parameter')
KpointsData = DataFactory('array.kpoints')
FleurinpData = DataFactory('fleur.fleurinp')

###############################
# Set your values here
# codename2 = 'fleur_iff@local_iff'
codename2 = 'fleur_iff003_v0_27@iff003'
###############################

code2 = Code.get_from_string(codename2)

fleurinp = load_node(1684)
fleur_calc = load_node(1693)
remote = fleur_calc.out.remote_folder
wf_para = ParameterData(dict={'queue': 'th123_node'})

#res = band.run(fleurinp=fleurinp, remote=remote, fleur=code2)
res = fleur_band_wc.run(wf_parameters=wf_para,
                        fleurinp=fleurinp,
                        remote=remote,
                        fleur=code2)
Example no. 28
    @classmethod
    def setUpClass(cls, *args, **kwargs):
        super(TestVerdiCalculation, cls).setUpClass(*args, **kwargs)
        from aiida.backends.tests.utils.fixtures import import_archive_fixture
        from aiida.common.exceptions import ModificationNotAllowed
        from aiida.common.links import LinkType
        from aiida.orm import Code, Computer, Group, Node, JobCalculation, CalculationFactory
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.work.processes import ProcessState

        rmq_config = rmq.get_rmq_config()

        # The two runners need to share a common event loop; otherwise the
        # first will never send its messages while the daemon runner is
        # listening to intercept them
        cls.runner = runners.Runner(rmq_config=rmq_config,
                                    rmq_submit=True,
                                    poll_interval=0.)
        cls.daemon_runner = runners.DaemonRunner(rmq_config=rmq_config,
                                                 rmq_submit=True,
                                                 poll_interval=0.)

        cls.computer = Computer(name='comp',
                                hostname='localhost',
                                transport_type='local',
                                scheduler_type='direct',
                                workdir='/tmp/aiida').store()

        cls.code = Code(remote_computer_exec=(cls.computer,
                                              '/bin/true')).store()
        cls.group = Group(name='test_group').store()
        cls.node = Node().store()
        cls.calcs = []

        from aiida.orm.backend import construct_backend
        backend = construct_backend()
        authinfo = backend.authinfos.create(
            computer=cls.computer, user=backend.users.get_automatic_user())
        authinfo.store()

        # Create 13 JobCalculations (one for each CalculationState)
        for calculation_state in calc_states:

            calc = JobCalculation(computer=cls.computer,
                                  resources={
                                      'num_machines': 1,
                                      'num_mpiprocs_per_machine': 1
                                  }).store()

            # Trying to set NEW will raise, but in this case we don't need to change the state
            try:
                calc._set_state(calculation_state)
            except ModificationNotAllowed:
                pass

            try:
                exit_status = JobCalculationExitStatus[calculation_state]
            except KeyError:
                if calculation_state == 'IMPORTED':
                    calc._set_process_state(ProcessState.FINISHED)
                else:
                    calc._set_process_state(ProcessState.RUNNING)
            else:
                calc._set_exit_status(exit_status)
                calc._set_process_state(ProcessState.FINISHED)

            cls.calcs.append(calc)

            if calculation_state == 'PARSING':

                cls.KEY_ONE = 'key_one'
                cls.KEY_TWO = 'key_two'
                cls.VAL_ONE = 'val_one'
                cls.VAL_TWO = 'val_two'

                output_parameters = ParameterData(dict={
                    cls.KEY_ONE: cls.VAL_ONE,
                    cls.KEY_TWO: cls.VAL_TWO,
                }).store()

                output_parameters.add_link_from(calc,
                                                'output_parameters',
                                                link_type=LinkType.RETURN)

                # Create shortcut for easy dereferencing
                cls.result_job = calc

                # Add a single calc to a group
                cls.group.add_nodes([calc])

        # Load the fixture containing a single ArithmeticAddCalculation node
        import_archive_fixture(
            'calculation/simpleplugins.arithmetic.add.aiida')

        # Get the imported ArithmeticAddCalculation node
        ArithmeticAddCalculation = CalculationFactory(
            'simpleplugins.arithmetic.add')
        calculations = QueryBuilder().append(ArithmeticAddCalculation).all()[0]
        cls.arithmetic_job = calculations[0]
Example no. 29
def test_bands_wc(fresh_aiida_env, potentials, mock_vasp):
    """Test with mocked vasp code."""
    from aiida.orm import Code, RemoteData
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run

    workchain = WorkflowFactory('vasp.bands')

    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)

    structure = PoscarParser(file_path=data_path('test_bands_wc', 'inp', 'POSCAR')).structure
    parameters = IncarParser(file_path=data_path('test_bands_wc', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_bands_wc'
    # Make sure we replace encut with pwcutoff
    del parameters['encut']
    parameters = {'vasp': parameters}
    parameters['electronic'] = {'pwcutoff': 200}

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.parameters = get_data_node('dict', dict=parameters)
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                       'max_wallclock_seconds': 3600
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    inputs.verbose = get_data_node('bool', True)
    # Also set the restart folder as we assume a bands data will start from
    # a previous calculation that is sitting in the restart folder
    inputs.restart_folder = RemoteData(computer=inputs.code.computer, remote_path=data_path('test_bands_wc', 'inp'))
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    assert 'bands' in results
    kpoints = results['bands'].get_kpoints()
    test_array = np.array([[0., 0., 0.], [0.02272727, 0., 0.02272727], [0.04545454, 0., 0.04545454], [0.06818182, 0., 0.06818182],
                           [0.09090909, 0., 0.09090909], [0.11363636, 0., 0.11363636], [0.13636364, 0., 0.13636364],
                           [0.15909091, 0., 0.15909091], [0.18181818, 0., 0.18181818], [0.20454545, 0., 0.20454545],
                           [0.22727273, 0., 0.22727273], [0.25, 0., 0.25], [0.27272727, 0., 0.27272727], [0.29545455, 0., 0.29545455],
                           [0.31818182, 0., 0.31818182], [0.34090909, 0., 0.34090909], [0.36363636, 0., 0.36363636],
                           [0.38636364, 0., 0.38636364], [0.40909091, 0., 0.40909091], [0.43181818, 0., 0.43181818],
                           [0.45454545, 0., 0.45454545], [0.47727273, 0., 0.47727273], [0.5, 0., 0.5], [0.51785714, 0.03571429, 0.51785714],
                           [0.53571429, 0.07142857, 0.53571429], [0.55357143, 0.10714286, 0.55357143], [0.57142857, 0.14285714, 0.57142857],
                           [0.58928571, 0.17857143, 0.58928571], [0.60714286, 0.21428571, 0.60714286], [0.625, 0.25, 0.625],
                           [0.375, 0.375, 0.75], [0.35869565, 0.35869565, 0.7173913], [0.3423913, 0.3423913, 0.68478261],
                           [0.32608696, 0.32608696, 0.65217391], [0.30978261, 0.30978261, 0.61956522], [0.29347826, 0.29347826, 0.58695652],
                           [0.27717391, 0.27717391, 0.55434783], [0.26086957, 0.26086957, 0.52173913], [0.24456522, 0.24456522, 0.48913043],
                           [0.22826087, 0.22826087, 0.45652174], [0.21195652, 0.21195652, 0.42391304], [0.19565217, 0.19565217, 0.39130435],
                           [0.17934783, 0.17934783, 0.35869565], [0.16304348, 0.16304348, 0.32608696], [0.14673913, 0.14673913, 0.29347826],
                           [0.13043478, 0.13043478, 0.26086957], [0.11413044, 0.11413044, 0.22826087], [0.09782609, 0.09782609, 0.19565217],
                           [0.08152174, 0.08152174, 0.16304348], [0.06521739, 0.06521739, 0.13043478], [0.04891304, 0.04891304, 0.09782609],
                           [0.0326087, 0.0326087, 0.06521739], [0.01630435, 0.01630435, 0.0326087], [0., 0., 0.],
                           [0.02631579, 0.02631579, 0.02631579], [0.05263158, 0.05263158, 0.05263158], [0.07894737, 0.07894737, 0.07894737],
                           [0.10526316, 0.10526316, 0.10526316], [0.13157895, 0.13157895, 0.13157895], [0.15789474, 0.15789474, 0.15789474],
                           [0.18421053, 0.18421053, 0.18421053], [0.21052632, 0.21052632, 0.21052632], [0.2368421, 0.2368421, 0.2368421],
                           [0.26315789, 0.26315789, 0.26315789], [0.28947368, 0.28947368, 0.28947368], [0.31578947, 0.31578947, 0.31578947],
                           [0.34210526, 0.34210526, 0.34210526], [0.36842105, 0.36842105, 0.36842105], [0.39473684, 0.39473684, 0.39473684],
                           [0.42105263, 0.42105263, 0.42105263], [0.44736842, 0.44736842, 0.44736842], [0.47368421, 0.47368421, 0.47368421],
                           [0.5, 0.5, 0.5], [0.5, 0.48333333, 0.51666667], [0.5, 0.46666667, 0.53333333], [0.5, 0.45, 0.55],
                           [0.5, 0.43333333, 0.56666667], [0.5, 0.41666667, 0.58333333], [0.5, 0.4, 0.6], [0.5, 0.38333333, 0.61666667],
                           [0.5, 0.36666667, 0.63333333], [0.5, 0.35, 0.65], [0.5, 0.33333333, 0.66666667], [0.5, 0.31666667, 0.68333333],
                           [0.5, 0.3, 0.7], [0.5, 0.28333333, 0.71666667], [0.5, 0.26666667, 0.73333333], [0.5, 0.25, 0.75],
                           [0.5, 0.225, 0.725], [0.5, 0.2, 0.7], [0.5, 0.175, 0.675], [0.5, 0.15, 0.65], [0.5, 0.125, 0.625],
                           [0.5, 0.1, 0.6], [0.5, 0.075, 0.575], [0.5, 0.05, 0.55], [0.5, 0.025, 0.525], [0.5, 0., 0.5]])
    np.testing.assert_allclose(kpoints, test_array)
    bands = results['bands'].get_bands()
    assert bands.shape == (1, 98, 20)
    np.testing.assert_allclose(bands[0, 0, 0:3], np.array([-6.0753, 6.0254, 6.0254]))
    np.testing.assert_allclose(bands[0, 2, 0:3], np.array([-6.0386, 5.7955, 5.8737]))
    np.testing.assert_allclose(bands[0, 97, 0:3], np.array([-1.867, -1.867, 3.1102]))
Example no. 30
lammps_machine = {
    'num_machines': 1,
    'parallel_env': 'mpi*',
    'tot_num_mpiprocs': 16
}

parameters_md = {
    'timestep': 0.001,
    'temperature': 300,
    'thermostat_variable': 0.5,
    'equilibrium_steps': 100,
    'total_steps': 2000,
    'dump_rate': 1
}

code = Code.get_from_string(codename)

calc = code.new_calc(max_wallclock_seconds=3600, resources=lammps_machine)

calc.label = "test lammps calculation"
calc.description = "A much longer description"
calc.use_code(code)
calc.use_structure(structure)
calc.use_potential(ParameterData(dict=potential))

calc.use_parameters(ParameterData(dict=parameters_md))

test_only = False

if test_only:  # It will not be submitted
    import os
Example no. 31
def test_converge_wc_pw(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    """Test convergence workflow using mock code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run

    workchain = WorkflowFactory('vasp.converge')

    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer).store()

    structure = PoscarParser(file_path=data_path('test_converge_wc/pw/200',
                                                 'inp', 'POSCAR')).structure
    parameters = IncarParser(
        file_path=data_path('test_converge_wc/pw/200', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_converge_wc'
    parameters = {
        k: v
        for k, v in parameters.items()
        if k not in ['isif', 'ibrion', 'encut', 'nsw']
    }
    kpoints = KpointsParser(file_path=data_path('test_converge_wc/pw/200',
                                                'inp', 'KPOINTS')).kpoints

    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.kpoints = kpoints
    inputs.parameters = get_data_node('dict', dict={'incar': parameters})
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                       'max_wallclock_seconds': 3600
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    relax = AttributeDict()
    converge = AttributeDict()
    relax.perform = get_data_node('bool', False)
    converge.relax = get_data_node('bool', False)
    converge.testing = get_data_node('bool', True)
    converge.compress = get_data_node('bool', False)
    converge.displace = get_data_node('bool', False)
    converge.pwcutoff_samples = get_data_node('int', 3)
    converge.k_samples = get_data_node('int', 3)
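    # Converge only the plane-wave cutoff and the k-point grid; relaxation,
    # compression and displacement tests are switched off.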
    inputs.relax = relax
    inputs.converge = converge
    inputs.verbose = get_data_node('bool', True)
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    assert 'converge' in results
    converge = results['converge']
    assert 'data' in converge
    conv_data = converge['data']
    try:
        conv_data = conv_data.get_array('pw_regular')
    except KeyError:
        pytest.fail('Did not find pw_regular in converge.data')
    conv_data_test = np.array([[200.0, -10.77974998, 0.0, 0.0, 0.5984],
                               [250.0, -10.80762044, 0.0, 0.0, 0.5912],
                               [300.0, -10.82261992, 0.0, 0.0, 0.5876]])
    np.testing.assert_allclose(conv_data, conv_data_test)

    assert 'pwcutoff_recommended' in converge
    try:
        _encut = converge['pwcutoff_recommended'].value
        np.testing.assert_equal(_encut, 300)
    except AttributeError:
        pytest.fail('pwcutoff_recommended does not have the expected format')
Example no. 32
def test_converge_wc(fresh_aiida_env, potentials, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run

    workchain = WorkflowFactory('vasp.converge')

    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)

    structure = PoscarParser(
        file_path=data_path('test_converge_wc', 'inp', 'POSCAR')).structure
    parameters = IncarParser(
        file_path=data_path('test_converge_wc', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_converge_wc'
    parameters = {
        k: v
        for k, v in parameters.items()
        if k not in ['isif', 'ibrion', 'encut', 'nsw']
    }

    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.parameters = get_data_node('dict', dict={'incar': parameters})
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                       'max_wallclock_seconds': 3600
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    relax = AttributeDict()
    converge = AttributeDict()
    converge.relax = get_data_node('bool', False)
    converge.compress = get_data_node('bool', False)
    converge.displace = get_data_node('bool', False)
    converge.pwcutoff_samples = get_data_node('int', 3)
    converge.k_samples = get_data_node('int', 3)
    relax.perform = get_data_node('bool', True)
    inputs.relax = relax
    inputs.converge = converge
    inputs.verbose = get_data_node('bool', True)
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    converge = results['converge']
    assert 'data' in converge

    conv_data = converge['data']
    try:
        conv_data.get_array('pw_regular')
    except KeyError:
        pytest.fail('Did not find pw_regular in converge.data')
    try:
        conv_data.get_array('kpoints_regular')
    except KeyError:
        pytest.fail('Did not find kpoints_regular in converge.data')

    assert 'pwcutoff_recommended' in converge
    try:
        _encut = converge['pwcutoff_recommended'].value
    except AttributeError:
        pytest.fail('pwcutoff_recommended does not have the expected format')
    assert 'kpoints_recommended' in converge
    try:
        _kpoints = converge['kpoints_recommended'].get_kpoints_mesh()
    except AttributeError:
        pytest.fail('kpoints_recommended does not have the expected format')
Example no. 33
#!/usr/bin/env python
#from aiida.backends.utils import load_dbenv
#load_dbenv()
codename = 'QE5.4@daint-gpu'
from aiida.orm import Code
code = Code.get_from_string(codename)
from aiida.orm import DataFactory
StructureData = DataFactory('structure')
alat = 5.4 # angstrom
cell = [[alat, 0.10, 0.20,],
        [0.30, alat, 0.40,],
        [0.50, 0.60, alat,],
       ]
s = StructureData(cell=cell)
s.append_atom(position=(0.,0.,0.),symbols='Si')
s.append_atom(position=(alat/2.,alat/2.,0.),symbols='Si')
s.append_atom(position=(alat/2.,0.,alat/2.),symbols='Si')
s.append_atom(position=(0.,alat/2.,alat/2.),symbols='Si')
s.append_atom(position=(alat/4.,alat/4.,alat/4.),symbols='Si')
s.append_atom(position=(3.*alat/4.,3.*alat/4.,alat/4.),symbols='Si')
s.append_atom(position=(3.*alat/4.,alat/4.,3.*alat/4.),symbols='Si')
s.append_atom(position=(alat/4.,3.*alat/4.,3.*alat/4.),symbols='Si')
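# The eight atoms above form the conventional cubic diamond cell of silicon;
# the slightly skewed cell vectors presumably give the 'relax' calculation
# nontrivial forces and stresses to work on.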
ParameterData = DataFactory('parameter')
parameters = ParameterData(dict={
          'CONTROL': {
              'calculation': 'relax',
              'restart_mode': 'from_scratch',
              'wf_collect': True,
              'tprnfor': True,
              'tstress': True,
              },