Code Example #1
def launch(code, structure, pseudo_family, daemon, protocol):
    """
    Run the PwBandStructureWorkChain for a given input structure
    to compute the band structure of the relaxed structure
    """
    import click
    from aiida.orm.data.base import Str
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit

    PwBandStructureWorkChain = WorkflowFactory(
        'quantumespresso.pw.band_structure')

    inputs = {
        'code': code,
        'structure': structure,
        'pseudo_family': Str(pseudo_family),
        'protocol': Str(protocol),
    }

    if daemon:
        workchain = submit(PwBandStructureWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PwBandStructureWorkChain.__name__, workchain.pk))
    else:
        run(PwBandStructureWorkChain, **inputs)
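In the original plugin this function is the body of a click CLI command, so code and structure arrive as AiiDA nodes already resolved by the command-line options (the decorators are stripped in this excerpt). A minimal sketch of calling it directly, for example from verdi shell, could look like the following; the code label, node PK, pseudo family and protocol name are placeholders, not values from the original source.

# Hypothetical direct call; all identifiers below are placeholders.
from aiida.orm import Code, load_node

code = Code.get_from_string('pw-6.3@localhost')   # a configured pw.x code
structure = load_node(1234)                       # an existing StructureData node
launch(code, structure, pseudo_family='SSSP_efficiency', daemon=False, protocol='standard')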
Code Example #2
def launch(code, calculation, kpoints, max_num_machines, max_wallclock_seconds,
           daemon, clean_workdir):
    """
    Run the PhBaseWorkChain for a previously completed PwCalculation
    """
    import click
    from aiida.orm.data.base import Bool
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    PhBaseWorkChain = WorkflowFactory('quantumespresso.ph.base')

    parameters = {'INPUTPH': {}}

    options = get_default_options(max_num_machines, max_wallclock_seconds)

    inputs = {
        'code': code,
        'qpoints': kpoints,
        'parent_folder': calculation.out.remote_folder,
        'parameters': ParameterData(dict=parameters),
        'options': ParameterData(dict=options),
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    if daemon:
        workchain = submit(PhBaseWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PhBaseWorkChain.__name__, workchain.pk))
    else:
        run(PhBaseWorkChain, **inputs)
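To drive this function by hand one needs a finished PwCalculation node and a q-point mesh; a rough sketch, with the code label, PK and mesh as placeholders, might be:

# Hypothetical invocation; the labels, PK and mesh below are placeholders.
from aiida.orm import Code, load_node
from aiida.orm.data.array.kpoints import KpointsData

code = Code.get_from_string('ph-6.3@localhost')   # a configured ph.x code
calculation = load_node(5678)                     # a previously completed PwCalculation
qpoints = KpointsData()
qpoints.set_kpoints_mesh([2, 2, 2])               # q-point grid for ph.x
launch(code, calculation, qpoints, max_num_machines=1,
       max_wallclock_seconds=1800, daemon=False, clean_workdir=True)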
Code Example #3
def eos(structure, codename, pseudo_family):
    """Run an SCF calculation at several rescaled volumes of the structure (equation of state)."""
    # `rescale`, `generate_scf_input_params`, `Float`, `run` and `PwCalculation`
    # are defined or imported elsewhere in the script this excerpt belongs to.
    Proc = PwCalculation.process()
    results = {}
    for s in (0.98, 0.99, 1.0, 1.02, 1.04):
        rescaled = rescale(structure, Float(s))
        inputs = generate_scf_input_params(rescaled, codename, pseudo_family)
        outputs = run(Proc, **inputs)
        res = outputs['output_parameters'].dict
        results[str(s)] = res

    return results
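A hypothetical way to drive the eos() helper above; the PK, code label and pseudo family are placeholders, and the loop simply prints whatever parsed output parameters the pw.x parser returned for each volume scaling.

# Placeholders only; adjust to your own database contents.
from __future__ import print_function
from aiida.orm import load_node

structure = load_node(1234)   # a StructureData node
eos_results = eos(structure, 'pw-6.3@localhost', 'SSSP_efficiency')
for scale in sorted(eos_results):
    print(scale, eos_results[scale])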
Code Example #4
def launch(code, calculation, kpoints, max_num_machines, max_wallclock_seconds,
           daemon, clean_workdir, parallelize_atoms):
    """
    Run the HpWorkChain for a completed Hubbard PwCalculation
    """
    import click
    from aiida.orm.data.base import Bool
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    HpWorkChain = WorkflowFactory('quantumespresso.hp.main')

    parameters = {'INPUTHP': {}}

    inputs = {
        'code': code,
        'parent_calculation': calculation,
        'qpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
        'options': ParameterData(dict=get_default_options(
            max_num_machines, max_wallclock_seconds)),
        'clean_workdir': Bool(clean_workdir),
        'parallelize_atoms': Bool(parallelize_atoms),
    }

    if daemon:
        workchain = submit(HpWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            HpWorkChain.__name__, workchain.pk))
    else:
        run(HpWorkChain, **inputs)
Code Example #5
def launch(code_pw, code_hp, structure, pseudo_family, kpoints, qpoints,
           ecutwfc, ecutrho, hubbard_u, starting_magnetization,
           automatic_parallelization, clean_workdir, max_num_machines,
           max_wallclock_seconds, daemon, meta_convergence, is_insulator,
           parallelize_atoms):
    """
    Run the SelfConsistentHubbardWorkChain for a given input structure
    """
    import click
    from aiida.orm.data.base import Bool, Str
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    SelfConsistentHubbardWorkChain = WorkflowFactory(
        'quantumespresso.hp.hubbard')

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
            'lda_plus_u': True,
        },
    }

    parameters_hp = {'INPUTHP': {}}

    options = get_default_options(max_num_machines, max_wallclock_seconds)

    structure_kinds = structure.get_kind_names()
    hubbard_u_kinds = [kind for kind, value in hubbard_u]
    hubbard_u = {kind: value for kind, value in hubbard_u}

    if not set(hubbard_u_kinds).issubset(structure_kinds):
        raise click.BadParameter(
            'the kinds in the specified starting Hubbard U values {} are not a subset '
            'of the kinds in the structure {}'.format(hubbard_u_kinds, structure_kinds),
            param_hint='hubbard_u')

    if starting_magnetization:

        parameters['SYSTEM']['nspin'] = 2

        for kind, magnetization in starting_magnetization:

            if kind not in structure_kinds:
                raise click.BadParameter(
                    'the provided structure does not contain the kind {}'.format(kind),
                    param_hint='starting_magnetization')

            parameters['SYSTEM'].setdefault(
                'starting_magnetization', {})[kind] = magnetization

    inputs = {
        'structure': structure,
        'hubbard_u': ParameterData(dict=hubbard_u),
        'meta_convergence': Bool(meta_convergence),
        'is_insulator': Bool(is_insulator),
        'scf': {
            'code': code_pw,
            'pseudo_family': Str(pseudo_family),
            'kpoints': kpoints,
            'parameters': ParameterData(dict=parameters),
            'options': ParameterData(dict=options)
        },
        'hp': {
            'code': code_hp,
            'qpoints': qpoints,
            'parameters': ParameterData(dict=parameters_hp),
            'options': ParameterData(dict=options),
            'parallelize_atoms': Bool(parallelize_atoms),
        }
    }

    if daemon:
        workchain = submit(SelfConsistentHubbardWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            SelfConsistentHubbardWorkChain.__name__, workchain.pk))
    else:
        run(SelfConsistentHubbardWorkChain, **inputs)
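The hubbard_u and starting_magnetization arguments above are unpacked as (kind, value) pairs, so the caller is expected to pass something like the following (purely illustrative values):

# Illustrative placeholder values matching the unpacking done in the function body.
hubbard_u = [('Co', 5.0)]                 # one (kind name, U value in eV) pair per Hubbard site
starting_magnetization = [('Co', 0.5)]    # one (kind name, magnetization) pair per magnetic kind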
Code Example #6
def launch(code, structure, pseudo_family, kpoints, max_num_machines,
           max_wallclock_seconds, daemon, ecutwfc, ecutrho, hubbard_u,
           hubbard_v, hubbard_file_pk, starting_magnetization, smearing,
           automatic_parallelization, clean_workdir):
    """
    Run the PwBaseWorkChain for a given input structure
    """
    import click
    from aiida.orm.data.base import Bool, Str
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import (
        get_default_options, get_automatic_parallelization_options)
    # NOTE: `validate`, used below, refers to the input-validation helpers shipped
    # with the aiida-quantumespresso CLI; its import is not shown in this excerpt.

    PwBaseWorkChain = WorkflowFactory('quantumespresso.pw.base')

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
        },
    }

    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    inputs = {
        'code': code,
        'structure': structure,
        'pseudo_family': Str(pseudo_family),
        'kpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
    }

    if automatic_parallelization:
        automatic_parallelization = get_automatic_parallelization_options(
            max_num_machines, max_wallclock_seconds)
        inputs['automatic_parallelization'] = ParameterData(
            dict=automatic_parallelization)
    else:
        options = get_default_options(max_num_machines, max_wallclock_seconds)
        inputs['options'] = ParameterData(dict=options)

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    if daemon:
        workchain = submit(PwBaseWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PwBaseWorkChain.__name__, workchain.pk))
    else:
        run(PwBaseWorkChain, **inputs)
Code Example #7
#!/usr/bin/env runaiida

from __future__ import print_function

from aiida.work.launch import run

from serialize_workchain import SerializeWorkChain

if __name__ == '__main__':
    print(run(
        SerializeWorkChain,
        a=1, b=1.2, c=True
    ))
    # Result: {'a': 1, 'b': 1.2, 'c': True}
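serialize_workchain.py itself is not part of this excerpt. The point of the snippet is that plain Python values (1, 1.2, True) are accepted and serialized to AiiDA nodes on entry. As a rough sketch only, written against the current aiida-core API (aiida.engine / aiida.orm) rather than the provisional aiida.work API used above, such a work chain might look like this; none of it is taken from the original file.

# Sketch under stated assumptions: modern aiida-core API, illustrative port names.
from aiida.engine import WorkChain
from aiida.orm import to_aiida_type


class SerializeWorkChain(WorkChain):
    """Accept plain Python values and pass them through as Data nodes."""

    @classmethod
    def define(cls, spec):
        super().define(spec)
        # `serializer=to_aiida_type` converts 1, 1.2 and True into Int, Float and Bool nodes.
        spec.input('a', serializer=to_aiida_type)
        spec.input('b', serializer=to_aiida_type)
        spec.input('c', serializer=to_aiida_type)
        spec.outline(cls.echo)
        spec.output('a')
        spec.output('b')
        spec.output('c')

    def echo(self):
        # Return the (now stored) input nodes as outputs so run() yields them in a dict.
        self.out('a', self.inputs.a)
        self.out('b', self.inputs.b)
        self.out('c', self.inputs.c)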
Code Example #8
from aiida.orm.data.int import Int
from aiida.work.launch import run
from aiida.work.workchain import WorkChain


class AddAndMultiplyWorkChain(WorkChain):
    ...


result = run(AddAndMultiplyWorkChain, a=Int(1), b=Int(2), c=Int(3))
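The class body is elided above; one plausible shape for it, using the same-era aiida.work API as the imports in the snippet, is sketched below. The outline steps, context keys and output name are illustrative assumptions, not taken from the original source.

# Sketch only: a possible body for the elided AddAndMultiplyWorkChain above.
class AddAndMultiplyWorkChain(WorkChain):

    @classmethod
    def define(cls, spec):
        super(AddAndMultiplyWorkChain, cls).define(spec)
        spec.input('a', valid_type=Int)
        spec.input('b', valid_type=Int)
        spec.input('c', valid_type=Int)
        spec.outline(cls.add, cls.multiply, cls.results)

    def add(self):
        # Keep the intermediate sum in the context for the next outline step.
        self.ctx.sum = self.inputs.a.value + self.inputs.b.value

    def multiply(self):
        self.ctx.product = self.ctx.sum * self.inputs.c.value

    def results(self):
        # Expose the final value as an output node; run() returns it in the result dict.
        self.out('result', Int(self.ctx.product))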
Code Example #9
File: run.py (project: chrisjsewell/aiida_core)
    def test_run(self):
        inputs = {'a': Int(2), 'b': Str('test')}
        result = run(DummyProcess, **inputs)