Example no. 1
def generate_inputs(process_class: engine.Process,
                    protocol: Dict,
                    code: orm.Code,
                    structure: orm.StructureData,
                    override: Dict[str, Any] = None) -> Dict[str, Any]:
    """Generate the input parameters for the given workchain type for a given code and structure.

    The override argument can be used to pass a dictionary with values for specific inputs that should override the
    defaults. This dictionary should have the same nested structure as the final input dictionary would have for the
    workchain submission.

    :param process_class: process class, either calculation or workchain,
        i.e. ``AbinitCalculation`` or ``AbinitBaseWorkChain``
    :param protocol: the protocol based on which to choose input parameters
    :param code: the code or code name to use
    :param structure: the structure
    :param override: a dictionary to override specific inputs
    :return: input dictionary
    """
    # pylint: disable=too-many-arguments,unused-argument
    from aiida.common.lang import type_check

    AbinitCalculation = plugins.CalculationFactory('abinit')  # pylint: disable=invalid-name
    AbinitBaseWorkChain = plugins.WorkflowFactory('abinit.base')  # pylint: disable=invalid-name

    type_check(structure, orm.StructureData)

    if not isinstance(code, orm.Code):
        try:
            code = orm.load_code(code)
        except (exceptions.MultipleObjectsError,
                exceptions.NotExistent) as exception:
            raise ValueError('could not load the code {}: {}'.format(
                code, exception)) from exception

    if process_class == AbinitCalculation:
        protocol = protocol['abinit']
        dictionary = generate_inputs_calculation(protocol, code, structure,
                                                 override)
    elif process_class == AbinitBaseWorkChain:
        protocol = protocol['base']
        dictionary = generate_inputs_base(protocol, code, structure, override)
    else:
        raise NotImplementedError(
            'process class {} is not supported'.format(process_class))

    return dictionary
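
# Hedged usage sketch (not part of the snippet above): ``protocol`` is assumed to be the
# dictionary loaded by the input generator, containing the ``abinit`` and ``base`` entries
# accessed in ``generate_inputs``; the code label, structure PK and override contents are
# illustrative assumptions.
AbinitBaseWorkChain = plugins.WorkflowFactory('abinit.base')
structure = orm.load_node(1234)  # hypothetical PK of an existing StructureData node
inputs = generate_inputs(
    AbinitBaseWorkChain,
    protocol=protocol,  # full protocol dictionary, assumed to be defined earlier
    code='abinit@localhost',  # a code label; orm.load_code is called internally
    structure=structure,
    override={'abinit': {'parameters': {'nstep': 50}}},  # hypothetical nested override
)
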
    def __init__(self,
                 pw_code_id: ty.Union[str, int],
                 structure_group_id: ty.Union[str, int],
                 pseudo_family_id: ty.Union[str, int],
                 *args,
                 structure_filters: ty.Optional[ty.Dict[str, ty.Any]] = None,
                 **kwargs):
        """A SubmissionController for PwBaseWorkChains."""
        super().__init__(*args, **kwargs)
        self._code = orm.load_code(identifier=pw_code_id)
        self._process_class = plugins.WorkflowFactory(self.WORKFLOW_ENTRY_POINT)
        self._structure_group = orm.load_group(identifier=structure_group_id)
        self._structure_filters = structure_filters if structure_filters is not None else {}
        self._pseudo_family = orm.load_group(identifier=pseudo_family_id)
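
# Hedged usage sketch: instantiate the controller defined above. Its class name is not shown
# in the snippet, so ``PwBaseSubmissionController`` is an assumed placeholder; the remaining
# controller options are forwarded to the base class via *args/**kwargs.
controller = PwBaseSubmissionController(
    pw_code_id='pw@localhost',  # code label, PK or UUID
    structure_group_id='structures/to_submit',  # group holding the input StructureData nodes
    pseudo_family_id='SSSP/1.3/PBE/efficiency',  # pseudopotential family group label
    structure_filters={'extras.natoms': {'<': 50}},  # hypothetical QueryBuilder-style filter
)
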
def generate_inputs(process_class: engine.Process,
                    protocol: Dict,
                    code: orm.Code,
                    structure: orm.StructureData,
                    override: Dict[str, Any] = None) -> Dict[str, Any]:
    """Generate the input parameters for the given workchain type for a given code, structure and pseudo family.

    The override argument can be used to pass a dictionary with values for specific inputs that should override the
    defaults. This dictionary should have the same nested structure as the final input dictionary would have for the
    workchain submission. For example, to override the wavefunction cutoff in the ``base`` namespace of the nested
    relax workchain, one would pass:

        override = {'relax': {'base': {'ecutwfc': 400}}}

    :param process_class: process class, either calculation or workchain, i.e. ``CastepCalculation`` or
        ``CastepBaseWorkChain``
    :param protocol: the protocol based on which to choose input parameters
    :param code: the code or code name to use
    :param structure: the structure
    :param override: a dictionary to override specific inputs
    :return: input dictionary
    """
    # pylint: disable=too-many-arguments,unused-argument
    from aiida.common.lang import type_check

    family_name = protocol['relax']['base']['pseudos_family']
    if isinstance(family_name, orm.Str):
        family_name = family_name.value
    try:
        otfg_family = OTFGGroup.objects.get(label=family_name)
    except exceptions.NotExistent as exception:
        raise ValueError(
            'protocol `{}` requires the pseudo family `{}` but it could not be found.'.format(
                protocol['name'], family_name)) from exception

    CastepCalculation = plugins.CalculationFactory('castep.castep')  # pylint: disable=invalid-name
    CastepBaseWorkChain = plugins.WorkflowFactory('castep.base')  # pylint: disable=invalid-name
    CastepRelaxWorkChain = plugins.WorkflowFactory('castep.relax')  # pylint: disable=invalid-name

    type_check(structure, orm.StructureData)

    if not isinstance(code, orm.Code):
        try:
            code = orm.load_code(code)
        except (exceptions.MultipleObjectsError,
                exceptions.NotExistent) as exception:
            raise ValueError('could not load the code {}: {}'.format(
                code, exception)) from exception

    if process_class == CastepCalculation:
        protocol = protocol['relax']['base']['calc']
        dictionary = generate_inputs_calculation(protocol, code, structure,
                                                 otfg_family, override)
    elif process_class == CastepBaseWorkChain:
        protocol = protocol['relax']['base']
        dictionary = generate_inputs_base(protocol, code, structure,
                                          otfg_family, override)
    elif process_class == CastepRelaxWorkChain:
        protocol = protocol['relax']
        dictionary = generate_inputs_relax(protocol, code, structure,
                                           otfg_family, override)
    else:
        raise NotImplementedError(
            'process class {} is not supported'.format(process_class))

    return dictionary
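
# Hedged usage sketch mirroring the docstring above: ``protocol`` is the full protocol
# dictionary (it must contain ``name`` and the ``relax`` tree); the code label, structure
# and override contents are illustrative assumptions.
CastepRelaxWorkChain = plugins.WorkflowFactory('castep.relax')
inputs = generate_inputs(
    CastepRelaxWorkChain,
    protocol=protocol,  # assumed to be loaded by the input generator
    code='castep@localhost',  # a code label; orm.load_code is called internally
    structure=structure,  # an existing orm.StructureData node
    override={'relax': {'base': {'ecutwfc': 400}}},  # the override example from the docstring
)
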
# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.orca` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.orca')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si', )),
        'engines': {
            'relax': {
                'code': generate_code('orca_main').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }
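

# Hedged sketch of how such a fixture is typically consumed in these test modules; the test
# name and the ProcessBuilder assertion are assumptions, not part of the snippet above.
def test_get_builder(default_builder_inputs):
    """Check that the minimal default inputs produce a valid builder."""
    builder = GENERATOR.get_builder(**default_builder_inputs)
    assert isinstance(builder, engine.ProcessBuilder)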

Example no. 5
import sys
if len(sys.argv) > 1 and sys.argv[1] == "--pre":
    pass
else:
    from aiida import orm, plugins
    from aiida.engine import run

    MultiplyAddWorkChain = plugins.WorkflowFactory('arithmetic.multiply_add')

    builder = MultiplyAddWorkChain.get_builder()
    builder.code = orm.load_code(label='add-local')
    builder.x = orm.Int(2)
    builder.y = orm.Int(3)
    builder.z = orm.Int(5)

    for i in range(20):
        run(builder)
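
# Hedged alternative (not in the original snippet): for repeated launches it is usually
# preferable to hand the work to the AiiDA daemon with ``submit`` (non-blocking) instead
# of calling ``run`` in a loop; this requires a running daemon.
from aiida.engine import submit

for i in range(20):
    node = submit(builder)
    print(f'submitted MultiplyAddWorkChain<{node.pk}>')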
Example no. 6
    return cleaned


def clean_workchain_calcs(workchain):
    """Clean all remote directories of a workchain's descendant calculations."""
    cleaned_calcs = []

    for called_descendant in workchain.called_descendants:
        if isinstance(called_descendant, orm.CalcJobNode):
            if clean_calcjob_remote(called_descendant):
                cleaned_calcs.append(called_descendant.pk)

    return cleaned_calcs
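

# Hedged usage sketch (the PK below is illustrative): clean the remote folders of all
# calculations launched by a finished work chain.
workchain_node = orm.load_node(1234)  # hypothetical PK of a finished PdosWorkChain
print('cleaned calculations:', clean_workchain_calcs(workchain_node))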


PwBaseWorkChain = plugins.WorkflowFactory('quantumespresso.pw.base')
DosCalculation = plugins.CalculationFactory('quantumespresso.dos')
ProjwfcCalculation = plugins.CalculationFactory('quantumespresso.projwfc')


class PdosWorkChain(ProtocolMixin, WorkChain):
    """A WorkChain to compute Total & Partial Density of States of a structure, using Quantum Espresso."""
    @classmethod
    def define(cls, spec):
        # yapf: disable
        """Define the process specification."""
        super().define(spec)
        spec.input('structure', valid_type=orm.StructureData, help='The input structure.')
        spec.input(
            'serial_clean',
            valid_type=orm.Bool,
Example no. 7
# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.gaussian` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.gaussian')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si', )),
        'engines': {
            'relax': {
                'code': generate_code('gaussian').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }

# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.quantum_espresso` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins
from qe_tools import CONSTANTS

from aiida_common_workflows.workflows.relax.generator import ElectronicType, RelaxType, SpinType

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.quantum_espresso')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si', )),
        'engines': {
            'relax': {
                'code': generate_code('quantumespresso.pw').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }
Example no. 9
import pytest

from aiida import engine
from aiida import plugins
from aiida.orm import StructureData
from aiida.plugins import WorkflowFactory
from aiida_castep.data.otfg import OTFGGroup
from ase.build.bulk import bulk

from aiida_common_workflows.workflows.relax.castep.workchain import CastepCommonRelaxWorkChain
from aiida_common_workflows.workflows.relax.castep.generator import (
    CastepCommonRelaxInputGenerator, generate_inputs, generate_inputs_base, generate_inputs_calculation,
    generate_inputs_relax, ensure_otfg_family, RelaxType, ElectronicType, SpinType
)

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.castep')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def nacl(with_database):  # pylint: disable=invalid-name
    """Get an NaCl structure"""
    structure = StructureData(ase=bulk('NaCl', 'rocksalt', 4.2))
    return structure


@pytest.fixture
def si(with_database):  # pylint: disable=invalid-name
    """Get an NaCl structure"""
    structure = StructureData(ase=bulk('Si', 'diamond', 5.43))
    return structure
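

# Hedged sketch of a test using the fixtures above; the engines dictionary mirrors the
# minimal inputs used in the other test modules, and the assertion is an assumption.
def test_get_builder_nacl(nacl, generate_code):
    """Check that a builder can be obtained for the NaCl fixture."""
    engines = {
        'relax': {
            'code': generate_code('castep.castep').store().uuid,
            'options': {'resources': {'num_machines': 1, 'tot_num_mpiprocs': 1}}
        }
    }
    builder = GENERATOR.get_builder(structure=nacl, engines=engines)
    assert isinstance(builder, engine.ProcessBuilder)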
Example no. 10
def test_default(
    generate_workchain_pdos,
    generate_workchain_pw,
    fixture_localhost,
    generate_remote_data,
    generate_calc_job,
    generate_calc_job_node,
    fixture_sandbox,
    generate_bands_data,
):
    """Test instantiating the WorkChain, then mock its process, by calling methods in the ``spec.outline``."""

    wkchain = generate_workchain_pdos()
    assert wkchain.setup() is None
    assert wkchain.serial_clean() is False

    # run scf
    scf_inputs = wkchain.run_scf()

    scf_wkchain = generate_workchain_pw(inputs=scf_inputs)
    scf_wkchain.node.set_process_state(ProcessState.FINISHED)
    scf_wkchain.node.set_exit_status(0)
    pw_input_helper(scf_wkchain.inputs.pw.parameters.get_dict(), scf_wkchain.inputs.pw.structure)

    remote = generate_remote_data(computer=fixture_localhost, remote_path='/path/on/remote')
    remote.store()
    remote.add_incoming(scf_wkchain.node, link_type=LinkType.RETURN, link_label='remote_folder')

    wkchain.ctx.workchain_scf = scf_wkchain.node
    wkchain.ctx.scf_parent_folder = remote

    assert wkchain.inspect_scf() is None

    # run nscf
    nscf_inputs = wkchain.run_nscf()

    # mock nscf outputs
    # TODO ensure this test fails if the output link from PwCalculation changes from `output_parameters` # pylint: disable=fixme
    mock_workchain = instantiate_process_cls(plugins.WorkflowFactory('quantumespresso.pw.base'), nscf_inputs)
    pw_input_helper(mock_workchain.inputs.pw.parameters.get_dict(), mock_workchain.inputs.pw.structure)

    mock_wknode = mock_workchain.node
    mock_wknode.set_exit_status(0)
    mock_wknode.set_process_state(engine.ProcessState.FINISHED)
    mock_wknode.store()

    remote = generate_remote_data(computer=fixture_localhost, remote_path='/path/on/remote')
    remote.store()
    remote.add_incoming(mock_wknode, link_type=LinkType.RETURN, link_label='remote_folder')

    result = orm.Dict(dict={'fermi_energy': 6.9029595890428})
    result.store()
    result.add_incoming(mock_wknode, link_type=LinkType.RETURN, link_label='output_parameters')

    bands_data = generate_bands_data()
    bands_data.store()
    bands_data.add_incoming(mock_wknode, link_type=LinkType.RETURN, link_label='output_band')

    wkchain.ctx.workchain_nscf = mock_wknode

    assert wkchain.inspect_nscf() is None

    # mock run dos and projwfc, and check that their inputs are acceptable
    dos_inputs, projwfc_inputs = wkchain.run_pdos_parallel()
    generate_calc_job(fixture_sandbox, 'quantumespresso.dos', dos_inputs)
    generate_calc_job(fixture_sandbox, 'quantumespresso.projwfc', projwfc_inputs)

    # mock dos & projwfc outputs
    for calc_type in ['dos', 'projwfc']:
        entry_point = 'quantumespresso.' + calc_type
        mock_calc = generate_calc_job_node(entry_point_name=entry_point, computer=fixture_localhost)
        mock_calc.set_exit_status(0)
        mock_calc.set_process_state(engine.ProcessState.FINISHED)

        result = orm.Dict()
        result.add_incoming(mock_calc, link_type=LinkType.CREATE, link_label='output_parameters')
        result.store()

        wkchain.ctx['calc_' + calc_type] = mock_calc

    assert wkchain.inspect_dos_serial() is None
    assert wkchain.inspect_projwfc_serial() is None
    assert wkchain.inspect_pdos_parallel() is None

    # store results
    wkchain.results()

    wkchain.update_outputs()

    assert set(wkchain.node.get_outgoing().all_link_labels()) == {
        'projwfc__output_parameters', 'dos__output_parameters', 'nscf__remote_folder', 'nscf__output_parameters',
        'nscf__output_band'
    }
Example no. 11
# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.fleur` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.fleur')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si',)),
        'engines': {
            'relax': {
                'code': generate_code('fleur.fleur').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            },
            'inpgen': {
                'code': generate_code('fleur.inpgen').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }

# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.nwchem` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.nwchem')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si', )),
        'engines': {
            'relax': {
                'code': generate_code('nwchem.nwchem').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }

Example no. 13
# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.bigdft` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.bigdft')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si', )),
        'engines': {
            'relax': {
                'code': generate_code('bigdft').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }

def workchain(request) -> CommonRelaxWorkChain:
    """Fixture that parametrizes over all the registered implementations of the ``CommonRelaxWorkChain``."""
    return plugins.WorkflowFactory(request.param)
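

# Hedged sketch: a test consuming the parametrized fixture above (the assertion about the
# input generator is an assumption about how such tests exercise each implementation).
def test_workchain_has_input_generator(workchain):
    """Every registered implementation should expose an input generator."""
    assert workchain.get_input_generator() is not None
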
# -*- coding: utf-8 -*-
"""Tests for the :mod:`aiida_common_workflows.workflows.relax.abinit` module."""
# pylint: disable=redefined-outer-name
import pytest

from aiida import engine
from aiida import plugins

WORKCHAIN = plugins.WorkflowFactory('common_workflows.relax.abinit')
GENERATOR = WORKCHAIN.get_input_generator()


@pytest.fixture
def default_builder_inputs(generate_code, generate_structure):
    """Return a dictionary with minimum required inputs for the ``get_builder`` method of the inputs generator."""
    return {
        'structure': generate_structure(symbols=('Si', )),
        'engines': {
            'relax': {
                'code': generate_code('abinit').store().uuid,
                'options': {
                    'resources': {
                        'num_machines': 1,
                        'tot_num_mpiprocs': 1
                    }
                }
            }
        },
    }