Example #1
def test_strained_fp_tb(
    configure_with_daemon,  # pylint: disable=unused-argument
    get_optimize_fp_tb_input,
):
    """
    Run the DFT tight-binding optimization workflow with strain on an InSb sample for three strain values.
    """
    from aiida.engine import run
    from aiida import orm
    from aiida_tbextraction.optimize_strained_fp_tb import OptimizeStrainedFirstPrinciplesTightBinding
    inputs = get_optimize_fp_tb_input()

    inputs['strain_kind'] = orm.Str('three_five.Biaxial001')
    inputs['strain_parameters'] = orm.Str('InSb')

    strain_list = [-0.1, 0, 0.1]
    inputs['strain_strengths'] = orm.List(list=strain_list)

    inputs['symmetry_repr_code'] = orm.Code.get_from_string('symmetry_repr')

    result = run(OptimizeStrainedFirstPrinciplesTightBinding, **inputs)
    print(result)
    for value in strain_list:
        suffix = '_{}'.format(value).replace('.', '_dot_').replace('-', 'm_')
        assert all(
            key + suffix in result
            for key in ['cost_value', 'tb_model', 'window']
        )
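The suffix encoding asserted above can be checked with plain Python, independent of AiiDA:

for value in [-0.1, 0, 0.1]:
    suffix = '_{}'.format(value).replace('.', '_dot_').replace('-', 'm_')
    print(value, '->', suffix)  # -0.1 -> '_m_0_dot_1', 0 -> '_0', 0.1 -> '_0_dot_1'

The workflow is therefore expected to expose outputs such as 'cost_value_m_0_dot_1' and 'tb_model_0_dot_1'.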
Example #2
    def _generate_workchain_stm():

        entry_point_code_siesta = 'siesta.siesta'
        entry_point_code = 'siesta.stm'
        entry_point_wc = 'siesta.stm'

        psml = generate_psml_data('Si')

        structure = generate_structure()

        params = generate_param().get_dict()
        params["%block local-density-of-states"] = "\n -9.6  -1.6 eV \n %endblock local-density-of-states"

        inputs = {
            'code': fixture_code(entry_point_code_siesta),
            'stm_code': fixture_code(entry_point_code),
            'stm_mode': orm.Str("constant-height"),
            'stm_spin': orm.Str("none"),
            'stm_value': orm.Float(1),
            'emin': orm.Float(-1),
            'emax': orm.Float(1),
            'structure': structure,
            'kpoints': generate_kpoints_mesh(2),
            'parameters': orm.Dict(dict=params),
            'basis': generate_basis(),
            'pseudos': {
                'Si': psml,
                'SiDiff': psml
            },
            'options': orm.Dict(dict={
                'resources': {'num_machines': 1},
                'max_wallclock_seconds': 1800,
                'withmpi': False,
            })
        }

        process = generate_workchain(entry_point_wc, inputs)

        return process
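A minimal sketch of how such a factory fixture is consumed, assuming `generate_workchain_stm` is registered as a pytest fixture (it is used that way in the `test_runstm_failstm` example further down); the test name here is hypothetical:

def test_instantiation(generate_workchain_stm):
    # Build the STM workchain process from the fixture factory
    process = generate_workchain_stm()
    assert process is not None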
Example #3
def strain_inputs(configure, strain_kind, strain_parameters, sample):
    import pymatgen
    from aiida import orm

    structure = orm.StructureData()
    structure.set_pymatgen(pymatgen.Structure.from_file(sample('POSCAR')))

    return dict(structure=structure,
                strain_kind=orm.Str(strain_kind),
                strain_parameters=orm.Str(strain_parameters),
                strain_strengths=orm.List(list=[-0.2, -0.1, 0., 0.1, 0.2]))
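The same pymatgen round trip works outside a fixture too; a minimal sketch, assuming a loaded AiiDA profile, a local POSCAR file, and a pymatgen version that still exposes `Structure` at the top level (as the snippet above does):

import pymatgen
from aiida import orm

structure = orm.StructureData()
structure.set_pymatgen(pymatgen.Structure.from_file('POSCAR'))
print(structure.get_formula())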
Example #4
def test_base(aiida_profile, fixture_sandbox, fixture_localhost,
              generate_calc_job, fixture_code, file_regression,
              generate_remote_data):
    """
    Test that a single STM calculation is submitted.
    """

    entry_point_name = 'siesta.stm'
    remote_ldos_folder = generate_remote_data(fixture_localhost,
                                              "/tmp/whatever", "siesta.siesta")

    inputs = {
        'code': fixture_code(entry_point_name),
        'ldos_folder': remote_ldos_folder,
        'mode': orm.Str("constant-current"),
        'value': orm.Float(2),
        'spin_option': orm.Str("s"),
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                },
                'max_wallclock_seconds': 1800,
                'withmpi': False,
            }
        }
    }

    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)
    subf = './'
    remote_copy_list = [(remote_ldos_folder.computer.uuid,
                         op.join(remote_ldos_folder.get_remote_path(), subf,
                                 '*.LDOS'), subf)]
    cmdline_params = ['-i', '2.00000', '-s', 's', 'aiida.LDOS']
    retrieve_list = ['*.STM', 'stm.out']

    # Check the attributes of the returned `CalcInfo`
    assert isinstance(calc_info, datastructures.CalcInfo)
    # Check the command line
    assert calc_info.codes_info[0].cmdline_params == cmdline_params
    assert sorted(calc_info.remote_copy_list) == sorted(remote_copy_list)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)

    with fixture_sandbox.open('stm.in') as handle:
        input_written = handle.read()
    # Checks on the files written to the sandbox folder as raw input
    # Note: only 'stm.in' is written at this stage; files like Si.psf and _aiidasubmit.sh are only added later by the engine at upload time, not in the sandbox.
    assert sorted(fixture_sandbox.get_content_list()) == sorted(['stm.in'])
    file_regression.check(input_written, encoding='utf-8', extension='.fdf')
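Each `remote_copy_list` entry follows AiiDA's (computer UUID, absolute source path, relative target path) convention. Restating the entry built above, assuming `import os.path as op`:

entry = (
    remote_ldos_folder.computer.uuid,                         # machine holding the files
    op.join(remote_ldos_folder.get_remote_path(), '*.LDOS'),  # source glob on that machine
    './',                                                     # target, relative to the new working directory
)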
Example #5
 def define(cls, spec):
     """Define inputs and outputs of the calculation."""
     # yapf: disable
     super(BigDFTCalculation, cls).define(spec)
     spec.input('metadata.options.resources', valid_type=dict, default={
                'num_machines': 1, 'num_mpiprocs_per_machine': 1})
     spec.input('metadata.options.parser_name',
                valid_type=six.string_types, default='bigdft')
     spec.input('metadata.options.output_filename',
                valid_type=six.string_types, default=cls._OUTPUT_FILE_NAME)
     spec.input('metadata.options.jobname',
                valid_type=six.string_types, required=False)
     spec.input('parameters', valid_type=BigDFTParameters,
                help='Command line parameters for BigDFT')
     spec.input('structure', valid_type=orm.StructureData,
                help='StructureData struct')
     spec.input('structurefile', valid_type=orm.Str, help='xyz file',
                default=lambda: orm.Str(cls._POSINP_FILE_NAME))
     spec.input('pseudos', valid_type=List, help='',
                default=lambda: List(), required=False)
     spec.input('kpoints', valid_type=Dict, help='kpoint mesh or kpoint path',
                default=lambda: Dict(dict={}), required=False)
     spec.input('extra_retrieved_files', valid_type=List,
                help='', default=lambda: List())
     spec.output('bigdft_logfile', valid_type=BigDFTLogfile,
                 help='BigDFT log file as a dict')
     spec.exit_code(100, 'ERROR_MISSING_OUTPUT_FILES',
                    message='Calculation did not produce all expected output files.')
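Side note on the `six` usage above: `six.string_types` exists only to keep the plugin Python-2 compatible; on Python 3 the same ports could be declared with plain `str` (a sketch, not the original plugin code):

spec.input('metadata.options.parser_name', valid_type=str, default='bigdft')
spec.input('metadata.options.output_filename', valid_type=str, default=cls._OUTPUT_FILE_NAME)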
Example #6
def generate_inputs_base(protocol: Dict,
                         code: orm.Code,
                         structure: orm.StructureData,
                         otfg_family: OTFGGroup,
                         override: Dict[str, Any] = None) -> Dict[str, Any]:
    """Generate the inputs for the `PwBaseWorkChain` for a given code, structure and pseudo potential family.

    :param protocol: the dictionary with protocol inputs.
    :param code: the code to use.
    :param structure: the input structure.
    :param otfg_family: the pseudo potential family.
    :param override: a dictionary to override specific inputs.
    :return: the fully defined input dictionary.
    """
    merged = recursive_merge(protocol, override or {})

    # Here we pass the base namespace in
    calc_dictionary = generate_inputs_calculation(protocol, code, structure,
                                                  otfg_family,
                                                  (override or {}).get('calc', {}))
    # Structure and pseudos should be defined at the base level
    calc_dictionary.pop('structure')
    calc_dictionary.pop('pseudos')
    # Remove the kpoints input as here we use the spacing directly
    calc_dictionary.pop('kpoints', None)

    dictionary = {
        'kpoints_spacing': orm.Float(merged['kpoints_spacing']),
        'max_iterations': orm.Int(merged['max_iterations']),
        'pseudos_family': orm.Str(otfg_family.label),
        'calc': calc_dictionary
    }

    return dictionary
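`recursive_merge` is assumed here to behave like the protocol helpers elsewhere in the AiiDA ecosystem: nested dictionaries merge key by key, with the right-hand argument taking precedence. A sketch of that assumed behaviour:

assert recursive_merge({'a': {'x': 1, 'y': 2}}, {'a': {'y': 3}}) == {'a': {'x': 1, 'y': 3}}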
Example #7
def generate_inputs_base(
        protocol: t.Dict,
        code: orm.Code,
        structure: orm.StructureData,
        otfg_family: OTFGGroup,
        override: t.Dict[str, t.Any] = None) -> t.Dict[str, t.Any]:
    """Generate the inputs for the `CastepBaseWorkChain` for a given code, structure and pseudo potential family.

    :param protocol: the dictionary with protocol inputs.
    :param code: the code to use.
    :param structure: the input structure.
    :param otfg_family: the pseudo potential family.
    :param override: a dictionary to override specific inputs.
    :return: the fully defined input dictionary.
    """
    merged = recursive_merge(protocol, override or {})

    # Here we pass the base namespace in
    calc_dictionary = generate_inputs_calculation(protocol, code, structure,
                                                  otfg_family, override or {})
    # Structure and pseudos should be defined at the base level
    calc_dictionary.pop('pseudos')
    # Remove the kpoints input as here we use the spacing directly
    calc_dictionary.pop('kpoints', None)

    dictionary = {
        # Convert to CASTEP convention - no 2pi factor for real/reciprocal space conversion
        # This is the convention that CastepBaseWorkChain uses
        'kpoints_spacing': orm.Float(merged['kpoints_spacing'] / 2 / pi),
        'max_iterations': orm.Int(merged['max_iterations']),
        'pseudos_family': orm.Str(otfg_family.label),
        'calc': calc_dictionary
    }

    return dictionary
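A worked number for the conversion above: a spacing of 0.05 in the 2π-inclusive convention becomes roughly 0.00796 1/Å in the CASTEP convention.

from math import pi

qe_spacing = 0.05
castep_spacing = qe_spacing / 2 / pi
print(round(castep_spacing, 5))  # 0.00796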
Example #8
 def define(cls, spec):
     super(BigDFTRelaxWorkChain, cls).define(spec)
     spec.expose_inputs(BigDFTBaseWorkChain,
                        exclude=['parameters',
                                 'extra_retrieved_files'])
     spec.input('parameters', valid_type=BigDFTParameters, required=False,
                default=lambda: orm.Dict(), help='param dictionary')
     spec.input('extra_retrieved_files', valid_type=List, required=False,
                help='', default=lambda: List())
     spec.input('relax.perform', valid_type=orm.Bool, required=False,
                default=lambda: orm.Bool(True), help='perform relaxation')
     spec.input('relax.algo', valid_type=orm.Str,
                default=lambda: orm.Str('FIRE'),
                help='algorithm to use during relaxation')
     spec.input('relax.threshold_forces', valid_type=orm.Float, required=False,
                default=lambda: orm.Float(0.0), help='force convergence threshold, in eV/Ang')
     spec.input('relax.steps', valid_type=orm.Int, required=False,
                default=lambda: orm.Int(50),
                help='number of relaxation steps to perform.')
     spec.outline(
         cls.relax,
         cls.results,
     )
     spec.expose_outputs(BigDFTBaseWorkChain)
     spec.output('relaxed_structure', valid_type=StructureData,
                 required=False)
     spec.output('forces', valid_type=ArrayData, required=False)
     spec.output('total_energy', valid_type=orm.Float, required=False)
     spec.exit_code(101, 'ERROR_FAILED_RELAX',
                    'Subprocess failed for relaxation')
Example #9
def test_stm_default(aiida_profile, fixture_localhost, generate_calc_job_node,
                     generate_parser, generate_structure, data_regression):
    """Test a parser of a stm calculation.
    The output is created by running a dead simple SCF calculation for a silicon structure. This test should test the
    standard parsing of the stdout content and XML file stored in the standard results node.
    """
    name = 'default'
    entry_point_calc_job = 'siesta.stm'
    entry_point_parser = 'siesta.stm'

    inputs = AttributeDict({'spin_option': orm.Str("q")})

    attributes = AttributeDict({
        'input_filename': 'stm.in',
        'output_filename': 'stm.out'
    })

    node = generate_calc_job_node(entry_point_calc_job, fixture_localhost,
                                  name, inputs, attributes)
    parser = generate_parser(entry_point_parser)
    results, calcfunction = parser.parse_from_node(node,
                                                   store_provenance=False)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_finished_ok, calcfunction.exit_message
    assert not orm.Log.objects.get_logs_for(node)
    assert 'stm_array' in results
    assert 'output_parameters' in results

    data_regression.check({
        'stm_array': results['stm_array'].attributes,
        'output_parameters': results['output_parameters'].attributes,
    })
Example #10
    def test_normal_exception(self):
        """If a process, for example a FunctionProcess, excepts, the exception should be stored in the node."""
        exception = 'This process function excepted'

        with self.assertRaises(RuntimeError):
            # Note: `run_get_node` re-raises the exception, so the two assertions
            # below are never actually reached inside this block.
            _, node = self.function_excepts.run_get_node(exception=orm.Str(exception))
            self.assertTrue(node.is_excepted)
            self.assertEqual(node.exception, exception)
Example #11
    def get_builder(self,
                    structure,
                    calc_engines,
                    protocol,
                    relaxation_type,
                    threshold_forces=None,
                    threshold_stress=None,
                    **kwargs):
        """Return a process builder for the corresponding workchain class with inputs set according to the protocol.

        :param structure: the structure to be relaxed
        :param calc_engines: ...
        :param protocol: the protocol to use when determining the workchain inputs
        :param relaxation_type: the type of relaxation to perform, instance of `RelaxType`
        :param threshold_forces: target threshold for the forces in eV/Å.
        :param threshold_stress: target threshold for the stress in eV/Å^3.
        :param kwargs: any inputs that are specific to the plugin.
        :return: an `aiida.engine.processes.ProcessBuilder` instance ready to be submitted.
        """
        # pylint: disable=too-many-locals
        from aiida_quantumespresso_epfl.common.protocol.pw import generate_inputs  # pylint: disable=import-error

        code = calc_engines['relax']['code']
        process_class = QuantumEspressoRelaxWorkChain._process_class  # pylint: disable=protected-access
        pseudo_family = kwargs.pop('pseudo_family')

        builder = QuantumEspressoRelaxWorkChain.get_builder()
        inputs = generate_inputs(process_class,
                                 protocol,
                                 code,
                                 structure,
                                 pseudo_family,
                                 override={'relax': {}})
        builder._update(inputs)  # pylint: disable=protected-access

        if relaxation_type == RelaxType.ATOMS:
            relaxation_schema = 'relax'
        elif relaxation_type == RelaxType.ATOMS_CELL:
            relaxation_schema = 'vc-relax'
        else:
            raise ValueError('relaxation type `{}` is not supported'.format(
                relaxation_type.value))

        builder.relaxation_scheme = orm.Str(relaxation_schema)

        if threshold_forces is not None:
            parameters = builder.base.parameters.get_dict()
            parameters.setdefault('CONTROL',
                                  {})['forc_conv_thr'] = threshold_forces
            builder.base.parameters = orm.Dict(dict=parameters)

        if threshold_stress is not None:
            parameters = builder.base.parameters.get_dict()
            parameters.setdefault('CELL',
                                  {})['press_conv_thr'] = threshold_stress
            builder.base.parameters = orm.Dict(dict=parameters)

        return builder
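The returned `ProcessBuilder` is ready to launch; a minimal follow-up sketch:

from aiida.engine import submit

node = submit(builder)  # `builder` as returned by get_builder(...) above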
Example #12
 def parse(self):
     """
     Runs the calculation to parse the Wannier90 output.
     """
     builder = self.setup_tbmodels('tbmodels.parse')
     builder.wannier_folder = self.ctx.wannier_calc.outputs.retrieved
     builder.pos_kind = orm.Str('nearest_atom')
     self.report("Parsing Wannier90 output to tbmodels format.")
     return ToContext(tbmodels_calc=self.submit(builder))
Example #13
def test_leak_run_process():
    """Test whether running a dummy process leaks memory."""
    inputs = {'a': orm.Int(2), 'b': orm.Str('test')}
    run_finished_ok(test_processes.DummyProcess, **inputs)

    # check that no reference to the process is left in memory
    # some delay is necessary in order to allow for all callbacks to finish
    process_instances = get_instances(processes.Process, delay=0.2)
    assert not process_instances, f'Memory leak: process instances remain in memory: {process_instances}'
Example #14
def submit_workchain(structure,
                     daemon,
                     protocol,
                     parameters,
                     pseudo_family,
                     num_machines,
                     num_mpiprocs_per_machine=4,
                     set_2d_mesh=False):
    print("running dft band structure calculation for {}".format(
        structure.get_formula()))

    # Set custom pseudo
    modifiers = {'parameters': parameters}
    """ if pseudo_family is not None:
        from aiida_quantumespresso.utils.protocols.pw import _load_pseudo_metadata
        pseudo_data = _load_pseudo_metadata(pseudo_family)
        modifiers.update({'pseudo': 'custom', 'pseudo_data': pseudo_data}) """
    # if pseudo_family is not None:
    #     from aiida_quantumespresso.utils.pseudopotential import get_pseudos_from_structure
    #     pseudo_data = get_pseudos_from_structure(structure, pseudo_family)
    #     modifiers.update({'pseudo': 'custom', 'pseudo_data': pseudo_data})

    # Submit the DFT bands workchain
    pwbands_workchain_parameters = {
        'code': code,  # note: `code` is not defined in this snippet; it is assumed to exist at module scope
        'structure': structure,
        'protocol': orm.Dict(dict={'name': protocol, 'modifiers': modifiers}),
        'options': orm.Dict(dict={
            'resources': {
                'num_machines': num_machines,
                'num_mpiprocs_per_machine': num_mpiprocs_per_machine
            },
            'max_wallclock_seconds': 3600 * 5,
            'withmpi': True,
        }),
        'set_2d_mesh': orm.Bool(set_2d_mesh)
    }
    if pseudo_family is not None:
        pwbands_workchain_parameters['pseudo_family'] = orm.Str(pseudo_family)
    if daemon:
        dft_workchain = submit(PwBandStructureWorkChain,
                               **pwbands_workchain_parameters)
    else:
        from aiida.engine import run_get_pk
        dft_workchain = run_get_pk(PwBandStructureWorkChain,
                                   **pwbands_workchain_parameters)
    return dft_workchain
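A hypothetical invocation of this helper (every argument value is a placeholder):

workchain = submit_workchain(structure, daemon=True, protocol='theos-ht-1.0',
                             parameters={}, pseudo_family=None, num_machines=2)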
Example #15
    def run_bands(self):
        """Run the `PwBandsWorkChain` to compute the band structure."""

        def get_common_inputs():
            """Return the dictionary of inputs to be used as the basis for each `PwBaseWorkChain`."""
            protocol, protocol_modifiers = self._get_protocol()
            checked_pseudos = protocol.check_pseudos(
                modifier_name=protocol_modifiers.get('pseudo', None),
                pseudo_data=protocol_modifiers.get('pseudo_data', None)
            )
            known_pseudos = checked_pseudos['found']

            inputs = AttributeDict({
                'pw': {
                    'code': self.inputs.code,
                    'pseudos': get_pseudos_from_dict(self.inputs.structure, known_pseudos),
                    'parameters': self.ctx.parameters,
                    'metadata': {},
                }
            })

            if 'options' in self.inputs:
                inputs.pw.metadata.options = self.inputs.options.get_dict()
            else:
                inputs.pw.metadata.options = get_default_options(with_mpi=True)

            return inputs

        inputs = AttributeDict({
            'structure': self.inputs.structure,
            'relax': {
                'base': get_common_inputs(),
                'relaxation_scheme': orm.Str('vc-relax'),
                'meta_convergence': orm.Bool(self.ctx.protocol['meta_convergence']),
                'volume_convergence': orm.Float(self.ctx.protocol['volume_convergence']),
            },
            'scf': get_common_inputs(),
            'bands': get_common_inputs(),
        })

        inputs.relax.base.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_mesh_density'])
        inputs.scf.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_mesh_density'])
        inputs.bands.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_distance_for_bands'])

        num_bands_factor = self.ctx.protocol.get('num_bands_factor', None)
        if num_bands_factor is not None:
            inputs.nbands_factor = orm.Float(num_bands_factor)

        running = self.submit(PwBandsWorkChain, **inputs)

        self.report(f'launching PwBandsWorkChain<{running.pk}>')

        return ToContext(workchain_bands=running)
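`ToContext(workchain_bands=running)` parks the submitted workchain under `self.ctx.workchain_bands` so that a later outline step can inspect it once it has terminated; a sketch of such a step (the method name is hypothetical):

    def inspect_bands(self):
        """Verify that the `PwBandsWorkChain` finished successfully."""
        if not self.ctx.workchain_bands.is_finished_ok:
            self.report('PwBandsWorkChain failed')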
Example #16
    def define(cls, spec):
        # yapf: disable
        super(CifCleanWorkChain, cls).define(spec)
        spec.expose_inputs(CifFilterCalculation, namespace='cif_filter', exclude=('cif',))
        spec.expose_inputs(CifSelectCalculation, namespace='cif_select', exclude=('cif',))
        spec.input('cif', valid_type=orm.CifData,
            help='The CifData node that is to be cleaned.')
        spec.input('parse_engine', valid_type=orm.Str, default=orm.Str('pymatgen'),
            help='The atomic structure engine to parse the cif and create the structure.')
        spec.input('symprec', valid_type=orm.Float, default=orm.Float(5E-3),
            help='The symmetry precision used by SeeKpath for crystal symmetry refinement.')
        spec.input('site_tolerance', valid_type=orm.Float, default=orm.Float(5E-4),
            help='The fractional coordinate distance tolerance for finding overlapping sites (pymatgen only).')
        spec.input('group_cif', valid_type=orm.Group, required=False, non_db=True,
            help='An optional Group to which the final cleaned CifData node will be added.')
        spec.input('group_structure', valid_type=orm.Group, required=False, non_db=True,
            help='An optional Group to which the final reduced StructureData node will be added.')

        spec.outline(
            cls.run_filter_calculation,
            cls.inspect_filter_calculation,
            cls.run_select_calculation,
            cls.inspect_select_calculation,
            if_(cls.should_parse_cif_structure)(
                cls.parse_cif_structure,
            ),
            cls.results,
        )

        spec.output('cif', valid_type=orm.CifData,
            help='The cleaned CifData node.')
        spec.output('structure', valid_type=orm.StructureData, required=False,
            help='The primitive cell structure created with SeeKpath from the cleaned CifData.')

        spec.exit_code(401, 'ERROR_CIF_FILTER_FAILED',
            message='The CifFilterCalculation step failed.')
        spec.exit_code(402, 'ERROR_CIF_SELECT_FAILED',
            message='The CifSelectCalculation step failed.')
        spec.exit_code(410, 'ERROR_CIF_HAS_UNKNOWN_SPECIES',
            message='The cleaned CifData contains sites with unknown species.')
        spec.exit_code(411, 'ERROR_CIF_HAS_UNDEFINED_ATOMIC_SITES',
            message='The cleaned CifData defines no atomic sites.')
        spec.exit_code(412, 'ERROR_CIF_HAS_ATTACHED_HYDROGENS',
            message='The cleaned CifData defines sites with attached hydrogens with incomplete positional data.')
        spec.exit_code(413, 'ERROR_CIF_HAS_INVALID_OCCUPANCIES',
            message='The cleaned CifData defines sites with invalid atomic occupancies.')
        spec.exit_code(414, 'ERROR_CIF_STRUCTURE_PARSING_FAILED',
            message='Failed to parse a StructureData from the cleaned CifData.')
        spec.exit_code(420, 'ERROR_SEEKPATH_SYMMETRY_DETECTION_FAILED',
            message='SeeKpath failed to determine the primitive structure.')
        spec.exit_code(421, 'ERROR_SEEKPATH_INCONSISTENT_SYMMETRY',
            message='SeeKpath detected inconsistent symmetry operations.')
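Exit codes declared with `spec.exit_code` are returned from outline steps through `self.exit_codes`; a sketch of the pattern (the `cif_filter` context key is an assumption):

    def inspect_filter_calculation(self):
        if not self.ctx.cif_filter.is_finished_ok:
            return self.exit_codes.ERROR_CIF_FILTER_FAILED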
Example #17
    def __init__(self,
                 base,
                 base_final_scf,
                 structure,
                 relaxation_scheme,
                 relax_type,
                 meta_convergence=True,
                 max_meta_convergence_iterations=5,
                 volume_convergence=0.01,
                 clean_workdir=True):

        self.base = base
        self.base_final_scf = base_final_scf
        self.structure = structure
        # self.final_scf = orm.Bool(final_scf)
        self.relaxation_scheme = orm.Str(relaxation_scheme)
        self.relax_type = orm.Str(relax_type)
        self.meta_convergence = orm.Bool(meta_convergence)
        self.max_meta_convergence_iterations = orm.Int(
            max_meta_convergence_iterations)
        self.volume_convergence = orm.Float(volume_convergence)
        self.clean_workdir = orm.Bool(clean_workdir)
Example #18
    def test_return_exit_code(self):
        """
        A process function that returns an ExitCode namedtuple should have its exit status and message set FINISHED
        """
        exit_status = 418
        exit_message = 'I am a teapot'

        message = orm.Str(exit_message)
        _, node = self.function_exit_code.run_get_node(exit_status=orm.Int(exit_status), exit_message=message)

        self.assertTrue(node.is_finished)
        self.assertFalse(node.is_finished_ok)
        self.assertEqual(node.exit_status, exit_status)
        self.assertEqual(node.exit_message, exit_message)
Example #19
    def test_input_link_creation(self):
        dummy_inputs = ['a', 'b', 'c', 'd']

        inputs = {string: orm.Str(string) for string in dummy_inputs}
        inputs['metadata'] = {'store_provenance': True}
        process = test_processes.DummyProcess(inputs)

        for entry in process.node.get_incoming().all():
            self.assertTrue(entry.link_label in inputs)
            self.assertEqual(entry.link_label, entry.node.value)
            dummy_inputs.remove(entry.link_label)

        # Make sure there are no other inputs
        self.assertFalse(dummy_inputs)
Example #20
def test_validators(aiida_profile, fixture_sandbox, fixture_localhost,
                    generate_calc_job, fixture_code, file_regression,
                    generate_remote_data):
    """
    Test that the input validators reject invalid `mode` and `spin_option` values when an STM calculation is submitted.
    """

    entry_point_name = 'siesta.stm'
    remote_ldos_folder = generate_remote_data(fixture_localhost,
                                              "/tmp/whatever", "siesta.siesta")

    inputs = {
        'code': fixture_code(entry_point_name),
        'ldos_folder': remote_ldos_folder,
        'value': orm.Float(2),
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                },
                'max_wallclock_seconds': 1800,
                'withmpi': False,
            }
        }
    }

    inputs["mode"] = orm.Str("wrong")
    with pytest.raises(ValueError):
        calc_info = generate_calc_job(fixture_sandbox, entry_point_name,
                                      inputs)
    inputs["mode"] = orm.Str("constant-height")

    inputs["spin_option"] = orm.Str("wrong")
    with pytest.raises(ValueError):
        calc_info = generate_calc_job(fixture_sandbox, entry_point_name,
                                      inputs)
Example #21
def test_dict_as_single_input(configure_with_daemon):  # pylint: disable=unused-argument
    """
    Test setting an attribute of a nested Dict, as "bare" input.
    """
    res, node = run_get_node(
        AddInputsWorkChain,
        sub_process=EchoDictValue,
        inputs={'x': orm.Float(1)},
        added_input_values=orm.Float(2),
        added_input_keys=orm.Str('a:b.c')
    )
    assert node.is_finished
    assert 'x' in res
    assert 'c' in res
    assert res['x'].value == 1
    assert res['c'].value == 2
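Reading of the 'a:b.c' key, inferred from the assertions rather than from documented API: the part before ':' addresses the sub-process input port ('a'), and the dotted remainder descends into the Dict node, so the added orm.Float(2) presumably arrives as:

# inputs to the sub-process (assumed):
#   x = Float(1)
#   a = Dict({'b': {'c': 2.0}})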
Example #22
    def run_relax(self):
        """Run the PwRelaxWorkChain to run a relax PwCalculation."""
        inputs = AttributeDict({
            'structure': self.inputs.structure,
            'base': self._get_common_inputs(),
            'relaxation_scheme': orm.Str('vc-relax'),
            'meta_convergence': orm.Bool(self.ctx.protocol['meta_convergence']),
            'volume_convergence': orm.Float(self.ctx.protocol['volume_convergence']),
        })
        inputs.base.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_mesh_density'])

        running = self.submit(PwRelaxWorkChain, **inputs)

        self.report('launching PwRelaxWorkChain<{}>'.format(running.pk))

        return ToContext(workchain_relax=running)
Example #23
def test_basic_as_single_input(
    configure_with_daemon,  # pylint: disable=unused-argument
    echo_process  # pylint: disable=redefined-outer-name
):
    """
    Basic test for a single input, as "bare" input.
    """
    res, node = run_get_node(
        AddInputsWorkChain,
        sub_process=echo_process,
        added_input_values=orm.Float(1),
        added_input_keys=orm.Str('x'),
    )
    assert node.is_finished_ok
    assert 'result' in res
    assert res['result'].value == 1
Example #24
    def test_querying_node_subclasses():
        """Test querying for groups with multiple types for nodes it contains."""
        group = orm.Group(label='group').store()
        data_int = orm.Int().store()
        data_str = orm.Str().store()
        data_bool = orm.Bool().store()

        group.add_nodes([data_int, data_str, data_bool])

        builder = orm.QueryBuilder().append(orm.Group, tag='group')
        builder.append((orm.Int, orm.Str), with_group='group', project='id')
        results = [entry[0] for entry in builder.iterall()]

        assert len(results) == 2
        assert data_int.pk in results
        assert data_str.pk in results
        assert data_bool.pk not in results
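The same query can be collapsed to a count instead of iterating; a sketch:

builder = orm.QueryBuilder().append(orm.Group, tag='group')
builder.append((orm.Int, orm.Str), with_group='group')
assert builder.count() == 2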
Example #25
 def define(cls, spec):
     """Define the process specification."""
     # yapf: disable
     super().define(spec)
     spec.expose_inputs(PwBaseWorkChain, namespace='base',
         exclude=('clean_workdir', 'pw.structure', 'pw.parent_folder'),
         namespace_options={'help': 'Inputs for the `PwBaseWorkChain` for the main relax loop.'})
     spec.expose_inputs(PwBaseWorkChain, namespace='base_final_scf',
         exclude=('clean_workdir', 'pw.structure', 'pw.parent_folder'),
         namespace_options={'required': False, 'populate_defaults': False,
             'help': 'Inputs for the `PwBaseWorkChain` for the final scf.'})
     spec.input('structure', valid_type=orm.StructureData, help='The input structure.')
     spec.input('final_scf', valid_type=orm.Bool, default=lambda: orm.Bool(False), validator=validate_final_scf,
         help='If `True`, a final SCF calculation will be performed on the successfully relaxed structure.')
     spec.input('relaxation_scheme', valid_type=orm.Str, required=False, validator=validate_relaxation_scheme,
         help='The relaxation scheme to use: choose either `relax` or `vc-relax` for variable cell relax.')
     spec.input('relax_type', valid_type=orm.Str, default=lambda: orm.Str(RelaxType.ATOMS_CELL.value),
         validator=validate_relax_type,
         help='The relax type to use: should be a value of the enum ``common.types.RelaxType``.')
     spec.input('meta_convergence', valid_type=orm.Bool, default=lambda: orm.Bool(True),
         help='If `True` the workchain will perform a meta-convergence on the cell volume.')
     spec.input('max_meta_convergence_iterations', valid_type=orm.Int, default=lambda: orm.Int(5),
         help='The maximum number of variable cell relax iterations in the meta convergence cycle.')
     spec.input('volume_convergence', valid_type=orm.Float, default=lambda: orm.Float(0.01),
         help='The volume difference threshold between two consecutive meta convergence iterations.')
     spec.input('clean_workdir', valid_type=orm.Bool, default=lambda: orm.Bool(False),
         help='If `True`, work directories of all called calculation will be cleaned at the end of execution.')
     spec.outline(
         cls.setup,
         while_(cls.should_run_relax)(
             cls.run_relax,
             cls.inspect_relax,
         ),
         if_(cls.should_run_final_scf)(
             cls.run_final_scf,
             cls.inspect_final_scf,
         ),
         cls.results,
     )
     spec.exit_code(401, 'ERROR_SUB_PROCESS_FAILED_RELAX',
         message='the relax PwBaseWorkChain sub process failed')
     spec.exit_code(402, 'ERROR_SUB_PROCESS_FAILED_FINAL_SCF',
         message='the final scf PwBaseWorkChain sub process failed')
     spec.expose_outputs(PwBaseWorkChain, exclude=('output_structure',))
     spec.output('output_structure', valid_type=orm.StructureData, required=False,
         help='The successfully relaxed structure, unless `relax_type` is `RelaxType.NONE`.')
Example #26
    def get_builder_from_protocol(cls,
                                  code,
                                  structure,
                                  protocol=None,
                                  overrides=None,
                                  **kwargs):
        """Return a builder prepopulated with inputs selected according to the chosen protocol.

        :param code: the ``Code`` instance configured for the ``quantumespresso.pw`` plugin.
        :param structure: the ``StructureData`` instance to use.
        :param protocol: protocol to use, if not specified, the default will be used.
        :param overrides: optional dictionary of inputs to override the defaults of the protocol.
        :param kwargs: additional keyword arguments that will be passed to the ``get_builder_from_protocol`` of all the
            sub processes that are called by this workchain.
        :return: a process builder instance with all inputs defined ready for launch.
        """
        args = (code, structure, protocol)
        inputs = cls.get_protocol_inputs(protocol, overrides)
        builder = cls.get_builder()

        base = PwBaseWorkChain.get_builder_from_protocol(
            *args, overrides=inputs.get('base', None), **kwargs)
        base_final_scf = PwBaseWorkChain.get_builder_from_protocol(
            *args, overrides=inputs.get('base_final_scf', None), **kwargs)

        base['pw'].pop('structure', None)
        base.pop('clean_workdir', None)
        base_final_scf['pw'].pop('structure', None)
        base_final_scf.pop('clean_workdir', None)

        builder.base = base
        builder.base_final_scf = base_final_scf
        builder.structure = structure
        builder.clean_workdir = orm.Bool(inputs['clean_workdir'])
        builder.max_meta_convergence_iterations = orm.Int(
            inputs['max_meta_convergence_iterations'])
        builder.meta_convergence = orm.Bool(inputs['meta_convergence'])
        builder.relax_type = orm.Str(inputs['relax_type'])
        builder.volume_convergence = orm.Float(inputs['volume_convergence'])

        return builder
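Once populated, the builder launches like any other; a sketch, assuming the enclosing class is the relax workchain of Example #25 and that `code` and `structure` are existing nodes ('fast' is a placeholder protocol name):

from aiida.engine import submit

builder = PwRelaxWorkChain.get_builder_from_protocol(code, structure, protocol='fast')
node = submit(builder)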
Example #27
 def setUp(self):
     super().setUp()
     self.assertIsNone(Process.current())
     self.process_class = CalculationFactory('templatereplacer')
     self.builder = self.process_class.get_builder()
     self.builder_workchain = ExampleWorkChain.get_builder()
     self.inputs = {
         'dynamic': {
             'namespace': {
                 'alp': orm.Int(1).store()
             }
         },
         'name': {
             'spaced': orm.Int(1).store(),
         },
         'name_spaced': orm.Str('underscored').store(),
         'boolean': orm.Bool(True).store(),
         'metadata': {}
     }
Example #28
def test_runstm_failstm(aiida_profile, generate_workchain_stm,
                        generate_wc_job_node, generate_calc_job_node,
                        fixture_localhost):
    """Test that the workchain returns `ERROR_STM_PLUGIN` when the STM calculation fails."""
    process = generate_workchain_stm()
    process.checks()

    ldos_basewc = generate_wc_job_node("siesta.base", fixture_localhost)
    ldos_basewc.set_process_state(ProcessState.FINISHED)
    ldos_basewc.set_exit_status(ExitCode(0).status)
    # Now the workchain is_finished_ok, but its outputs still need to be set
    remote_folder = orm.RemoteData(computer=fixture_localhost,
                                   remote_path='/tmp')
    remote_folder.store()
    remote_folder.add_incoming(ldos_basewc,
                               link_type=LinkType.RETURN,
                               link_label='remote_folder')
    process.ctx.siesta_ldos = ldos_basewc

    process.run_stm()

    # Fake the STM calculation
    name = 'default'
    entry_point_calc_job = 'siesta.stm'
    inputs = AttributeDict({'spin_option': orm.Str("q")})
    attributes = AttributeDict({
        'input_filename': 'stm.in',
        'output_filename': 'stm.out'
    })
    stm_node = generate_calc_job_node(entry_point_calc_job, fixture_localhost,
                                      name, inputs, attributes)
    stm_node.set_process_state(ProcessState.FINISHED)
    process.ctx.stm_calc = stm_node

    result = process.run_results()

    assert result == SiestaSTMWorkChain.exit_codes.ERROR_STM_PLUGIN
Example #29
def test_outputs(aiida_profile, generate_workchain_stm, generate_wc_job_node,
                 generate_calc_job_node, fixture_localhost):
    """Test `SiestaSTMWorkChain`."""

    process = generate_workchain_stm()
    process.checks()

    name = 'default'
    entry_point_calc_job = 'siesta.stm'
    inputs = AttributeDict({'spin_option': orm.Str("q")})
    attributes = AttributeDict({
        'input_filename': 'stm.in',
        'output_filename': 'stm.out'
    })
    stm_node = generate_calc_job_node(entry_point_calc_job, fixture_localhost,
                                      name, inputs, attributes)
    stm_node.set_process_state(ProcessState.FINISHED)
    stm_node.set_exit_status(ExitCode(0).status)
    stm_array = orm.ArrayData()
    stm_array.add_incoming(stm_node,
                           link_type=LinkType.CREATE,
                           link_label='stm_array')
    stm_array.store()
    process.ctx.stm_calc = stm_node

    first_basewc = generate_wc_job_node("siesta.base", fixture_localhost)
    out_par = orm.Dict(dict={"variable_geometry": False})
    out_par.store()
    out_par.add_incoming(first_basewc,
                         link_type=LinkType.RETURN,
                         link_label='output_parameters')
    process.ctx.workchain_base = first_basewc

    result = process.run_results()

    assert result == ExitCode(0)
    assert isinstance(process.outputs["stm_array"], orm.ArrayData)
Example #30
 def run(self):
     if self.inputs.add_outputs:
         self.out('required_string', orm.Str('testing').store())
         self.out('integer.namespace.two', orm.Int(2).store())
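Dotted output labels create nested namespaces, so the values stored above would presumably be read back as follows (an assumption based on AiiDA's namespace conventions, not on this snippet):

# node.outputs.required_string       -> Str('testing')
# node.outputs.integer.namespace.two -> Int(2)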