Example #1
    def _generate_projwfc_inputs(self):
        """Run Projwfc calculation, to generate partial Densities of State."""
        projwfc_inputs = AttributeDict(self.exposed_inputs(ProjwfcCalculation, 'projwfc'))
        projwfc_inputs.parent_folder = self.ctx.nscf_parent_folder
        projwfc_parameters = self.inputs.projwfc.parameters.get_dict()

        if projwfc_parameters.pop('align_to_fermi', False):
            projwfc_parameters['PROJWFC']['Emin'] = projwfc_parameters['PROJWFC']['Emin'] + self.ctx.nscf_fermi
            projwfc_parameters['PROJWFC']['Emax'] = projwfc_parameters['PROJWFC']['Emax'] + self.ctx.nscf_fermi

        projwfc_inputs.parameters = orm.Dict(dict=projwfc_parameters)
        projwfc_inputs['metadata']['call_link_label'] = 'projwfc'
        return projwfc_inputs
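
All of the snippets collected here build their process inputs as an AttributeDict, which exposes dictionary keys as attributes and converts nested mappings on construction, so chained access such as `inputs.pw.metadata.options` works. A minimal standalone sketch (it only assumes that aiida is importable; the keys are illustrative, not taken from the example above):

from aiida.common.extendeddicts import AttributeDict

# Nested plain dicts are converted when the AttributeDict is built, so both
# item-style and attribute-style access work at every level.
inputs = AttributeDict({'pw': {'metadata': {}, 'parameters': {'CONTROL': {}}}})
inputs.pw.metadata.options = {'resources': {'num_machines': 1}}  # attribute-style write
assert inputs['pw']['metadata']['options'] == inputs.pw.metadata.options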
Example #2
    def _generate_dos_inputs(self):
        """Run DOS calculation, to generate total Densities of State."""
        dos_inputs = AttributeDict(self.exposed_inputs(DosCalculation, 'dos'))
        dos_inputs.parent_folder = self.ctx.nscf_parent_folder
        dos_parameters = self.inputs.dos.parameters.get_dict()

        if dos_parameters.pop('align_to_fermi', False):
            dos_parameters['DOS']['Emin'] = dos_parameters['DOS']['Emin'] + self.ctx.nscf_fermi
            dos_parameters['DOS']['Emax'] = dos_parameters['DOS']['Emax'] + self.ctx.nscf_fermi

        dos_inputs.parameters = orm.Dict(dict=dos_parameters)
        dos_inputs['metadata']['call_link_label'] = 'dos'
        return dos_inputs
Example #3
    def run_relax(self):
        """
        Run the PwRelaxWorkChain to run a relax calculation
        """
        inputs = AttributeDict(
            self.exposed_inputs(PwRelaxWorkChain, namespace='relax'))
        inputs.structure = self.ctx.current_structure

        running = self.submit(PwRelaxWorkChain, **inputs)

        self.report('launching PwRelaxWorkChain<{}>'.format(running.pk))

        return ToContext(workchain_relax=running)
Example #4
    def run_bands(self):
        """Run the `PwBandsWorkChain` to compute the band structure."""
        def get_common_inputs():
            """Return the dictionary of inputs to be used as the basis for each `PwBaseWorkChain`."""
            protocol, protocol_modifiers = self._get_protocol()
            checked_pseudos = protocol.check_pseudos(
                modifier_name=protocol_modifiers.get('pseudo', None),
                pseudo_data=protocol_modifiers.get('pseudo_data', None))
            known_pseudos = checked_pseudos['found']

            inputs = AttributeDict({
                'pw': {
                    'code': self.inputs.code,
                    'pseudos': get_pseudos_from_dict(self.inputs.structure, known_pseudos),
                    'parameters': self.ctx.parameters,
                    'metadata': {},
                }
            })

            if 'options' in self.inputs:
                inputs.pw.metadata.options = self.inputs.options.get_dict()
            else:
                inputs.pw.metadata.options = get_default_options(with_mpi=True)

            return inputs

        inputs = AttributeDict({
            'structure': self.inputs.structure,
            # 'relax': {
            #     'base': get_common_inputs(),
            #     'relaxation_scheme': orm.Str('vc-relax'),
            #     'meta_convergence': orm.Bool(self.ctx.protocol['meta_convergence']),
            #     'volume_convergence': orm.Float(self.ctx.protocol['volume_convergence']),
            # },
            'scf': get_common_inputs(),
            'bands': get_common_inputs(),
        })

        # inputs.relax.base.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_mesh_density'])
        inputs.scf.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_mesh_density'])
        inputs.bands.kpoints_distance = orm.Float(self.ctx.protocol['kpoints_distance_for_bands'])

        num_bands_factor = self.ctx.protocol.get('num_bands_factor', None)
        if num_bands_factor is not None:
            inputs.nbands_factor = orm.Float(num_bands_factor)

        running = self.submit(PwBandsWorkChain, **inputs)

        self.report('launching PwBandsWorkChain<{}>'.format(running.pk))

        return ToContext(workchain_bands=running)
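
The `options` dictionary consumed above via `self.inputs.options.get_dict()` carries scheduler metadata for the underlying calculation job. A hedged illustration of typical contents (values are invented, not taken from the example):

# Illustrative only: the kind of dictionary expected in `self.inputs.options` above.
options = {
    'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 8},
    'max_wallclock_seconds': 3600,
    'withmpi': True,
}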
Example #5
    def run_dosmain(self):
        """Run the DosmainCalculation subprocess."""
        inputs = AttributeDict(
            self.exposed_inputs(DosmainCalculation, namespace='dosmain'))
        openmx_calculation = self.ctx.openmx_calculation
        inputs.openmx_output_folder = openmx_calculation.outputs.remote_folder
        inputs.openmx_input_structure = openmx_calculation.inputs.structure
        inputs.openmx_orbital_configurations = openmx_calculation.inputs.orbital_configurations
        inputs.metadata.call_link_label = 'dosmain'

        running = self.submit(DosmainCalculation, **inputs)
        self.report(f'launching DosmainCalculation<{running.pk}>')

        return ToContext(dosmain_calculation=running)
Example #6
    def run_relax(self):
        """Run the PwRelaxWorkChain to run a relax PwCalculation."""
        inputs = AttributeDict(
            self.exposed_inputs(PwRelaxWorkChain, namespace='relax'))
        inputs.structure = self.ctx.current_structure
        inputs.base.pw.pseudos = self.inputs.pseudos
        inputs.base.pw.code = self.inputs.code
        inputs.clean_workdir = self.inputs.clean_workdir

        running = self.submit(PwRelaxWorkChain, **inputs)

        self.report('launching PwRelaxWorkChain<{}>'.format(running.pk))

        return ToContext(workchain_relax=running)
Example #7
    def get_inputs_scf(self):
        """
        Initialize inputs for the scf cycle
        """
        input_scf = AttributeDict(self.exposed_inputs(FleurScfWorkChain, namespace='scf'))
        input_scf.metadata.label = 'reference_scf_SSDisp'
        input_scf.metadata.description = ('The SCF workchain converging reference charge'
                                          ' density for Spin Spiral Dispersion')

        if 'wf_parameters' not in input_scf:
            scf_wf_dict = {}
        else:
            scf_wf_dict = input_scf.wf_parameters.get_dict()

        if 'inpxml_changes' not in scf_wf_dict:
            scf_wf_dict['inpxml_changes'] = []
        # set up q vector for the reference calculation
        string_ref_qss = ' '.join(map(str, self.ctx.wf_dict['ref_qss']))
        if [x for x in self.ctx.wf_dict['ref_qss'] if x != 0]:
            changes_dict = {'qss': string_ref_qss, 'l_noco': True, 'ctail': False, 'l_ss': True}
        else:
            changes_dict = {'qss': ' 0.0 0.0 0.0 ', 'l_noco': False, 'ctail': True, 'l_ss': False}

        scf_wf_dict['inpxml_changes'].append(('set_inpchanges', {'change_dict': changes_dict}))

        # change beta parameter
        for key, val in six.iteritems(self.ctx.wf_dict.get('beta')):
            scf_wf_dict['inpxml_changes'].append(('set_atomgr_att_label', {
                'attributedict': {
                    'nocoParams': [('beta', val)]
                },
                'atom_label': key
            }))

        input_scf.wf_parameters = Dict(dict=scf_wf_dict)

        if 'structure' in input_scf:  # add info about spin spiral propagation
            if 'calc_parameters' in input_scf:
                calc_parameters = input_scf.calc_parameters.get_dict()
            else:
                calc_parameters = {}
            calc_parameters['qss'] = {
                'x': self.ctx.wf_dict['prop_dir'][0],
                'y': self.ctx.wf_dict['prop_dir'][1],
                'z': self.ctx.wf_dict['prop_dir'][2]
            }
            input_scf.calc_parameters = Dict(dict=calc_parameters)

        return input_scf
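
For clarity, the `wf_parameters` node assembled above ends up holding a list of `(change name, arguments)` tuples under `inpxml_changes`. A sketch of the resulting data structure (labels and numbers are invented):

# Sketch of the wf_parameters content built above, with made-up values:
scf_wf_dict = {
    'inpxml_changes': [
        ('set_inpchanges', {'change_dict': {'qss': '0.0 0.0 0.5', 'l_noco': True, 'ctail': False, 'l_ss': True}}),
        ('set_atomgr_att_label', {'attributedict': {'nocoParams': [('beta', 1.570796)]}, 'atom_label': '222'}),
    ]
}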
Example #8
    def run_nscf_crop(self):
        """Run the PwBaseWorkChain in nscf mode along the path of high-symmetry determined by seekpath."""
        inputs = AttributeDict(self.exposed_inputs(PwBaseWorkChain, namespace='nscf_crop'))
        inputs.metadata.call_link_label = 'nscf_crop'
        # inputs.pw.metadata.options.max_wallclock_seconds *= 4
        # inputs.kpoints_distance = self.inputs.kpoints_distance
        inputs.pw.structure = self.ctx.current_structure
        inputs.pw.parent_folder = self.ctx.current_folder
        inputs.pw.parameters = inputs.pw.parameters.get_dict()
        inputs.pw.parameters.setdefault('CONTROL', {})
        inputs.pw.parameters.setdefault('SYSTEM', {})
        inputs.pw.parameters.setdefault('ELECTRONS', {})

        # The following flags always have to be set in the parameters, regardless of what the caller specified in the inputs
        inputs.pw.parameters['CONTROL']['calculation'] = 'nscf'

        inputs.pop('kpoints_distance', None)
        inputs.kpoints = self.ctx.kpoint_crop

        # Only set the following parameters if not directly explicitly defined in the inputs
        # inputs.pw.parameters['ELECTRONS'].setdefault('diagonalization', 'cg')
        # inputs.pw.parameters['ELECTRONS'].setdefault('diago_full_acc', True)

        # If `nbands_factor_crop` is defined in the inputs we set the `nbnd` parameter
        if 'nbands_factor_crop' in self.inputs:
            factor = self.inputs.nbands_factor_crop.value
            parameters = self.ctx.workchain_scf.outputs.output_parameters.get_dict()
            if int(parameters['number_of_spin_components']) > 1:
                nspin_factor = 2
            else:
                nspin_factor = 1
            nbands = int(parameters['number_of_bands'])
            nelectron = int(parameters['number_of_electrons'])
            nbnd = max(
                int(0.5 * nelectron * nspin_factor * factor),
                int(0.5 * nelectron * nspin_factor) + 4 * nspin_factor,
                nbands)
            inputs.pw.parameters['SYSTEM']['nbnd'] = nbnd

        # Otherwise set the current number of bands, unless explicitly set in the inputs
        else:
            inputs.pw.parameters['SYSTEM'].setdefault('nbnd', self.ctx.current_number_of_bands)

        inputs = prepare_process_inputs(PwBaseWorkChain, inputs)
        running = self.submit(PwBaseWorkChain, **inputs)

        self.report('launching PwBaseWorkChain<{}> in {} mode for CROP grid'.format(running.pk, 'nscf'))

        return ToContext(workchain_nscf_crop=running)
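
As a quick sanity check of the `nbnd` heuristic above, with invented numbers (not taken from any calculation):

# Worked example of the nbnd formula above, for a spin-unpolarized case.
nelectron, nspin_factor, factor, nbands = 8, 1, 1.5, 8
nbnd = max(
    int(0.5 * nelectron * nspin_factor * factor),            # 6: half-filled bands scaled by the factor
    int(0.5 * nelectron * nspin_factor) + 4 * nspin_factor,  # 8: at least four extra bands per spin channel
    nbands)                                                  # 8: never fewer bands than the SCF run used
print(nbnd)  # 8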
Example #9
    def prepare_eos(self):
        """
        Initialize inputs for eos workflow:
        wf_param, options, calculation parameters, codes, structure
        """
        inputs = AttributeDict(self.exposed_inputs(FleurEosWorkChain, namespace='eos'))
        inputs.metadata.label = 'EOS_substrate'
        inputs.metadata.description = 'The EOS workchain finding equilibrium substrate'
        # Here the wf_dict node appears out of nowhere.
        inputs.structure = create_substrate_bulk(Dict(dict=self.ctx.wf_dict))

        if not isinstance(inputs.structure, StructureData):
            return inputs, inputs.structure  # exit code thrown in create_substrate_bulk

        return inputs, None
Example #10
def test_fleur_parser_MT_overlap_erroroutput(fixture_localhost, generate_parser, generate_calc_job_node,
                                             create_fleurinp, data_regression):
    """
    Default inpgen parser test of a failed fleur calculation.
    """

    name = 'mt_overlap_errorout'
    entry_point_calc_job = 'fleur.fleur'
    entry_point_parser = 'fleur.fleurparser'

    inputs = AttributeDict({'fleurinp': create_fleurinp(TEST_INP_XML_PATH1), 'metadata': {}})

    #change retrieve list to save space
    retrieve_list = ['out.xml', 'out.error', 'relax.xml']
    node = generate_calc_job_node(entry_point_calc_job,
                                  fixture_localhost,
                                  name,
                                  inputs,
                                  store=True,
                                  retrieve_list=retrieve_list)
    parser = generate_parser(entry_point_parser)
    results, calcfunction = parser.parse_from_node(node, store_provenance=False)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == node.process_class.exit_codes.ERROR_MT_RADII_RELAX.status
    assert 'output_parameters' not in results
    assert 'output_params_complex' not in results
    assert 'relax_parameters' not in results
    assert 'error_params' in results
    data_regression.check({'error_params': results['error_params'].get_dict()})
Example #11
def test_fleur_parser_complex_erroroutput(fixture_localhost, generate_parser, generate_calc_job_node, create_fleurinp,
                                          data_regression):
    """
    Default inpgen parser test of a successful inpgen calculation.
    Checks via data regression if attributes of fleurinp are the same
    """

    name = 'complex_errorout'
    entry_point_calc_job = 'fleur.fleur'
    entry_point_parser = 'fleur.fleurparser'

    inputs = AttributeDict({'fleurinp': create_fleurinp(TEST_INP_XML_PATH), 'metadata': {}})

    #change retrieve list to save space
    retrieve_list = ['out.xml', 'out.error', 'usage.json']
    node = generate_calc_job_node(entry_point_calc_job,
                                  fixture_localhost,
                                  name,
                                  inputs,
                                  store=True,
                                  retrieve_list=retrieve_list)
    parser = generate_parser(entry_point_parser)
    results, calcfunction = parser.parse_from_node(node, store_provenance=False)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == node.process_class.exit_codes.ERROR_FLEUR_CALC_FAILED.status

    assert 'output_parameters' not in results
    assert 'output_params_complex' not in results
    assert 'relax_parameters' not in results
    assert 'error_params' not in results
Example #12
    def setup(self):
        """Call the `setup` of the `BaseRestartWorkChain` and then create the inputs dictionary in `self.ctx.inputs`.
        This `self.ctx.inputs` dictionary will be used by the `BaseRestartWorkChain` to submit the calculations in the
        internal loop.
        """
        super(RaspaBaseWorkChain, self).setup()
        self.ctx.inputs = AttributeDict(self.exposed_inputs(RaspaCalculation, 'raspa'))
Example #13
def generate_inputs():
    """Minimal input for pw2wannier90 calculations."""
    basepath = os.path.dirname(os.path.abspath(__file__))
    nnkp_filepath = os.path.join(basepath, 'fixtures', 'pw2wannier90', 'inputs', 'aiida.nnkp')

    parameters = {
        'inputpp': {
            'write_amn': False,
            'write_mmn': False,
            'write_unk': False,
            'scdm_proj': True,
            'scdm_entanglement': 'isolated',
        }
    }

    settings = {'ADDITIONAL_RETRIEVE_LIST': ['*.amn', '*.mmn', '*.eig']}

    # Since we don't actually run pw2wannier.x, we only pretend to have the output folder
    # of a parent pw.x calculation. The nnkp file, instead, is real.
    inputs = {
        'parent_folder': orm.FolderData().store(),
        'nnkp_file': orm.SinglefileData(file=nnkp_filepath).store(),
        'parameters': orm.Dict(dict=parameters),
        'settings': orm.Dict(dict=settings),
    }

    return AttributeDict(inputs)
Example #14
    def run_cp2kmultistage(self):
        """Run CP2K-Multistage"""
        cp2k_ms_inputs = AttributeDict(self.exposed_inputs(Cp2kMultistageWorkChain))
        cp2k_ms_inputs['metadata']['call_link_label'] = 'call_cp2kmultistage'
        running = self.submit(Cp2kMultistageWorkChain, **cp2k_ms_inputs)
        self.report('Running Cp2kMultistageWorkChain to move the structure')
        return ToContext(ms_wc=running)
Example #15
    def prepare_process_inputs(self, inputs):
        """
        Prepare the inputs for submission for the given process, according
        to its spec. That is to say that when an input is found in the
        inputs that corresponds to an input port in the spec of the
        process that expects a Dict, yet the value in the inputs is a
        plain dictionary, the value will be wrapped in by the Dict
        class to create a valid input.

        :param process: sub class of Process for which to prepare the inputs dictionary
        :param inputs: a dictionary of inputs intended for submission of the process
        :return: a dictionary with all bare dictionaries wrapped in Dict if dictated by process spec

        """
        prepared_inputs = AttributeDict()
        process_spec = SiestaCalculation.spec()

        for key, value in six.iteritems(inputs):

            if key not in process_spec.inputs:
                continue

            if process_spec.inputs[key].valid_type == orm.Dict and isinstance(
                    value, dict):
                prepared_inputs[key] = orm.Dict(dict=value)
            else:
                prepared_inputs[key] = value

        return prepared_inputs
Example #16
    def _prepare_process_inputs(self, inputs_dict):
        """Convert plain dictionary to Dict node"""
        out = AttributeDict(inputs_dict)
        for key in self._context_pain_dicts:
            if key in out:
                out[key] = Dict(dict=out[key])
        return out
Example #17
def generate_inputs_spinorbit(generate_calc_job_node, fixture_localhost,
                              generate_structure, generate_kpoints_mesh):
    """Create the required inputs for the ``ProjwfcCalculation`` with lspinorb=.true."""
    entry_point_name = 'quantumespresso.pw'
    inputs = {
        'structure': generate_structure(),
        'kpoints': generate_kpoints_mesh(4)
    }

    parent_calcjob = generate_calc_job_node(entry_point_name,
                                            fixture_localhost,
                                            'default',
                                            inputs=inputs)
    params = orm.Dict(
        dict={
            'number_of_spin_components': 4,
            'non_colinear_calculation': True,
            'spin_orbit_calculation': True
        })
    params.add_incoming(parent_calcjob,
                        link_type=LinkType.CREATE,
                        link_label='output_parameters')
    params.store()
    inputs = {
        'parent_folder': parent_calcjob.outputs.remote_folder,
    }

    return AttributeDict(inputs)
Example #18
    def prepare_relax(self):
        """
        Initialise inputs for Relax workchain
        """
        inputs = AttributeDict(self.exposed_inputs(FleurBaseRelaxWorkChain, namespace='relax'))
        inputs.metadata.label = 'Relax_symmetric_film'
        inputs.metadata.description = 'The Relax workchain relaxing film structure'

        if self.ctx.eos_needed or 'eos_output' in self.inputs:
            if not self.ctx.eos_needed:
                eos_output = self.inputs.eos_output
            else:
                try:
                    eos_output = self.ctx.eos_wc.outputs.output_eos_wc_para
                except NotExistent:
                    return self.ctx.ERROR_EOS_FAILED
            # print(eos_output.get_dict())
            scaling_param = eos_output.get_dict()['scaling_gs']

            out_create_structure = create_film_to_relax(wf_dict_node=Dict(dict=self.ctx.wf_dict),
                                                        scaling_parameter=Float(scaling_param),
                                                        suggestion_node=self.inputs.distance_suggestion)
            inputs.scf.structure = out_create_structure['structure']
            substrate = out_create_structure['substrate']
            # TODO: error handling might be needed
            self.ctx.substrate = substrate.uuid  # can not store aiida data nodes directly in ctx.

            if not isinstance(inputs.scf.structure, StructureData):
                return inputs, inputs.scf.structure

        return inputs, None
Example #19
    def run_nscf(self):
        """Run an NSCF calculation, to generate eigenvalues with a denser k-point mesh.

        This calculation modifies the base scf calculation inputs by:

        - Using the parent folder from the scf calculation.
        - Replacing the kpoints, if an alternative is specified for nscf.
        - Changing ``SYSTEM.occupations`` to 'tetrahedra'.
        - Changing ``SYSTEM.nosym`` to True, to avoid generation of additional k-points in low symmetry cases.
        - Replacing the ``pw.metadata.options``, if an alternative is specified for nscf.

        """
        inputs = AttributeDict(self.exposed_inputs(PwBaseWorkChain, 'nscf'))
        if 'scf' in self.inputs:
            inputs.pw.parent_folder = self.ctx.scf_parent_folder
        inputs.pw.structure = self.inputs.structure

        inputs.metadata.call_link_label = 'nscf'
        inputs = prepare_process_inputs(PwBaseWorkChain, inputs)

        if self.ctx.dry_run:
            return inputs

        future = self.submit(PwBaseWorkChain, **inputs)

        self.report(f'launching NSCF PwBaseWorkChain<{future.pk}>')

        return ToContext(workchain_nscf=future)
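
The docstring above lists parameter overrides that are not visible in this extract; a minimal sketch of the described `CONTROL` and `SYSTEM` changes, applied to a bare parameters dictionary outside the work chain:

# Sketch only, following the docstring above: the overrides applied for the NSCF step.
parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutwfc': 30.0}}
parameters['CONTROL']['calculation'] = 'nscf'
parameters['SYSTEM']['occupations'] = 'tetrahedra'  # tetrahedron occupations, as described in the docstring
parameters['SYSTEM']['nosym'] = True                # avoid extra k-points in low-symmetry cases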
Example #20
    def run_stage(self):
        """Check for restart, prepare input, submit and direct output to context."""

        # Update structure
        self.ctx.base_inp['cp2k']['structure'] = self.ctx.structure

        # Check if it is needed to restart the calculation and provide the parent folder and new structure
        if self.ctx.parent_calc_folder:
            self.ctx.base_inp['cp2k']['parent_calc_folder'] = self.ctx.parent_calc_folder
            self.ctx.cp2k_param['FORCE_EVAL']['DFT']['SCF']['SCF_GUESS'] = 'RESTART'
            self.ctx.cp2k_param['FORCE_EVAL']['DFT']['WFN_RESTART_FILE_NAME'] = './parent_calc/aiida-RESTART.wfn'
        else:
            self.ctx.cp2k_param['FORCE_EVAL']['DFT']['SCF']['SCF_GUESS'] = 'ATOMIC'

        # Overwrite the generated input with the custom cp2k/parameters
        if 'parameters' in self.exposed_inputs(Cp2kBaseWorkChain, 'cp2k_base')['cp2k']:
            merge_dict(
                self.ctx.cp2k_param,
                AttributeDict(self.exposed_inputs(Cp2kBaseWorkChain, 'cp2k_base')['cp2k']['parameters'].get_dict()))
        self.ctx.base_inp['cp2k']['parameters'] = Dict(dict=self.ctx.cp2k_param).store()

        # Update labels
        self.ctx.base_inp['metadata'].update({
            'label': '{}_{}'.format(self.ctx.stage_tag, self.ctx.settings_tag),
            'call_link_label': 'run_{}_{}'.format(self.ctx.stage_tag, self.ctx.settings_tag),
        })
        self.ctx.base_inp['cp2k']['metadata'].update(
            {'label': self.ctx.base_inp['cp2k']['parameters'].get_dict()['GLOBAL']['RUN_TYPE']})

        running_base = self.submit(Cp2kBaseWorkChain, **self.ctx.base_inp)
        self.report("submitted Cp2kBaseWorkChain for {}/{}".format(self.ctx.stage_tag, self.ctx.settings_tag))
        return ToContext(stages=append_(running_base))
Example #21
    def run_wc2(self):
        """Run work chain 2."""
        wc2_inp = AttributeDict(self.exposed_inputs(IsothermCalcPEWorkChain))
        wc2_inp['structure'] = self.ctx.wc1.outputs.structure_ddec
        wc2_inp['metadata']['call_link_label'] = 'call_wc2'
        running = self.submit(IsothermCalcPEWorkChain, **wc2_inp)
        return ToContext(wc2=running)
Example #22
def generate_inputs():
    return AttributeDict({
        'structure': generate_structure(structure_id='water'),
        'parameters': orm.Dict(dict={}),
    })
Example #23
def test_sequential_not_conv(aiida_profile, generate_workchain_seq_converger,
                             generate_wc_job_node, fixture_localhost):
    """
    We test here the SiestaSequentialConverger, in the case a Converger fails
    to converge.
    """

    from aiida.common.extendeddicts import AttributeDict

    process = generate_workchain_seq_converger()
    process.initialize()

    assert process.ctx.iteration_keys == ('iterate_over', )

    inputs = {"iterate_over": orm.Dict(dict={"s": [2, 2]})}
    convergerwc = generate_wc_job_node("siesta.converger", fixture_localhost,
                                       inputs)
    convergerwc.set_process_state(ProcessState.FINISHED)
    convergerwc.set_exit_status(ExitCode(0).status)
    out_conv = orm.Bool(False)
    out_conv.store()
    out_conv.add_incoming(convergerwc,
                          link_type=LinkType.RETURN,
                          link_label='converged')

    process.ctx.last_inputs = AttributeDict({"parameters": {"yo": "yo"}})

    process._analyze_process(convergerwc)

    assert process.ctx.already_converged == {}
    assert "parameters" in process.ctx.last_inputs
Example #24
    def run_relax(self):
        """Run the `PwBaseWorkChain` to run a relax `PwCalculation`."""
        self.ctx.iteration += 1

        inputs = AttributeDict(
            self.exposed_inputs(PwBaseWorkChain, namespace='base'))
        inputs.pw.structure = self.ctx.current_structure
        inputs.pw.parameters = inputs.pw.parameters.get_dict()

        inputs.pw.parameters.setdefault('CELL', {})
        inputs.pw.parameters.setdefault('CONTROL', {})
        inputs.pw.parameters['CONTROL']['restart_mode'] = 'from_scratch'

        if 'relaxation_scheme' in self.inputs:
            if self.inputs.relaxation_scheme.value == 'relax':
                relax_type = RelaxType.ATOMS
            elif self.inputs.relaxation_scheme.value == 'vc-relax':
                relax_type = RelaxType.ATOMS_CELL
            else:
                raise ValueError(
                    'unsupported value for the `relaxation_scheme` input.')
        else:
            relax_type = RelaxType(self.inputs.relax_type)

        if relax_type in [
                RelaxType.NONE, RelaxType.VOLUME, RelaxType.SHAPE,
                RelaxType.CELL
        ]:
            inputs.pw.settings = self._fix_atomic_positions(
                inputs.pw.structure, inputs.pw.get('settings', None))

        if relax_type in [RelaxType.NONE, RelaxType.ATOMS]:
            inputs.pw.parameters['CONTROL']['calculation'] = 'relax'
            inputs.pw.parameters.pop('CELL', None)
        else:
            inputs.pw.parameters['CONTROL']['calculation'] = 'vc-relax'

        if relax_type in [RelaxType.VOLUME, RelaxType.ATOMS_VOLUME]:
            inputs.pw.parameters['CELL']['cell_dofree'] = 'volume'

        if relax_type in [RelaxType.SHAPE, RelaxType.ATOMS_SHAPE]:
            inputs.pw.parameters['CELL']['cell_dofree'] = 'shape'

        if relax_type in [RelaxType.CELL, RelaxType.ATOMS_CELL]:
            inputs.pw.parameters['CELL']['cell_dofree'] = 'all'

        # If one of the nested `PwBaseWorkChains` changed the number of bands, apply it here
        if self.ctx.current_number_of_bands is not None:
            inputs.pw.parameters.setdefault(
                'SYSTEM', {})['nbnd'] = self.ctx.current_number_of_bands

        # Set the `CALL` link label
        inputs.metadata.call_link_label = f'iteration_{self.ctx.iteration:02d}'

        inputs = prepare_process_inputs(PwBaseWorkChain, inputs)
        running = self.submit(PwBaseWorkChain, **inputs)

        self.report(f'launching PwBaseWorkChain<{running.pk}>')

        return ToContext(workchains=append_(running))
Example #25
def generate_inputs_default():
    """Return only those inputs that the parser will expect to be there."""
    a = 5.43
    structure = orm.StructureData(
        cell=[[a / 2., a / 2., 0], [a / 2., 0, a / 2.], [0, a / 2., a / 2.]])
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si1')
    structure.append_atom(position=(a / 4., a / 4., a / 4.),
                          symbols='Si',
                          name='Si2')
    structure.store()
    parameters = {
        'CONTROL': {
            'calculation': 'scf'
        },
        'SYSTEM': {
            'ecutrho': 240.0,
            'ecutwfc': 30.0
        }
    }
    kpoints = orm.KpointsData()
    kpoints.set_cell_from_structure(structure)
    kpoints.set_kpoints_mesh_from_density(0.15)

    return AttributeDict({
        'structure': structure,
        'kpoints': kpoints,
        'parameters': orm.Dict(dict=parameters),
        'settings': orm.Dict()
    })
Example #26
    def get_inputs_final_scf(self):
        """
        Initializes inputs for final scf on relaxed structure.
        """
        input_scf = AttributeDict(
            self.exposed_inputs(FleurScfWorkChain, namespace='scf'))
        input_final_scf = self.ctx.input_final_scf

        if 'wf_parameters' not in input_final_scf:
            # reuse the wf_parameters of the relax SCF, or fall back to the defaults
            if 'wf_parameters' not in input_scf:
                scf_wf_dict = {}
            else:
                scf_wf_dict = input_scf.wf_parameters.get_dict()
                if 'inpxml_changes' in scf_wf_dict:
                    old_changes = scf_wf_dict['inpxml_changes']
                    new_changes = []
                    for change in old_changes:
                        if 'shift_value' not in change[0]:
                            new_changes.append(change)
                    scf_wf_dict['inpxml_changes'] = new_changes

            scf_wf_dict['mode'] = 'density'
            input_final_scf.wf_parameters = Dict(dict=scf_wf_dict)
        structure = self.ctx.final_structure
        formula = structure.get_formula()
        input_final_scf.structure = structure
        input_final_scf.fleur = input_scf.fleur
        input_final_scf.metadata.label = 'SCF_final_{}'.format(formula)
        input_final_scf.metadata.description = (
            'Final SCF workchain running on optimized structure {}, '
            'part of relax workchain'.format(formula))

        return input_final_scf
Example #27
    def _wrap_bare_dict_inputs(self, port_namespace, inputs):
        """Wrap bare dictionaries in `inputs` in a `Dict` node if dictated by the corresponding inputs portnamespace.

        :param port_namespace: a `PortNamespace`
        :param inputs: a dictionary of inputs intended for submission of the process
        :return: an attribute dictionary with all bare dictionaries wrapped in `Dict` if dictated by the port namespace
        """
        from aiida.engine.processes import PortNamespace

        wrapped = {}

        for key, value in inputs.items():

            if key not in port_namespace:
                wrapped[key] = value
                continue

            port = port_namespace[key]

            if isinstance(port, PortNamespace):
                wrapped[key] = self._wrap_bare_dict_inputs(port, value)
            elif port.valid_type == orm.Dict and isinstance(value, dict):
                wrapped[key] = orm.Dict(dict=value)
            else:
                wrapped[key] = value

        return AttributeDict(wrapped)
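
A standalone sketch of the same wrapping pattern, written as a free function so it can be tried outside a work chain (the process class is a placeholder; creating unstored `Dict` nodes assumes a loaded AiiDA profile):

from aiida import orm
from aiida.common.extendeddicts import AttributeDict
from aiida.engine.processes import PortNamespace


def wrap_bare_dict_inputs(port_namespace, inputs):
    """Recursively wrap plain dicts in `orm.Dict` wherever the port namespace expects a `Dict` node."""
    wrapped = {}
    for key, value in inputs.items():
        if key not in port_namespace:
            wrapped[key] = value
        elif isinstance(port_namespace[key], PortNamespace):
            wrapped[key] = wrap_bare_dict_inputs(port_namespace[key], value)
        elif port_namespace[key].valid_type == orm.Dict and isinstance(value, dict):
            wrapped[key] = orm.Dict(dict=value)
        else:
            wrapped[key] = value
    return AttributeDict(wrapped)

# Hypothetical usage, with SomeCalculation standing in for any CalcJob class:
# inputs = wrap_bare_dict_inputs(SomeCalculation.spec().inputs, {'parameters': {'key': 'value'}})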
Example #28
    def get_pw_common_inputs(self):
        """Return the dictionary of inputs to be used as the basis for each `PwBaseWorkChain`."""
        protocol, protocol_modifiers = self._get_protocol()
        checked_pseudos = protocol.check_pseudos(
            modifier_name=protocol_modifiers.get('pseudo', None),
            pseudo_data=protocol_modifiers.get('pseudo_data', None))
        known_pseudos = checked_pseudos['found']

        inputs = AttributeDict({
            'pw': {
                'code':
                self.inputs.code.pw,
                'pseudos':
                get_pseudos_from_dict(self.ctx.current_structure,
                                      known_pseudos),
                'parameters':
                self.ctx.scf_parameters,
                'metadata': {},
            }
        })

        if 'options' in self.inputs:
            inputs.pw.metadata.options = self.inputs.options.get_dict()
        else:
            inputs.pw.metadata.options = self.get_default_options(
                with_mpi=True)

        return inputs
Example #29
    def run_cp2kddec(self):
        """Pass the Cp2kMultistageWorkChain outputs as inputs for
        Cp2kDdecWorkChain: cp2k_base (metadata), cp2k_params, structure and WFN.
        """
        cp2k_ddec_inputs = AttributeDict(self.exposed_inputs(Cp2kDdecWorkChain))
        cp2k_ddec_inputs['cp2k_base'] = self.exposed_inputs(Cp2kMultistageWorkChain)['cp2k_base']
        cp2k_params_modify = Dict(
            dict={
                'FORCE_EVAL': {
                    'DFT': {
                        'WFN_RESTART_FILE_NAME': './parent_calc/aiida-RESTART.wfn',
                        'SCF': {
                            'SCF_GUESS': 'RESTART'
                        }
                    }
                }
            })
        cp2k_params = aiida_dict_merge(self.ctx.ms_wc.outputs.last_input_parameters, cp2k_params_modify)
        cp2k_ddec_inputs['cp2k_base']['cp2k']['parameters'] = cp2k_params

        if 'output_structure' in self.ctx.ms_wc.outputs:
            cp2k_ddec_inputs['cp2k_base']['cp2k']['structure'] = self.ctx.ms_wc.outputs.output_structure
        else:  # no output structure from a CP2K ENERGY calculation, use the input one.
            inp_structure = self.exposed_inputs(Cp2kMultistageWorkChain)['structure']
            cp2k_ddec_inputs['cp2k_base']['cp2k']['structure'] = inp_structure
        cp2k_ddec_inputs['cp2k_base']['cp2k']['parent_calc_folder'] = self.ctx.ms_wc.outputs.remote_folder
        cp2k_ddec_inputs['metadata']['call_link_label'] = 'call_cp2kddec'

        running = self.submit(Cp2kDdecWorkChain, **cp2k_ddec_inputs)
        return ToContext(cp2k_ddec_wc=running)
Example #30
    def setup(self):
        """Initialize internal parameters"""
        self.ctx.iteration = 0
        self.ctx.is_converged = 0
        self.ctx.current_cell_volume = None
        self.ctx.current_structure = self.inputs.structure
        # A dictionary used to update the default inputs
        self.ctx.calc_update = {}  # Update to the calc namespace
        self.ctx.base_update = {}  # Update to the base namespace
        self.ctx.inputs = AttributeDict(self.inputs)

        relax_options = self.inputs.get('relax_options', None)
        if relax_options is None:
            relax_options = {}
        else:
            relax_options = self.inputs.relax_options.get_dict()

        self.ctx.max_meta_iterations = relax_options.pop(
            'max_meta_iterations', self._max_meta_iterations)
        restart_mode = relax_options.pop('restart_mode', 'reuse')
        assert restart_mode in [
            "reuse", "continuation", "structure"
        ], "Invalid restart mode: {}".format(restart_mode)
        self.ctx.bypass_relax = relax_options.pop('bypass', False)
        self.ctx.restart_mode = restart_mode
        self.ctx.relax_options = relax_options
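
For reference, the `relax_options` keys consumed above could be supplied like this (a sketch; creating the unstored `Dict` node assumes a loaded AiiDA profile):

from aiida import orm

# Hypothetical relax_options input matching the keys popped in setup() above:
relax_options = orm.Dict(dict={
    'max_meta_iterations': 5,      # cap on the outer relaxation loop
    'restart_mode': 'structure',   # one of 'reuse', 'continuation', 'structure'
    'bypass': False,               # skip the relaxation entirely when True
})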