Example #1
def Distribute(req, prop):
    """
    After the retrieval of the structure,
    we proceed with the distribution of the task
    according to the requested data
    : input req workfunction node
    : prop property to calculate
    """

    if prop == 'band_gap':
        # Submit a band gap workchain
        xx = WorkflowFactory('ext_aiida.BandGap')
        calcspecs = req.inputs.predefined['aiida']
        structure = req.outputs.structure
        pwcode = calcspecs['qe']  # AiiDA pk of the code node
        code = load_node(pwcode)
        upfamily = calcspecs['upf']
        wf = submit(xx, structure=structure, code=code)
    elif prop == 'band_structure':
        xx = WorkflowFactory('quantumespresso.pw.band_structure')
        calcspecs = req.inputs.predefined['aiida']
        structure = req.outputs.structure
        pwcode = calcspecs['qe']
        code = load_node(pwcode)
        print('picc {}'.format(code))
        # upfamily = calcspecs['upf']
        wf = submit(xx, structure=structure, code=code)
    else:
        raise ValueError('Unknown property: {}'.format(prop))

    return wf
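
A hypothetical invocation sketch; the pk is a placeholder for a finished ProcessInputs workfunction node like the one submitted in Example #20:

from aiida import load_profile
from aiida.orm import load_node

load_profile()
req = load_node(1234)  # placeholder pk of a finished ProcessInputs node
wf = Distribute(req, 'band_gap')
print('Submitted workchain:', wf.uuid)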
Example #2
def test_generate_inputs(castep_code, nacl, si):  # pylint: disable=invalid-name
    """
    Test for the generator
    """
    protocol = CastepRelaxInputGenerator(process_class=CastepRelaxWorkChain).get_protocol('moderate')
    override = {'base': {'metadata': {'label': 'test'}, 'calc': {}}}

    output = generate_inputs(WorkflowFactory('castep.relax'), copy.deepcopy(protocol), castep_code, si, override)
    assert output['calc']['parameters']['basis_precision'] == 'fine'
    assert 'structure' not in output['calc']

    output = generate_inputs(WorkflowFactory('castep.base'), copy.deepcopy(protocol), castep_code, si, override)
    assert output['calc']['parameters']['PARAM']['basis_precision'] == 'fine'
    assert 'structure' in output['calc']
def search_pk(uuid):
    """uuid can be pk."""
    IterHarmonicApprox = WorkflowFactory('phonopy.iter_ha')
    qb = QueryBuilder()
    qb.append(IterHarmonicApprox,
              tag='iter_ha',
              filters={'uuid': {
                  '==': uuid
              }})
    PhonopyWorkChain = WorkflowFactory('phonopy.phonopy')
    qb.append(PhonopyWorkChain, with_incoming='iter_ha')
    qb.order_by({PhonopyWorkChain: {'ctime': 'asc'}})
    pks = [n[0].pk for n in qb.all() if n[0].is_finished_ok]

    return pks
Example #4
def main(code_string, datafiles, parameters):
    """Main method to setup the calculation."""

    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')

    # Then, we set the workchain we would like to call
    workchain = WorkflowFactory('logger.gc_example')

    # Set inputs for the following WorkChain execution
    inputs = AttributeDict()
    # inputs.metadata = {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
    #                                'parser_name': 'logger',
    #                                'withmpi': False,
    #                                'output_filename': 'logger.out'}}
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set datafiles
    inputs.datafiles = datafiles
    # Set parameters
    inputs.parameters = dict_data(dict=parameters)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)
    # Submit the requested workchain with the supplied inputs
    run(workchain, **inputs)
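
Note that run executes the workchain in the current interpreter and blocks until it finishes; a minimal sketch of the non-blocking alternative used in most of the other examples:

from aiida.engine import submit

node = submit(workchain, **inputs)  # returns the workchain node immediately; the daemon does the work
print('Submitted workchain with pk={}'.format(node.pk))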
Example #5
def launch_aiida_bulk_modulus(structure,
                              code_string,
                              resources,
                              label="AlN VASP relax calculation"):
    incar_dict = {
        'PREC': 'Accurate',
        'EDIFF': 1e-8,
        'NELMIN': 5,
        'NELM': 100,
        'ENCUT': 500,
        'IALGO': 38,
        'ISMEAR': 0,
        'SIGMA': 0.01,
        'GGA': 'PS',
        'LREAL': False,
        'LCHARG': False,
        'LWAVE': False,
    }

    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 4], offset=[0, 0, 0.5])

    options = {'resources': resources, 'max_wallclock_seconds': 3600 * 10}

    potential_family = 'PBE.54'
    potential_mapping = {'Al': 'Al', 'N': 'N'}

    parser_settings = {
        'add_energies': True,
        'add_forces': True,
        'add_stress': True
    }

    code = Code.get_from_string(code_string)
    Workflow = WorkflowFactory('vasp_bm.bulkmodulus')
    builder = Workflow.get_builder()
    builder.code = code
    builder.parameters = Dict(dict=incar_dict)
    builder.structure = structure
    builder.settings = Dict(dict={'parser_settings': parser_settings})
    builder.potential_family = Str(potential_family)
    builder.potential_mapping = Dict(dict=potential_mapping)
    builder.kpoints = kpoints
    builder.options = Dict(dict=options)
    builder.metadata.label = label
    builder.metadata.description = label
    builder.clean_workdir = Bool(False)
    builder.relax = Bool(True)
    builder.force_cutoff = Float(1e-8)
    builder.steps = Int(10)
    builder.positions = Bool(True)
    builder.shape = Bool(True)
    builder.volume = Bool(True)
    builder.convergence_on = Bool(True)
    builder.convergence_volume = Float(1e-8)
    builder.convergence_max_iterations = Int(2)
    builder.verbose = Bool(True)

    node = submit(builder)
    return node
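
The returned node can be inspected while the daemon works through the workchain; a usage sketch in which the structure, code string, and resources are placeholders:

node = launch_aiida_bulk_modulus(structure, 'vasp@cluster',
                                 {'num_machines': 1, 'num_mpiprocs_per_machine': 8})
print('pk={} state={}'.format(node.pk, node.process_state))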
Example #6
def primitive_structure_from_cif(cif, parse_engine, symprec, site_tolerance):
    """Attempt to parse the given `CifData` and create a `StructureData` from it.

    First the raw CIF file is parsed with the given `parse_engine`. The resulting `StructureData` is then passed through
    SeeKpath to try and get the primitive cell. If that is successful, important structural parameters as determined by
    SeeKpath will be set as extras on the structure node which is then returned as output.

    :param cif: the `CifData` node
    :param parse_engine: the parsing engine; supported libraries are 'ase' and 'pymatgen'
    :param symprec: a `Float` node with symmetry precision for determining primitive cell in SeeKpath
    :param site_tolerance: a `Float` node with the fractional coordinate distance tolerance for finding overlapping
        sites. This will only be used if the parse_engine is pymatgen
    :return: the primitive `StructureData` as determined by SeeKpath
    """
    CifCleanWorkChain = WorkflowFactory('codtools.cif_clean')  # pylint: disable=invalid-name

    try:
        structure = cif.get_structure(converter=parse_engine.value,
                                      site_tolerance=site_tolerance.value,
                                      store=False)
    except exceptions.UnsupportedSpeciesError:
        return CifCleanWorkChain.exit_codes.ERROR_CIF_HAS_UNKNOWN_SPECIES
    except InvalidOccupationsError:
        return CifCleanWorkChain.exit_codes.ERROR_CIF_HAS_INVALID_OCCUPANCIES
    except Exception:  # pylint: disable=broad-except
        return CifCleanWorkChain.exit_codes.ERROR_CIF_STRUCTURE_PARSING_FAILED

    try:
        seekpath_results = get_kpoints_path(structure, symprec=symprec)
    except ValueError:
        return CifCleanWorkChain.exit_codes.ERROR_SEEKPATH_INCONSISTENT_SYMMETRY
    except SymmetryDetectionError:
        return CifCleanWorkChain.exit_codes.ERROR_SEEKPATH_SYMMETRY_DETECTION_FAILED

    # Store important information that should be easily queryable as extras on the structure node
    parameters = seekpath_results['parameters'].get_dict()
    structure = seekpath_results['primitive_structure']

    # Store the formula as a string, in both hill and hill-compact notation, so it can be easily queried
    extras = {
        'formula_hill': structure.get_formula(mode='hill'),
        'formula_hill_compact': structure.get_formula(mode='hill_compact'),
        'chemical_system': '-{}-'.format('-'.join(sorted(structure.get_symbols_set()))),
    }

    for key in ['spacegroup_international', 'spacegroup_number', 'bravais_lattice', 'bravais_lattice_extended']:
        try:
            extras[key] = parameters[key]
        except KeyError:
            pass

    structure.set_extra_many(extras)

    return structure
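
Because the formula and space group end up as extras, the primitive structures can later be filtered directly in the database; a sketch (the formula value is a placeholder):

from aiida.orm import QueryBuilder, StructureData

qb = QueryBuilder()
qb.append(StructureData, filters={'extras.formula_hill': {'==': 'Mg1O1'}})  # placeholder formula
structures = qb.all(flat=True)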
Example #7
def launch_phono3py(cutoff_energy=350, is_nac=False):
    """Launch calculation."""
    structure, forces_config, nac_config, phonon_settings = get_settings(
        cutoff_energy, is_nac)
    Phono3pyWorkChain = WorkflowFactory("phonopy.phono3py")
    builder = Phono3pyWorkChain.get_builder()
    builder.structure = structure
    builder.calculator_settings = Dict(dict={
        "forces": forces_config,
        "nac": nac_config
    })
    builder.run_phono3py = Bool(False)
    builder.remote_phono3py = Bool(False)
    builder.code_string = Str("phonopy@nancy")
    builder.phonon_settings = Dict(dict=phonon_settings)
    builder.symmetry_tolerance = Float(1e-5)
    builder.options = Dict(dict=forces_config["options"])
    dim = phonon_settings["supercell_matrix"]
    kpoints_mesh = forces_config["kpoints_mesh"]
    label = "ZnTe phono3py %dx%dx%d kpt %dx%dx%d PBEsol %d eV" % (
        tuple(dim) + tuple(kpoints_mesh) + (cutoff_energy, ))
    builder.metadata.label = label
    builder.metadata.description = label

    future = submit(builder)
    print(label)
    print(future)
    print("Running workchain with pk={}".format(future.pk))
Example #8
def launch_mae(structure, inpgen, calc_parameters, fleurinp, fleur,
               wf_parameters, scf_parameters, parent_folder, daemon, settings,
               option_node):
    """
    Launch a mae workchain
    """
    workchain_class = WorkflowFactory('fleur.mae')
    inputs = {
        'scf': {
            'wf_parameters': scf_parameters,
            'structure': structure,
            'calc_parameters': calc_parameters,
            'settings': settings,
            'inpgen': inpgen,
            'fleur': fleur
        },
        'wf_parameters': wf_parameters,
        'fleurinp': fleurinp,
        'remote': parent_folder,
        'fleur': fleur,
        'options': option_node
    }

    inputs = clean_nones(inputs)
    builder = workchain_class.get_builder()
    builder.update(inputs)
    launch_process(builder, daemon)
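
Both this launcher and the one in Example #26 rely on a clean_nones helper that is not shown; a minimal sketch of what it presumably does, judging by the name and usage (an assumption, not the original helper):

def clean_nones(mapping):
    """Recursively drop entries whose value is None (sketch, not the original helper)."""
    cleaned = {}
    for key, value in mapping.items():
        if isinstance(value, dict):
            cleaned[key] = clean_nones(value)
        elif value is not None:
            cleaned[key] = value
    return cleaned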
def get_parameters(previous_workchain):
    """
    Extracts the FLAPW parameter for inpgen from a given previous workchain
    It finds the last Fleur Calcjob or Inpgen calc and extracts the
    parameters from its fleurinpdata node
    :param previous_workchain: Some workchain which contains at least one
                               Fleur CalcJob or Inpgen CalcJob.
    :return: Dict node of parameters ready to use, or None
    """
    from aiida.plugins import WorkflowFactory
    from aiida.common.exceptions import NotExistent
    from aiida_fleur.tools.common_fleur_wf import find_last_submitted_workchain

    fleur_scf_wc = WorkflowFactory('fleur.scf')
    # Find Fleurinp
    try:
        last_base_relax = find_last_submitted_workchain(previous_workchain)
        last_relax = find_last_submitted_workchain(
            orm.load_node(last_base_relax))
        last_scf = find_last_submitted_workchain(orm.load_node(last_relax))
        last_scf = orm.load_node(last_scf)
    except NotExistent:
        # Something went wrong in the previous workchain run;
        # continue without previous parameters and fall back to the defaults.
        return None
    if last_scf.process_class is fleur_scf_wc:
        fleurinp = last_scf.outputs.fleurinp
    else:
        return None
    # Be aware that this parameter node is incomplete; for example, LOs and
    # econfig are currently missing.
    parameters = fleurinp.get_parameterdata_ncf()  # this is not a calcfunction!
    return parameters
Example #10
    def run_two_volumes(self):
        self.report("run_two_volumes")
        for strain, future_name in zip((0.99, 1.01), ('minus', 'plus')):
            Workflow = WorkflowFactory('vasp.relax')
            builder = Workflow.get_builder()
            for key in self.ctx.inputs:
                builder[key] = self.ctx.inputs[key]
            if 'label' in self.ctx.inputs.metadata:
                label = self.ctx.inputs.metadata['label'] + " " + future_name
                builder.metadata['label'] = label
            if 'description' in self.ctx.inputs.metadata:
                description = self.ctx.inputs.metadata['description']
                description += " " + future_name
                builder.metadata['description'] = description
            builder.structure = self.ctx['structure_%s' % future_name]
            relax = AttributeDict()
            relax.perform = Bool(True)
            relax.force_cutoff = Float(1e-8)
            relax.positions = Bool(True)
            relax.shape = Bool(True)
            relax.volume = Bool(False)
            relax.convergence_on = Bool(False)
            builder.relax = relax
            future = self.submit(builder)
            self.to_context(**{future_name: future})
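
Once both futures resolve, a later step in the workchain outline can pick the two runs back up from the context; a sketch of such a follow-up step (the method name is hypothetical):

    def analyze_two_volumes(self):
        """Hypothetical follow-up step: the futures registered above are now resolved nodes."""
        for name in ('minus', 'plus'):
            workchain = self.ctx[name]
            self.report('{} run finished ok: {}'.format(name, workchain.is_finished_ok))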
Example #11
def test_eosworkchain_inpgen(aiida_profile, fixture_code, generate_structure):
    """Test the validation of subclasses of `InputsGenerator`."""

    from aiida_siesta.utils.protocols_system.input_generators import EosWorkChainInputGenerator

    inp_gen = EosWorkChainInputGenerator(WorkflowFactory("siesta.eos"))
    structure = generate_structure()
    protocol = inp_gen.get_default_protocol_name()
    code = fixture_code("siesta.siesta")
    code.store()
    calc_engines = {
        "siesta": {
            'code': code.uuid,
            'options': {
                "resources": {
                    "num_mpiprocs_per_machine": 1
                },
                "max_wallclock_seconds": 360
            }
        }
    }

    build = inp_gen.get_filled_builder(structure,
                                       calc_engines,
                                       protocol,
                                       relaxation_type="atoms_only")

    assert "parameters" in build
Example #12
class QuantumEspressoCommonRelaxWorkChain(CommonRelaxWorkChain):
    """Implementation of `aiida_common_workflows.common.relax.workchain.CommonRelaxWorkChain` for Quantum ESPRESSO."""

    _process_class = WorkflowFactory('quantumespresso.pw.relax')
    _generator_class = QuantumEspressoCommonRelaxInputGenerator

    def convert_outputs(self):
        """Convert the outputs of the sub workchain to the common output specification."""
        outputs = self.ctx.workchain.outputs

        result = extract_from_parameters(outputs.output_parameters).values()
        forces, stress = extract_from_trajectory(
            outputs.output_trajectory).values()

        try:
            total_energy, total_magnetization = result
        except ValueError:
            total_energy, total_magnetization = list(result)[0], None

        if 'output_structure' in outputs:
            self.out('relaxed_structure', outputs.output_structure)

        if total_magnetization is not None:
            self.out('total_magnetization', total_magnetization)

        self.out('total_energy', total_energy)
        self.out('forces', forces)
        self.out('stress', stress)
Example #13
def launch_workflow(code, calculation, kpoints_mesh, clean_workdir,
                    max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run the `PhBaseWorkChain` for a previously completed `PwCalculation`."""
    from aiida.orm import Bool, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'ph': {
            'code': code,
            'qpoints': kpoints_mesh,
            'parent_folder': calculation.outputs.remote_folder,
            'parameters': Dict(dict={'INPUTPH': {}}),
            'metadata': {
                'options':
                get_default_options(max_num_machines, max_wallclock_seconds,
                                    with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.ph.base'), daemon,
                          **inputs)
Example #14
def launch_workflow(code, datum, kpoints_mesh, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `MatdynBaseWorkChain` for a previously completed `Q2rCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'matdyn': {
            'code': code,
            'kpoints': kpoints_mesh,
            'force_constants': datum,
            'metadata': {
                'options':
                get_default_options(max_num_machines, max_wallclock_seconds,
                                    with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.matdyn.base'),
                          daemon, **inputs)
Example #15
def launch_workflow(code, calculation, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `Q2rBaseWorkChain` for a previously completed `PhCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    expected_process_type = 'aiida.calculations:quantumespresso.ph'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    inputs = {
        'q2r': {
            'code': code,
            'parent_folder': calculation.outputs.remote_folder,
            'metadata': {
                'options':
                get_default_options(max_num_machines, max_wallclock_seconds,
                                    with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.q2r.base'), daemon,
                          **inputs)
Example #16
    def run_scf(self):
        """
        Run the SCF calculation
        """

        base_work = WorkflowFactory(self._base_wk_string)
        inputs = AttributeDict(self.exposed_inputs(base_work, namespace='scf'))
        inputs.metadata.call_link_label = 'scf'
        inputs.calc.structure = self.ctx.current_structure

        # Ensure that the check/castep_bin file will be written
        param_dict = inputs.calc.parameters.get_dict()
        if 'PARAM' in param_dict:
            ensure_checkpoint(param_dict['PARAM'])
        else:
            ensure_checkpoint(param_dict)

        # Update if changes are made
        if param_dict != inputs.calc.parameters.get_dict():
            self.report(
                "Updated the PARAM to make sure castep_bin file will be written"
            )
            inputs.calc.parameters = orm.Dict(dict=param_dict)

        running = self.submit(base_work, **inputs)
        self.report('Running SCF calculation {}'.format(running))
        self.to_context(workchain_scf=running)
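
The ensure_checkpoint helper is not shown; a minimal sketch of what it presumably does, based on the surrounding comments (an assumption; WRITE_CHECKPOINT is the relevant CASTEP keyword):

def ensure_checkpoint(param):
    """Make sure CASTEP writes the check/castep_bin file (sketch, not the original helper)."""
    if str(param.get('write_checkpoint', 'all')).lower() == 'none':
        param['write_checkpoint'] = 'all'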
Example #17
def test_relax_generator(castep_code, nacl, with_otfg):
    """Test for generating the relax namespace"""
    CastepCommonRelaxWorkChain = WorkflowFactory('castep.relax')  # pylint: disable=invalid-name
    protocol = CastepCommonRelaxInputGenerator(
        process_class=CastepCommonRelaxWorkChain).get_protocol(
            'moderate')['relax']
    override = {
        'base': {
            'metadata': {
                'label': 'test'
            },
            'calc': {
                'parameters': {
                    'cut_off_energy': 220
                },
                'metadata': {
                    'label': 'test'
                }
            }
        }
    }
    otfg = OTFGGroup.objects.get(label='C19')
    generated = generate_inputs_relax(protocol, castep_code, nacl, otfg,
                                      override)

    assert 'structure' in generated
    paramd = generated['calc']['parameters']
    assert 'basis_precision' not in paramd
    assert 'kpoints_spacing' in generated['base']
    assert 'kpoints' not in generated['calc']

    assert generated['calc']['metadata']['label'] == 'test'
    assert generated['base']['metadata']['label'] == 'test'
def find_latest_uuid():
    """Return the uuid of the most recently created IterHarmonicApprox workchain."""
    IterHarmonicApprox = WorkflowFactory('phonopy.iter_ha')
    qb = QueryBuilder()
    qb.append(IterHarmonicApprox)
    qb.order_by({IterHarmonicApprox: {'ctime': 'desc'}})
    return qb.first()[0].uuid
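
The two helpers combine naturally; a usage sketch that lists the finished PhonopyWorkChain pks of the most recent IterHarmonicApprox run:

pks = search_pk(find_latest_uuid())
print('Finished PhonopyWorkChain pks:', pks)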
Example #19
class FleurCommonRelaxWorkChain(CommonRelaxWorkChain):
    """Implementation of `aiida_common_workflows.common.relax.workchain.CommonRelaxWorkChain` for FLEUR."""

    _process_class = WorkflowFactory('fleur.base_relax')
    _generator_class = FleurCommonRelaxInputGenerator

    def convert_outputs(self):
        """Convert the outputs of the sub workchain to the common output specification."""

        outputs = self.ctx.workchain.outputs

        if 'optimized_structure' in outputs:
            self.out('relaxed_structure', outputs.optimized_structure)

        output_parameters = outputs.output_relax_wc_para
        out_para_dict = output_parameters.get_dict()
        if out_para_dict.get('total_magnetic_moment_cell') is not None:
            self.out('total_magnetization',
                     get_total_magnetization(output_parameters))
        self.out('total_energy',
                 get_total_energy(outputs.output_relax_wc_para))
        self.out('forces',
                 get_forces_from_trajectory(outputs.output_relax_wc_para))
Example #20
    def post(self, prop):
        """
        Route to manage the requests from ext
        Access is through a JSON file passed to the serveri
        containing the input required for calculation
        Data is handled and responded accordingly

        :input prop is the quantity we required for calculation
        """
        # workfunction to process the incoming json dictionary
        # here it needs a validation by
        cao = 'cao\n'
        print(
            cao, cao,
            'data', request.data, cao,
            'args', request.args.getall(), cao,
            'form', request.form, cao,
            'files', request.files, cao,
            request.values, cao,
            request.base_url, cao,
            request.date, cao,
        )
        xx = WorkflowFactory('ext_aiida.ProcessInputs')
        wf = submit(
            xx,
            request=Dict(dict=request.get_json()),
            predefined=Dict(dict=CALCULATION_OPTIONS),
            property=Str(prop),
        )
        sleep(2)
        if not wf.is_finished_ok:
            msg = 'Structure retrieval error. See node uuid={} for more specific report'.format(
                wf.uuid)
            return {
                'error': wf.exit_message,
                'message': msg,
                'stored_request': wf.inputs.request.get_dict(),
            }
        else:
            exwf = Distribute(wf, prop)
            msg = 'Successfully retrieved structure {}; workflow submitted at pk {}'.format(
                exwf.inputs.structure.pk, exwf.pk)
            return {
                'error': wf.exit_message,
                'message': msg,
                'stored_request': wf.inputs.request.get_dict(),
            }
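
Judging by the request object and the post(self, prop) signature, this method lives on a REST resource class; a hedged sketch of how it might be wired up with Flask-RESTful (the class name and route are assumptions, and the actual framework may differ):

from flask import Flask
from flask_restful import Api, Resource

class PropertyEndpoint(Resource):  # hypothetical name for the class that owns post() above
    pass  # the post() method shown above would live here

app = Flask(__name__)
api = Api(app)
api.add_resource(PropertyEndpoint, '/calculate/<string:prop>')  # hypothetical route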
Example #21
def launch_workflow(code, structure, pseudo_family, kpoints_distance, ecutwfc,
                    ecutrho, hubbard_u, hubbard_v, hubbard_file_pk,
                    starting_magnetization, smearing,
                    automatic_parallelization, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run a `PwBaseWorkChain`."""
    from aiida.orm import Bool, Float, Str, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options, get_automatic_parallelization_options

    builder = WorkflowFactory('quantumespresso.pw.base').get_builder()

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
        },
    }

    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    builder.pw.code = code
    builder.pw.structure = structure
    builder.pw.parameters = Dict(dict=parameters)
    builder.pseudo_family = Str(pseudo_family)
    builder.kpoints_distance = Float(kpoints_distance)

    if hubbard_file:
        builder.hubbard_file = hubbard_file

    if automatic_parallelization:
        automatic_parallelization = get_automatic_parallelization_options(
            max_num_machines, max_wallclock_seconds)
        builder.automatic_parallelization = Dict(
            dict=automatic_parallelization)
    else:
        builder.pw.metadata.options = get_default_options(
            max_num_machines, max_wallclock_seconds, with_mpi)

    if clean_workdir:
        builder.clean_workdir = Bool(True)

    launch.launch_process(builder, daemon)
Example #22
def export_isotherm(
    sample,
    node,
    file_name: str = None,
    aiidalab_instance: str = "unknown",
):
    """Export Isotherm object."""
    source_info = {
        "uuid": node.uuid,
        "url": aiidalab_instance,
        "name": "Isotherm simulated using the isotherm app on AiiDAlab",
    }

    # Workaround until the Isotherm object is ready.
    isotherm_wf = WorkflowFactory("lsmo.isotherm")
    query = (
        orm.QueryBuilder()
        .append(orm.Dict, filters={"uuid": node.uuid}, tag="isotherm_data")
        .append(isotherm_wf, with_outgoing="isotherm_data", tag="isotherm_wf")
        .append(orm.Str, with_outgoing="isotherm_wf", project="attributes.value")
    )
    adsorptive = query.all(flat=True)
    adsorptive = adsorptive[0] if adsorptive else None

    meta = {
        "adsorptive": adsorptive,
        "temperature": node["temperature"],
        "method": "GCMC",
    }
    jcamp = from_dict(
        {
            "x": {
                "data": node["isotherm"]["pressure"],
                "unit": node["isotherm"]["pressure_unit"],
                "type": "INDEPENDENT",
            },
            "y": {
                "data": node["isotherm"]["loading_absolute_average"],
                "unit": node["isotherm"]["loading_absolute_unit"],
                "type": "DEPENDENT",
            },
        },
        data_type="Adsorption Isotherm",
        meta=meta,
    )
    sample.put_data(
        data_type="isotherm",
        file_name=f"{node.uuid}.jcamp"
        if file_name is None else f"{file_name}.jcamp",
        file_content=jcamp,
        metadata=meta,
        source_info=source_info,
    )
Example #23
    def _set_aiida_twinboundary_relax(self):
        """
        Set twinboundary relax pk.
        """
        tb_rlx_wf = WorkflowFactory('twinpy.twinboundary_relax')
        tb_rlx_struct_pk = self._node.inputs.twinboundary_relax_structure.pk
        tb_rlx = get_create_node(tb_rlx_struct_pk, tb_rlx_wf)
        self._aiida_twinboundary_relax = AiidaTwinBoudnaryRelaxWorkChain(tb_rlx)
def test_bad_settings(db_test_app):
    """Test that an invalid settings dict raises a validation error."""
    with pytest.raises(ValidationError):
        run_get_node(
            WorkflowFactory("crystal17.sym3d"),
            settings=DataFactory("dict")(dict={"a": 1}),
        )
def test_no_structure(db_test_app):
    """Test that omitting both StructureData and CifData fails the workchain."""
    wflow_cls = WorkflowFactory("crystal17.sym3d")
    results, node = run_get_node(
        wflow_cls, settings=DataFactory("dict")(dict={"symprec": 0.01}))
    assert node.is_failed, node.exit_status
    assert node.exit_status == wflow_cls.exit_codes.ERROR_INVALID_INPUT_RESOURCES.status
Example #26
def launch_fleur(fleurinp, fleur, parent_folder, settings, daemon,
                 max_num_machines, max_wallclock_seconds,
                 num_mpiprocs_per_machine, option_node, with_mpi, launch_base):
    """
    Launch a base_fleur workchain.
    If launch_base is False launch a single fleur calcjob instead.

    """

    process_class = CalculationFactory('fleur.fleur')
    workchain_class = WorkflowFactory('fleur.base')

    inputs = {
        'code': fleur,
        'fleurinpdata': fleurinp,
        'parent_folder': parent_folder,
        'settings': settings,
        'metadata': {
            'options': {
                'withmpi': with_mpi,
                'max_wallclock_seconds': max_wallclock_seconds,
                'resources': {
                    'num_machines': max_num_machines,
                    'num_mpiprocs_per_machine': num_mpiprocs_per_machine,
                }
            }
        }
    }

    if not launch_base:
        inputs = clean_nones(inputs)
        builder = process_class.get_builder()
        builder.update(inputs)
    else:
        if option_node is None:
            option_node = Dict(
                dict={
                    'withmpi': with_mpi,
                    'max_wallclock_seconds': max_wallclock_seconds,
                    'resources': {
                        'num_machines': max_num_machines,
                        'num_mpiprocs_per_machine': num_mpiprocs_per_machine
                    }
                })

        inputs_base = {
            'code': fleur,
            'fleurinpdata': fleurinp,
            'parent_folder': parent_folder,
            'settings': settings,
            'options': option_node
        }
        inputs_base = clean_nones(inputs_base)
        builder = workchain_class.get_builder()
        builder.update(**inputs_base)

    launch_process(builder, daemon)
Example #27
def validate_sub_process_class(value, _):
    """Validate the sub process class."""
    try:
        process_class = WorkflowFactory(value)
    except exceptions.EntryPointError:
        return f'`{value}` is not a valid or registered workflow entry point.'

    if not inspect.isclass(process_class) or not issubclass(process_class, CommonRelaxWorkChain):
        return f'`{value}` is not a subclass of the `CommonRelaxWorkChain` common workflow.'
Example #28
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # We set the workchain we would like to call
    workchain = WorkflowFactory('vasp.relax')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {}

    # Set inputs for the following WorkChain execution
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = DataFactory('array.kpoints')()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = DataFactory('dict')(dict=incar)
    # Set potentials and their mapping
    inputs.potential_family = DataFactory('str')(potential_family)
    inputs.potential_mapping = DataFactory('dict')(dict=potential_mapping)
    # Set options
    inputs.options = DataFactory('dict')(dict=options)
    # Set settings
    inputs.settings = DataFactory('dict')(dict=settings)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = DataFactory('bool')(True)

    # Relaxation related parameters that are passed to the relax workchain
    relax = AttributeDict()
    # Turn on relaxation
    relax.perform = DataFactory('bool')(True)
    # Select relaxation algorithm
    relax.algo = DataFactory('str')('cg')
    # Set force cutoff limit (EDIFFG, but no sign needed)
    relax.force_cutoff = DataFactory('float')(0.01)
    # The next three parameters correspond to the well-known ISIF=3 setting.
    # Turn on relaxation of positions (strictly not needed, as the default is on)
    relax.positions = DataFactory('bool')(True)
    # Turn on relaxation of the cell shape (defaults to False)
    relax.shape = DataFactory('bool')(True)
    # Turn on relaxation of the volume (defaults to False)
    relax.volume = DataFactory('bool')(True)
    # Set maximum number of ionic steps
    relax.steps = DataFactory('int')(100)
    # Set the relaxation parameters on the inputs
    inputs.relax = relax
    # Submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
Example #29
    def run_relax(self):
        """Run the relaxation"""
        relax_work = WorkflowFactory(self._relax_wk_string)
        inputs = self.exposed_inputs(relax_work, 'relax', agglomerate=True)
        inputs = AttributeDict(inputs)
        inputs.metadata.call_link_label = 'relax'
        inputs.structure = self.ctx.current_structure

        running = self.submit(relax_work, **inputs)
        return self.to_context(workchain_relax=running)
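
For exposed_inputs to return anything, the workchain's spec must have exposed the sub-workchain's inputs under that namespace; a sketch of the corresponding define fragment (the relax entry point is illustrative, since self._relax_wk_string is set elsewhere):

    @classmethod
    def define(cls, spec):
        super().define(spec)
        # Expose the relax workchain inputs under the 'relax' namespace
        spec.expose_inputs(WorkflowFactory('vasp.relax'), namespace='relax')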
Example #30
    def _set_relaxes(self):
        """
        Set relax in ShearWorkChain.
        """
        relax_wf = WorkflowFactory('vasp.relax')
        rlx_pks = get_called_nodes(pk=self._node.pk, called_node_type=relax_wf)
        self._relax_pks = rlx_pks
        self._relaxes = [
            AiidaRelaxWorkChain(node=load_node(pk)) for pk in self._relax_pks
        ]