Code example #1
def launch_phono3py(cutoff_energy=350, is_nac=False):
    """Launch calculation."""
    structure, forces_config, nac_config, phonon_settings = get_settings(
        cutoff_energy, is_nac)
    Phono3pyWorkChain = WorkflowFactory("phonopy.phono3py")
    builder = Phono3pyWorkChain.get_builder()
    builder.structure = structure
    builder.calculator_settings = Dict(dict={
        "forces": forces_config,
        "nac": nac_config
    })
    builder.run_phono3py = Bool(False)
    builder.remote_phono3py = Bool(False)
    builder.code_string = Str("phonopy@nancy")
    builder.phonon_settings = Dict(dict=phonon_settings)
    builder.symmetry_tolerance = Float(1e-5)
    builder.options = Dict(dict=forces_config["options"])
    dim = phonon_settings["supercell_matrix"]
    kpoints_mesh = forces_config["kpoints_mesh"]
    label = "ZnTe phono3py %dx%dx%d kpt %dx%dx%d PBEsol %d eV" % (
        tuple(dim) + tuple(kpoints_mesh) + (cutoff_energy, ))
    builder.metadata.label = label
    builder.metadata.description = label

    future = submit(builder)
    print(label)
    print(future)
    print("Running workchain with pk={}".format(future.pk))
Code example #2
File: submit.py Project: lan496/aiida-vasp-bm
def launch_aiida_bulk_modulus(structure,
                              code_string,
                              resources,
                              label="AlN VASP relax calculation"):
    incar_dict = {
        'PREC': 'Accurate',
        'EDIFF': 1e-8,
        'NELMIN': 5,
        'NELM': 100,
        'ENCUT': 500,
        'IALGO': 38,
        'ISMEAR': 0,
        'SIGMA': 0.01,
        'GGA': 'PS',
        'LREAL': False,
        'LCHARG': False,
        'LWAVE': False,
    }

    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 4], offset=[0, 0, 0.5])

    options = {'resources': resources, 'max_wallclock_seconds': 3600 * 10}

    potential_family = 'PBE.54'
    potential_mapping = {'Al': 'Al', 'N': 'N'}

    parser_settings = {
        'add_energies': True,
        'add_forces': True,
        'add_stress': True
    }

    code = Code.get_from_string(code_string)
    Workflow = WorkflowFactory('vasp_bm.bulkmodulus')
    builder = Workflow.get_builder()
    builder.code = code
    builder.parameters = Dict(dict=incar_dict)
    builder.structure = structure
    builder.settings = Dict(dict={'parser_settings': parser_settings})
    builder.potential_family = Str(potential_family)
    builder.potential_mapping = Dict(dict=potential_mapping)
    builder.kpoints = kpoints
    builder.options = Dict(dict=options)
    builder.metadata.label = label
    builder.metadata.description = label
    builder.clean_workdir = Bool(False)
    builder.relax = Bool(True)
    builder.force_cutoff = Float(1e-8)
    builder.steps = Int(10)
    builder.positions = Bool(True)
    builder.shape = Bool(True)
    builder.volume = Bool(True)
    builder.convergence_on = Bool(True)
    builder.convergence_volume = Float(1e-8)
    builder.convergence_max_iterations = Int(2)
    builder.verbose = Bool(True)

    node = submit(builder)
    return node
Code example #3
    def run_two_volumes(self):
        self.report("run_two_volumes")
        for strain, future_name in zip((0.99, 1.01), ('minus', 'plus')):
            Workflow = WorkflowFactory('vasp.relax')
            builder = Workflow.get_builder()
            for key in self.ctx.inputs:
                builder[key] = self.ctx.inputs[key]
            if 'label' in self.ctx.inputs.metadata:
                label = self.ctx.inputs.metadata['label'] + " " + future_name
                builder.metadata['label'] = label
            if 'description' in self.ctx.inputs.metadata:
                description = self.ctx.inputs.metadata['description']
                description += " " + future_name
                builder.metadata['description'] = description
            builder.structure = self.ctx['structure_%s' % future_name]
            relax = AttributeDict()
            relax.perform = Bool(True)
            relax.force_cutoff = Float(1e-8)
            relax.positions = Bool(True)
            relax.shape = Bool(True)
            relax.volume = Bool(False)
            relax.convergence_on = Bool(False)
            builder.relax = relax
            future = self.submit(builder)
            self.to_context(**{future_name: future})
Code example #4
def launch_mae(structure, inpgen, calc_parameters, fleurinp, fleur,
               wf_parameters, scf_parameters, parent_folder, daemon, settings,
               option_node):
    """
    Launch a mae workchain
    """
    workchain_class = WorkflowFactory('fleur.mae')
    inputs = {
        'scf': {
            'wf_parameters': scf_parameters,
            'structure': structure,
            'calc_parameters': calc_parameters,
            'settings': settings,
            'inpgen': inpgen,
            'fleur': fleur
        },
        'wf_parameters': wf_parameters,
        'fleurinp': fleurinp,
        'remote': parent_folder,
        'fleur': fleur,
        'options': option_node
    }

    inputs = clean_nones(inputs)
    builder = workchain_class.get_builder()
    builder.update(inputs)
    launch_process(builder, daemon)
Code example #5
def Distribute(req, prop):
    """
    After the retrieval of the structure,
    we proceed with the distribution of the task
    according to the requested data.

    :param req: workfunction node of the request
    :param prop: property to calculate
    """
    wf = None
    if prop == 'band_gap':
        # submit a band gap workchain
        xx = WorkflowFactory('ext_aiida.BandGap')
        calcspecs = req.inputs.predefined['aiida']
        structure = req.outputs.structure
        pwcode = calcspecs['qe']  # code pk
        code = load_node(pwcode)
        upfamily = calcspecs['upf']
        wf = submit(xx, structure=structure, code=code)
    elif prop == 'band_structure':
        xx = WorkflowFactory('quantumespresso.pw.band_structure')
        calcspecs = req.inputs.predefined['aiida']
        structure = req.outputs.structure
        pwcode = calcspecs['qe']  # code pk
        code = load_node(pwcode)
        print('picc {}'.format(code))
        # upfamily = calcspecs['upf']
        wf = submit(xx, structure=structure, code=code)

    return wf
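A hedged usage sketch: `req_node` stands for a finished ProcessInputs workfunction node like the one submitted in code example #26 below.

# Hypothetical call; `req_node` must expose outputs.structure and
# inputs.predefined['aiida'] as assumed by Distribute above.
wf_node = Distribute(req_node, 'band_gap')
print(wf_node)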
Code example #6
def launch_fleur(fleurinp, fleur, parent_folder, settings, daemon,
                 max_num_machines, max_wallclock_seconds,
                 num_mpiprocs_per_machine, option_node, with_mpi, launch_base):
    """
    Launch a base_fleur workchain.
    If launch_base is False launch a single fleur calcjob instead.

    """

    process_class = CalculationFactory('fleur.fleur')
    workchain_class = WorkflowFactory('fleur.base')

    inputs = {
        'code': fleur,
        'fleurinpdata': fleurinp,
        'parent_folder': parent_folder,
        'settings': settings,
        'metadata': {
            'options': {
                'withmpi': with_mpi,
                'max_wallclock_seconds': max_wallclock_seconds,
                'resources': {
                    'num_machines': max_num_machines,
                    'num_mpiprocs_per_machine': num_mpiprocs_per_machine,
                }
            }
        }
    }

    if not launch_base:
        inputs = clean_nones(inputs)
        builder = process_class.get_builder()
        builder.update(inputs)
    else:
        if option_node is None:
            option_node = Dict(
                dict={
                    'withmpi': with_mpi,
                    'max_wallclock_seconds': max_wallclock_seconds,
                    'resources': {
                        'num_machines': max_num_machines,
                        'num_mpiprocs_per_machine': num_mpiprocs_per_machine
                    }
                })

        inputs_base = {
            'code': fleur,
            'fleurinpdata': fleurinp,
            'parent_folder': parent_folder,
            'settings': settings,
            'options': option_node
        }
        inputs_base = clean_nones(inputs_base)
        builder = workchain_class.get_builder()
        builder.update(**inputs_base)

    launch_process(builder, daemon)
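A hedged sketch of the two modes the docstring describes; every node argument (`fleurinp_node`, `fleur_code`) is assumed to exist already.

# launch_base=True wraps the run in the fleur.base workchain;
# launch_base=False submits a bare fleur.fleur calcjob instead.
launch_fleur(fleurinp_node, fleur_code, None, None, daemon=True,
             max_num_machines=1, max_wallclock_seconds=3600,
             num_mpiprocs_per_machine=2, option_node=None,
             with_mpi=True, launch_base=True)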
Code example #7
    def submit(self, _=None):
        assert self.input_structure is not None

        builder = WorkflowFactory('quantumespresso.pw.relax').get_builder()
        builder.base.pw.code = self.code_group.selected_code
        builder.base.pw.parameters = load_default_parameters()
        builder.base.pw.metadata.options = self.options
        builder.base.kpoints_distance = Float(0.8)
        builder.base.pseudo_family = Str(self.pseudo_family_selection.value)
        builder.structure = self.input_structure

        self.process = submit(builder)
Code example #8
    def get_sub_workchain_builder(self, scale_factor, previous_workchain=None):
        """Return the builder for the relax workchain."""
        structure = scale_structure(self.inputs.structure, scale_factor)
        process_class = WorkflowFactory(self.inputs.sub_process_class)

        builder = process_class.get_inputs_generator().get_builder(
            structure,
            previous_workchain=previous_workchain,
            **self.inputs.generator_inputs
        )
        builder._update(**self.inputs.get('sub_process', {}))  # pylint: disable=protected-access

        return builder
Code example #9
def test_generate_inputs(castep_code, nacl, si):  # pylint: disable=invalid-name
    """
    Test for the generator
    """
    protocol = CastepRelaxInputGenerator(process_class=CastepRelaxWorkChain).get_protocol('moderate')
    override = {'base': {'metadata': {'label': 'test'}, 'calc': {}}}

    output = generate_inputs(WorkflowFactory('castep.relax'), copy.deepcopy(protocol), castep_code, si, override)
    assert output['calc']['parameters']['basis_precision'] == 'fine'
    assert 'structure' not in output['calc']

    output = generate_inputs(WorkflowFactory('castep.base'), copy.deepcopy(protocol), castep_code, si, override)
    assert output['calc']['parameters']['PARAM']['basis_precision'] == 'fine'
    assert 'structure' in output['calc']
Code example #10
File: bulkmodulus.py Project: lan496/aiida-vasp-bm
    def run_relax(self):
        self.report("run_relax")
        Workflow = WorkflowFactory('vasp.relax')
        builder = Workflow.get_builder()
        for key in self.ctx.inputs:
            builder[key] = self.ctx.inputs[key]
        if 'label' in self.ctx.inputs.metadata:
            label = self.ctx.inputs.metadata['label'] + " relax"
            builder.metadata['label'] = label
        if 'description' in self.ctx.inputs.metadata:
            description = self.ctx.inputs.metadata['description'] + " relax"
            builder.metadata['description'] = description
        future = self.submit(builder)
        self.to_context(**{'relax': future})
Code example #11
def search_pk(uuid):
    """Return pks of finished PhonopyWorkChain runs called by the
    IterHarmonicApprox node with the given uuid (a pk also works)."""
    IterHarmonicApprox = WorkflowFactory('phonopy.iter_ha')
    qb = QueryBuilder()
    qb.append(IterHarmonicApprox,
              tag='iter_ha',
              filters={'uuid': {
                  '==': uuid
              }})
    PhonopyWorkChain = WorkflowFactory('phonopy.phonopy')
    qb.append(PhonopyWorkChain, with_incoming='iter_ha')
    qb.order_by({PhonopyWorkChain: {'ctime': 'asc'}})
    pks = [n[0].pk for n in qb.all() if n[0].is_finished_ok]

    return pks
Code example #12
    def get_sub_workchain_builder(self, distance, previous_workchain=None):
        """Return the builder for the relax workchain."""
        molecule = set_distance(self.inputs.molecule, distance)
        process_class = WorkflowFactory(self.inputs.sub_process_class)

        builder = process_class.get_inputs_generator().get_builder(
            molecule,
            previous_workchain=previous_workchain,
            **self.inputs.generator_inputs
        )
        builder._update(**self.inputs.get('sub_process', {}))  # pylint: disable=protected-access

        distance_node = molecule.creator.inputs.distance

        return builder, distance_node
Code example #13
def validate_inputs(value, _):
    """Validate the entire input namespace."""
    if 'scale_factors' not in value:
        if 'scale_count' not in value or 'scale_increment' not in value:
            return 'Neither `scale_factors` nor the pair of `scale_count` and `scale_increment` was defined.'

    # Validate that the provided ``generator_inputs`` are valid for the associated input generator.
    process_class = WorkflowFactory(value['sub_process_class'])
    generator = process_class.get_input_generator()

    try:
        generator.get_builder(structure=value['structure'],
                              **value['generator_inputs'])
    except Exception as exc:  # pylint: disable=broad-except
        return f'`{generator.__class__.__name__}.get_builder()` fails for the provided `generator_inputs`: {exc}'
Code example #14
def primitive_structure_from_cif(cif, parse_engine, symprec, site_tolerance):
    """Attempt to parse the given `CifData` and create a `StructureData` from it.

    First the raw CIF file is parsed with the given `parse_engine`. The resulting `StructureData` is then passed through
    SeeKpath to try and get the primitive cell. If that is successful, important structural parameters as determined by
    SeeKpath will be set as extras on the structure node which is then returned as output.

    :param cif: the `CifData` node
    :param parse_engine: the parsing engine, supported libraries 'ase' and 'pymatgen'
    :param symprec: a `Float` node with symmetry precision for determining primitive cell in SeeKpath
    :param site_tolerance: a `Float` node with the fractional coordinate distance tolerance for finding overlapping
        sites. This will only be used if the parse_engine is pymatgen
    :return: the primitive `StructureData` as determined by SeeKpath
    """
    CifCleanWorkChain = WorkflowFactory('codtools.cif_clean')  # pylint: disable=invalid-name

    try:
        structure = cif.get_structure(converter=parse_engine.value,
                                      site_tolerance=site_tolerance.value,
                                      store=False)
    except exceptions.UnsupportedSpeciesError:
        return CifCleanWorkChain.exit_codes.ERROR_CIF_HAS_UNKNOWN_SPECIES
    except InvalidOccupationsError:
        return CifCleanWorkChain.exit_codes.ERROR_CIF_HAS_INVALID_OCCUPANCIES
    except Exception:  # pylint: disable=broad-except
        return CifCleanWorkChain.exit_codes.ERROR_CIF_STRUCTURE_PARSING_FAILED

    try:
        seekpath_results = get_kpoints_path(structure, symprec=symprec)
    except ValueError:
        return CifCleanWorkChain.exit_codes.ERROR_SEEKPATH_INCONSISTENT_SYMMETRY
    except SymmetryDetectionError:
        return CifCleanWorkChain.exit_codes.ERROR_SEEKPATH_SYMMETRY_DETECTION_FAILED

    # Store important information that should be easily queryable as attributes in the StructureData
    parameters = seekpath_results['parameters'].get_dict()
    structure = seekpath_results['primitive_structure']

    # Store the formula as a string, in both hill as well as hill-compact notation, so it can be easily queried for
    extras = {
        'formula_hill': structure.get_formula(mode='hill'),
        'formula_hill_compact': structure.get_formula(mode='hill_compact'),
        'chemical_system': '-{}-'.format('-'.join(sorted(structure.get_symbols_set()))),
    }

    for key in [
            'spacegroup_international', 'spacegroup_number', 'bravais_lattice',
            'bravais_lattice_extended'
    ]:
        try:
            extras[key] = parameters[key]
        except KeyError:
            pass

    structure.set_extra_many(extras)

    return structure
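A usage sketch under assumptions: `cif` is a stored `CifData` node loaded by a hypothetical pk, and the tolerance values are illustrative, not taken from this code. On failure the function returns one of the exit codes listed above instead of a structure.

# Hypothetical invocation; the pk and tolerances are examples only.
from aiida.orm import Float, Str, load_node

cif = load_node(1234)  # a CifData node
result = primitive_structure_from_cif(cif, Str('pymatgen'), Float(1e-5), Float(5e-4))
# `result` is the primitive StructureData, or one of the exit codes above.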
Code example #15
def main(code_string, datafiles, parameters):
    """Main method to setup the calculation."""

    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')

    # Then, we set the workchain we would like to call
    workchain = WorkflowFactory('logger.gc_example')

    # Set inputs for the following WorkChain execution
    inputs = AttributeDict()
    # inputs.metadata = {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
    #                                'parser_name': 'logger',
    #                                'withmpi': False,
    #                                'output_filename': 'logger.out'}}
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set datafiles
    inputs.datafiles = datafiles
    # Set parameters
    inputs.parameters = dict_data(dict=parameters)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)
    # Submit the requested workchain with the supplied inputs
    run(workchain, **inputs)
Code example #16
def test_eosworkchain_inpgen(aiida_profile, fixture_code, generate_structure):
    """Test the validation of subclasses of `InputsGenerator`."""

    from aiida_siesta.utils.protocols_system.input_generators import EosWorkChainInputGenerator

    inp_gen = EosWorkChainInputGenerator(WorkflowFactory("siesta.eos"))
    structure = generate_structure()
    protocol = inp_gen.get_default_protocol_name()
    code = fixture_code("siesta.siesta")
    code.store()
    calc_engines = {
        "siesta": {
            'code': code.uuid,
            'options': {
                "resources": {
                    "num_mpiprocs_per_machine": 1
                },
                "max_wallclock_seconds": 360
            }
        }
    }

    build = inp_gen.get_filled_builder(structure,
                                       calc_engines,
                                       protocol,
                                       relaxation_type="atoms_only")

    assert "parameters" in build
Code example #17
def launch_workflow(code, calculation, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `Q2rBaseWorkChain` for a previously completed `PhCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    expected_process_type = 'aiida.calculations:quantumespresso.ph'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    inputs = {
        'q2r': {
            'code': code,
            'parent_folder': calculation.outputs.remote_folder,
            'metadata': {
                'options': get_default_options(
                    max_num_machines, max_wallclock_seconds, with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.q2r.base'), daemon,
                          **inputs)
Code example #18
class FleurCommonRelaxWorkChain(CommonRelaxWorkChain):
    """Implementation of `aiida_common_workflows.common.relax.workchain.CommonRelaxWorkChain` for FLEUR."""

    _process_class = WorkflowFactory('fleur.base_relax')
    _generator_class = FleurCommonRelaxInputGenerator

    def convert_outputs(self):
        """Convert the outputs of the sub workchain to the common output specification."""

        outputs = self.ctx.workchain.outputs

        if 'optimized_structure' in outputs:
            self.out('relaxed_structure', outputs.optimized_structure)

        output_parameters = outputs.output_relax_wc_para
        out_para_dict = output_parameters.get_dict()
        if out_para_dict.get('total_magnetic_moment_cell') is not None:
            self.out('total_magnetization',
                     get_total_magnetization(output_parameters))
        self.out('total_energy',
                 get_total_energy(outputs.output_relax_wc_para))
        self.out('forces',
                 get_forces_from_trajectory(outputs.output_relax_wc_para))
Code example #19
def launch_workflow(code, datum, kpoints_mesh, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `MatdynBaseWorkChain` for a previously completed `Q2rCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'matdyn': {
            'code': code,
            'kpoints': kpoints_mesh,
            'force_constants': datum,
            'metadata': {
                'options': get_default_options(
                    max_num_machines, max_wallclock_seconds, with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.matdyn.base'),
                          daemon, **inputs)
Code example #20
def get_parameters(previous_workchain):
    """
    Extracts the FLAPW parameter for inpgen from a given previous workchain
    It finds the last Fleur Calcjob or Inpgen calc and extracts the
    parameters from its fleurinpdata node
    :param previous_workchain: Some workchain which contains at least one
                               Fleur CalcJob or Inpgen CalcJob.
    :return: Dict node of parameters ready to use, or None
    """
    from aiida.plugins import WorkflowFactory
    from aiida.common.exceptions import NotExistent
    from aiida_fleur.tools.common_fleur_wf import find_last_submitted_workchain

    fleur_scf_wc = WorkflowFactory('fleur.scf')
    # Find Fleurinp
    try:
        last_base_relax = find_last_submitted_workchain(previous_workchain)
        last_relax = find_last_submitted_workchain(
            orm.load_node(last_base_relax))
        last_scf = find_last_submitted_workchain(orm.load_node(last_relax))
        last_scf = orm.load_node(last_scf)
    except NotExistent:
        # Something went wrong in the previous workchain run;
        # continue with defaults instead of previous parameters.
        return None
    if last_scf.process_class is fleur_scf_wc:
        fleurinp = last_scf.outputs.fleurinp
    else:
        return None
    # Be aware that this parameter node is incomplete; for example,
    # LOs and econfig are currently missing.
    parameters = fleurinp.get_parameterdata_ncf()  # This is not a calcfunction!
    return parameters
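A minimal sketch of calling this helper, assuming `orm` is imported and the pk points at a finished relax-type workchain (both are assumptions):

from aiida import orm

previous = orm.load_node(4321)  # hypothetical pk
parameters = get_parameters(previous)
if parameters is None:
    print('no usable previous parameters; falling back to defaults')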
Code example #21
def launch_workflow(code, calculation, kpoints_mesh, clean_workdir,
                    max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run the `PhBaseWorkChain` for a previously completed `PwCalculation`."""
    from aiida.orm import Bool, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'ph': {
            'code': code,
            'qpoints': kpoints_mesh,
            'parent_folder': calculation.outputs.remote_folder,
            'parameters': Dict(dict={'INPUTPH': {}}),
            'metadata': {
                'options': get_default_options(
                    max_num_machines, max_wallclock_seconds, with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.ph.base'), daemon,
                          **inputs)
Code example #22
class QuantumEspressoCommonRelaxWorkChain(CommonRelaxWorkChain):
    """Implementation of `aiida_common_workflows.common.relax.workchain.CommonRelaxWorkChain` for Quantum ESPRESSO."""

    _process_class = WorkflowFactory('quantumespresso.pw.relax')
    _generator_class = QuantumEspressoCommonRelaxInputGenerator

    def convert_outputs(self):
        """Convert the outputs of the sub workchain to the common output specification."""
        outputs = self.ctx.workchain.outputs

        result = extract_from_parameters(outputs.output_parameters).values()
        forces, stress = extract_from_trajectory(
            outputs.output_trajectory).values()

        try:
            total_energy, total_magnetization = result
        except ValueError:
            total_energy, total_magnetization = list(result)[0], None

        if 'output_structure' in outputs:
            self.out('relaxed_structure', outputs.output_structure)

        if total_magnetization is not None:
            self.out('total_magnetization', total_magnetization)

        self.out('total_energy', total_energy)
        self.out('forces', forces)
        self.out('stress', stress)
Code example #23
def test_relax_generator(castep_code, nacl, with_otfg):
    """Test for generating the relax namespace"""
    CastepCommonRelaxWorkChain = WorkflowFactory('castep.relax')  # pylint: disable=invalid-name
    protocol = CastepCommonRelaxInputGenerator(
        process_class=CastepCommonRelaxWorkChain).get_protocol(
            'moderate')['relax']
    override = {
        'base': {
            'metadata': {
                'label': 'test'
            },
            'calc': {
                'parameters': {
                    'cut_off_energy': 220
                },
                'metadata': {
                    'label': 'test'
                }
            }
        }
    }
    otfg = OTFGGroup.objects.get(label='C19')
    generated = generate_inputs_relax(protocol, castep_code, nacl, otfg,
                                      override)

    assert 'structure' in generated
    paramd = generated['calc']['parameters']
    assert 'basis_precision' not in paramd
    assert 'kpoints_spacing' in generated['base']
    assert 'kpoints' not in generated['calc']

    assert generated['calc']['metadata']['label'] == 'test'
    assert generated['base']['metadata']['label'] == 'test'
Code example #24
File: bands.py Project: zhubonan/aiida-castep
    def run_scf(self):
        """
        Run the SCF calculation
        """

        base_work = WorkflowFactory(self._base_wk_string)
        inputs = AttributeDict(self.exposed_inputs(base_work, namespace='scf'))
        inputs.metadata.call_link_label = 'scf'
        inputs.calc.structure = self.ctx.current_structure

        # Ensure that the check/castep_bin file will be written
        param_dict = inputs.calc.parameters.get_dict()
        if 'PARAM' in param_dict:
            ensure_checkpoint(param_dict['PARAM'])
        else:
            ensure_checkpoint(param_dict)

        # Update if changes are made
        if param_dict != inputs.calc.parameters.get_dict():
            self.report(
                "Updated the PARAM to make sure castep_bin file will be written"
            )
            inputs.calc.parameters = orm.Dict(dict=param_dict)

        running = self.submit(base_work, **inputs)
        self.report('Running SCF calculation {}'.format(running))
        self.to_context(workchain_scf=running)
Code example #25
def find_latest_uuid():
    """Return the uuid of the most recently created IterHarmonicApprox node."""
    IterHarmonicApprox = WorkflowFactory('phonopy.iter_ha')
    qb = QueryBuilder()
    qb.append(IterHarmonicApprox)
    qb.order_by({IterHarmonicApprox: {'ctime': 'desc'}})
    return qb.first()[0].uuid
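This pairs naturally with `search_pk` from code example #11; a sketch combining the two helpers on this page (it assumes at least one IterHarmonicApprox node exists in the database):

latest = find_latest_uuid()
print(search_pk(latest))  # pks of its finished PhonopyWorkChain children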
Code example #26
File: app.py Project: d-tomerini/aiida-post
    def post(self, prop):
        """
        Route to manage the requests from ext
        Access is through a JSON file passed to the serveri
        containing the input required for calculation
        Data is handled and responded accordingly

        :input prop is the quantity we required for calculation
        """
        # workfunction to process the incoming json dictionary;
        # it still needs a validation step here
        cao = 'cao\n'  # separator string for the debug dump below
        print((
            cao,
            cao,
            'data',
            request.data,
            cao,
            'args',
            request.args.to_dict(flat=False),
            cao,
            'form',
            request.form,
            cao,
            'files',
            request.files,
            cao,
            request.values,
            cao,
            request.base_url,
            cao,
            request.date,
            cao,
        ))
        xx = WorkflowFactory('ext_aiida.ProcessInputs')
        wf = submit(
            xx,
            request=Dict(dict=request.get_json()),
            predefined=Dict(dict=CALCULATION_OPTIONS),
            property=Str(prop),
        )
        sleep(2)
        if not wf.is_finished_ok:
            msg = 'Structure retrieval error. See node uuid={} for more specific report'.format(
                wf.uuid)
            return {
                'error': wf.exit_message,
                'message': msg,
                'stored_request': wf.inputs.request.get_dict(),
            }
        else:
            exwf = Distribute(wf, prop)
            msg = 'Successful retrieval of structure {}, workflow at pk {}'.format(
                exwf.inputs.structure.pk, exwf.pk)
            return {
                'error': wf.exit_message,
                'message': msg,
                'stored_request': wf.inputs.request.get_dict(),
            }
Code example #27
def launch_banddos(fleurinp, fleur, wf_parameters, parent_folder, daemon,
                   settings, option_node):
    """
    Launch a banddos workchain
    """
    workchain_class = WorkflowFactory('fleur.banddos')
    inputs = {
        'wf_parameters': wf_parameters,
        'fleur': fleur,
        'remote': parent_folder,
        'fleurinp': fleurinp,
        'options': option_node
    }
    inputs = clean_nones(inputs)
    builder = workchain_class.get_builder()
    builder.update(inputs)
    launch_process(builder, daemon)
Code example #28
def export_isotherm(
    sample,
    node,
    file_name: str = None,
    aiidalab_instance: str = "unknown",
):
    """Export Isotherm object."""
    source_info = {
        "uuid": node.uuid,
        "url": aiidalab_instance,
        "name": "Isotherm simulated using the isotherm app on AiiDAlab",
    }

    # Workaround until the Isotherm object is ready.
    isotherm_wf = WorkflowFactory("lsmo.isotherm")
    query = (
        orm.QueryBuilder()
        .append(orm.Dict, filters={"uuid": node.uuid}, tag="isotherm_data")
        .append(isotherm_wf, with_outgoing="isotherm_data", tag="isotherm_wf")
        .append(orm.Str, with_outgoing="isotherm_wf", project="attributes.value")
    )
    adsorptive = query.all(flat=True)
    adsorptive = adsorptive[0] if adsorptive else None

    meta = {
        "adsorptive": adsorptive,
        "temperature": node["temperature"],
        "method": "GCMC",
    }
    jcamp = from_dict(
        {
            "x": {
                "data": node["isotherm"]["pressure"],
                "unit": node["isotherm"]["pressure_unit"],
                "type": "INDEPENDENT",
            },
            "y": {
                "data": node["isotherm"]["loading_absolute_average"],
                "unit": node["isotherm"]["loading_absolute_unit"],
                "type": "DEPENDENT",
            },
        },
        data_type="Adsorption Isotherm",
        meta=meta,
    )
    sample.put_data(
        data_type="isotherm",
        file_name=f"{node.uuid}.jcamp"
        if file_name is None else f"{file_name}.jcamp",
        file_content=jcamp,
        metadata=meta,
        source_info=source_info,
    )
Code example #29
def run_workchain(
    number=1,
    code="sleep@slurm",
    time=1,
    payload=100,
    output_dict=100,
    output_array=100,
    fail=False,
    submit=False,
):
    """Run the `SleepWorkChain`

    :param number: Number of children `SleepCalculation`
    :param code: code label
    :param time: seconds for which each `SleepCalculation` runs `sleep`
    :param payload: number of fields in `payload` input dictionary of CalcJob
    :param output_dict: number of fields in output dictionary of CalcJob
    :param output_array: size of output array
    :param fail: intentionally fail all `SleepCalculation`
    :param submit: whether to submit to daemon, otherwise run
    :return: workchain node
    """
    from aiida.engine import run_get_node
    from aiida.engine import submit as submit_func
    from aiida.orm import load_code
    from aiida.plugins import WorkflowFactory

    builder = WorkflowFactory("sleep").get_builder()
    builder.children = number
    builder.calcjob.code = load_code(code)
    builder.calcjob.time = time
    builder.calcjob.payload = {
        f"input_key_{i}": f"value_{i}"
        for i in range(payload)
    }
    builder.calcjob.metadata.options.fail_calcjob = fail
    builder.calcjob.metadata.options.output_dict_size = output_dict
    builder.calcjob.metadata.options.output_array_size = output_array

    if submit:
        node = submit_func(builder)
    else:
        node = run_get_node(builder).node

    return node
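A usage sketch, assuming the plugin providing the `sleep` entry point and a code labelled `sleep@slurm` are both installed (neither is shown here):

# Submit two 5-second SleepCalculation children through the daemon.
node = run_workchain(number=2, time=5, submit=True)
print(node.pk)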
Code example #30
def validate_sub_process_class(value, _):
    """Validate the sub process class."""
    try:
        process_class = WorkflowFactory(value)
    except exceptions.EntryPointError:
        return f'`{value}` is not a valid or registered workflow entry point.'

    if not inspect.isclass(process_class) or not issubclass(process_class, CommonRelaxWorkChain):
        return f'`{value}` is not a subclass of the `CommonRelaxWorkChain` common workflow.'
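A sketch of the validator's contract: it returns `None` for a valid entry point and an error string otherwise. The entry point names below are illustrative assumptions.

# A real entry point must resolve, via WorkflowFactory, to a subclass
# of CommonRelaxWorkChain; 'no.such.entry_point' will not resolve at all.
print(validate_sub_process_class('common_workflows.relax.quantum_espresso', None))  # None if valid
print(validate_sub_process_class('no.such.entry_point', None))  # error message string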