def test_submit(default_builder_inputs):
    """Test submitting the builder returned by ``get_builder`` called with default arguments.

    This will actually create the ``WorkChain`` instance, so if it doesn't raise, that means the input spec was valid.
    """
    builder = GENERATOR.get_builder(**default_builder_inputs)
    engine.submit(builder)
Example #2
def Distribute(req, prop):
    """Distribute the requested task once the structure has been retrieved.

    :param req: the workfunction node holding the request
    :param prop: the property to calculate
    """
    if prop == 'band_gap':
        # submit a band-gap workchain
        workchain = WorkflowFactory('ext_aiida.BandGap')
        calcspecs = req.inputs.predefined['aiida']
        structure = req.outputs.structure
        pwcode = calcspecs['qe']  # pk of the pw code
        code = load_node(pwcode)
        upfamily = calcspecs['upf']
        wf = submit(workchain, structure=structure, code=code)
    elif prop == 'band_structure':
        # submit a band-structure workchain
        workchain = WorkflowFactory('quantumespresso.pw.band_structure')
        calcspecs = req.inputs.predefined['aiida']
        structure = req.outputs.structure
        pwcode = calcspecs['qe']  # pk of the pw code
        code = load_node(pwcode)
        print('Using code {}'.format(code))
        # upfamily = calcspecs['upf']
        wf = submit(workchain, structure=structure, code=code)
    else:
        raise ValueError("Unknown property '{}'".format(prop))

    return wf
    def on_btn_submit_press(self, _=None):
        """When submit button is pressed."""

        if not self.append_output:
            self.submit_out.value = ''

        inputs = self.inputs_generator()
        if inputs is None:
            if self.append_output:
                self.submit_out.value += "SubmitButtonWidget: did not recieve the process inputs.<br>"
            else:
                self.submit_out.value = "SubmitButtonWidget: did not recieve the process inputs."
        else:
            if self.disable_after_submit:
                self.btn_submit.disabled = True
            if isinstance(inputs, ProcessBuilder):
                self.process = submit(inputs)
            else:
                self.process = submit(self._process_class, **inputs)

            if self.append_output:
                self.submit_out.value += f"""Submitted process {self.process}. Click
                <a href={self.path_to_root}aiidalab-widgets-base/process.ipynb?id={self.process.pk}
                target="_blank">here</a> to follow.<br>"""
            else:
                self.submit_out.value = f"""Submitted process {self.process}. Click
                <a href={self.path_to_root}aiidalab-widgets-base/process.ipynb?id={self.process.pk}
                target="_blank">here</a> to follow."""

            for func in self._run_after_submitted:
                func(self.process)
Example #4
def submit_mixlmp(structure, dummy_index, steps, temp, lambda_f, dft_graphs,
                  dum_graphs):
    builder = TemplateCalculation.get_builder()
    # builder.metadata.dry_run = True
    builder.code = Code.get_from_string('lammps@metal')
    builder.settings = {'additional_retrieve_list': ['*.xyz', '*.out']}
    builder.metadata.options.queue_name = 'large'
    builder.metadata.options.resources = {'tot_num_mpiprocs': 4}
    builder.metadata.options.custom_scheduler_commands = '#BSUB -R "span[ptile=4]"'
    builder.template = os.path.abspath('template_lmp.in')
    files = {}
    for i, graph in enumerate(dft_graphs):
        files[f'dft_graph_{i}'] = SinglefileData(file=graph, filename=f'dft_graph_{i}.pb')
    for i, graph in enumerate(dum_graphs):
        files[f'dum_graph_{i}'] = SinglefileData(file=graph, filename=f'dum_graph_{i}.pb')
    builder.file = files
    builder.variables = {
        'TBD_STEPS': steps,
        'TBD_TEMP': temp,
        'TBD_LAMBDA_f': lambda_f,
        'TBD_DFT': ' '.join(f + '.pb' for f in files if f.startswith('dft')),
        'TBD_DUM': ' '.join(f + '.pb' for f in files if f.startswith('dum')),
        'TBD_vel': np.random.randint(10000000),
        'TBD_INPUT': TemplateCalculation._INPUT_STRUCTURE,
    }
    builder.kinds = ['O', 'H', 'Na', 'Cl', 'X']
    if isinstance(structure, Atoms):
        atoms = structure
    elif isinstance(structure, StructureData):
        atoms = structure.get_ase()
    else:
        raise TypeError(
            "Unknown structure format, please use ase.Atoms or aiida.orm.StructureData"
        )
    builder.structure = StructureData(
        ase=generate_atoms_dummy(atoms, dummy_index))
    submit(builder)
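
A hedged usage sketch (not part of the original example): the node pk, graph file paths, dummy-atom index and thermodynamic parameters below are placeholders chosen only to illustrate the call signature of submit_mixlmp.

structure = load_node(1234)  # placeholder pk of a StructureData node (an ase.Atoms also works)
submit_mixlmp(
    structure,
    dummy_index=0,                          # hypothetical index passed to generate_atoms_dummy
    steps=100000,
    temp=330,
    lambda_f=0.5,
    dft_graphs=['/path/to/dft_model.pb'],   # placeholder potential graph files
    dum_graphs=['/path/to/dum_model.pb'],
)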
Example #5
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # We set the workchain you would like to call
    workchain = WorkflowFactory('vasp.relax')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {}

    # Set inputs for the following WorkChain execution
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = DataFactory('array.kpoints')()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = DataFactory('dict')(dict=incar)
    # Set potentials and their mapping
    inputs.potential_family = DataFactory('str')(potential_family)
    inputs.potential_mapping = DataFactory('dict')(dict=potential_mapping)
    # Set options
    inputs.options = DataFactory('dict')(dict=options)
    # Set settings
    inputs.settings = DataFactory('dict')(dict=settings)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = DataFactory('bool')(True)

    # Relaxation-related parameters that are passed to the relax workchain
    relax = AttributeDict()
    # Turn on relaxation
    relax.perform = DataFactory('bool')(True)
    # Select relaxation algorithm
    relax.algo = DataFactory('str')('cg')
    # Set force cutoff limit (EDIFFG, but no sign needed)
    relax.force_cutoff = DataFactory('float')(0.01)
    # Turn on relaxation of positions (strictly not needed as the default is on)
    # The next three parameters correspond to the well-known ISIF=3 setting
    relax.positions = DataFactory('bool')(True)
    # Turn on relaxation of the cell shape (defaults to False)
    relax.shape = DataFactory('bool')(True)
    # Turn on relaxation of the volume (defaults to False)
    relax.volume = DataFactory('bool')(True)
    # Set maximum number of ionic steps
    relax.steps = DataFactory('int')(100)
    # Set the relaxation parameters on the inputs
    inputs.relax = relax
    # Submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
Example #6
    def test_launchers(self):
        """Verify that the various launchers are working."""
        result = run(self.function_return_true)
        self.assertTrue(result)

        result, node = run_get_node(self.function_return_true)
        self.assertTrue(result)
        self.assertEqual(result, get_true_node())
        self.assertTrue(isinstance(node, orm.CalcFunctionNode))

        with self.assertRaises(AssertionError):
            submit(self.function_return_true)
Example #7
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # Then, we set the workchain you would like to call
    workchain = WorkflowFactory('vasp.master')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    # Set inputs for the following WorkChain execution
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = dict_data(dict=incar)
    # Set potentials and their mapping
    inputs.potential_family = Str(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    # Set options
    inputs.options = dict_data(dict=options)
    # Set settings
    inputs.settings = dict_data(dict=settings)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)

    # Master, convergence and relaxation related parameters that are passed to the master,
    # convergence and relaxation workchains, respectively
    # Turn off relaxation
    relax = AttributeDict()
    relax.perform = Bool(False)
    inputs.relax = relax
    # Extract electronic band structure
    inputs.extract_bands = Bool(True)
    # Submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
Example #8
def launch_aiida_bulk_modulus(structure,
                              code_string,
                              resources,
                              label="AlN VASP relax calculation"):
    incar_dict = {
        'PREC': 'Accurate',
        'EDIFF': 1e-8,
        'NELMIN': 5,
        'NELM': 100,
        'ENCUT': 500,
        'IALGO': 38,
        'ISMEAR': 0,
        'SIGMA': 0.01,
        'GGA': 'PS',
        'LREAL': False,
        'LCHARG': False,
        'LWAVE': False,
    }

    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 4], offset=[0, 0, 0.5])

    options = {'resources': resources, 'max_wallclock_seconds': 3600 * 10}

    potential_family = 'PBE.54'
    potential_mapping = {'Al': 'Al', 'N': 'N'}

    parser_settings = {
        'add_energies': True,
        'add_forces': True,
        'add_stress': True
    }

    code = Code.get_from_string(code_string)
    Workflow = WorkflowFactory('vasp_bm.bulkmodulus')
    builder = Workflow.get_builder()
    builder.code = code
    builder.parameters = Dict(dict=incar_dict)
    builder.structure = structure
    builder.settings = Dict(dict={'parser_settings': parser_settings})
    builder.potential_family = Str(potential_family)
    builder.potential_mapping = Dict(dict=potential_mapping)
    builder.kpoints = kpoints
    builder.options = Dict(dict=options)
    builder.metadata.label = label
    builder.metadata.description = label
    builder.clean_workdir = Bool(False)
    builder.relax = Bool(True)
    builder.force_cutoff = Float(1e-8)
    builder.steps = Int(10)
    builder.positions = Bool(True)
    builder.shape = Bool(True)
    builder.volume = Bool(True)
    builder.convergence_on = Bool(True)
    builder.convergence_volume = Float(1e-8)
    builder.convergence_max_iterations = Int(2)
    builder.verbose = Bool(True)

    node = submit(builder)
    return node
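
A hedged usage sketch (not part of the original example): the structure pk, code label and scheduler resources are placeholders that only illustrate how launch_aiida_bulk_modulus might be driven.

structure = load_node(1234)  # placeholder pk of an AlN StructureData node
node = launch_aiida_bulk_modulus(
    structure,
    code_string='vasp@my-cluster',  # hypothetical code label
    resources={'num_machines': 1, 'num_mpiprocs_per_machine': 16},
    label='AlN VASP bulk modulus',
)
print('Submitted bulk-modulus workchain pk =', node.pk)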
Example #9
def run_via_daemon(workchains, inputs, sleep, timeout):
    """Run via the daemon, polling until it is terminated or timeout."""
    from aiida.engine import submit

    workchain = submit(workchains.Polish00WorkChain, **inputs)
    start_time = time.time()
    timed_out = True

    while time.time() - start_time < timeout:
        time.sleep(sleep)

        if workchain.is_terminated:
            timed_out = False
            total_time = time.time() - start_time
            break

    if timed_out:
        click.secho('Failed: ', fg='red', bold=True, nl=False)
        click.secho(
            f'the workchain<{workchain.pk}> did not finish in time and the operation timed out',
            bold=True)
        return None

    try:
        result = workchain.outputs.result
    except AttributeError:
        click.secho('Failed: ', fg='red', bold=True, nl=False)
        click.secho(
            f'the workchain<{workchain.pk}> did not return a result output node',
            bold=True)
        click.echo(str(workchain.attributes))
        return None

    return result, workchain, total_time
Example #10
    def on_btn_submit_press(self, _=None):
        """When submit button is pressed."""

        if not self.append_output:
            self.submit_out.value = ''

        input_dict = self.input_dictionary_function()
        if input_dict is None:
            if self.append_output:
                self.submit_out.value += "SubmitButtonWidget: did not recieve input dictionary.<br>"
            else:
                self.submit_out.value = "SubmitButtonWidget: did not recieve input dictionary."
        else:
            self.btn_submit.disabled = self.disable_after_submit
            self.process = submit(self._process_class, **input_dict)

            if self.append_output:
                self.submit_out.value += """Submitted process {0}. Click
                <a href={1}aiidalab-widgets-base/process.ipynb?id={2} target="_blank">here</a>
                to follow.<br>""".format(self.process, self.path_to_root, self.process.pk)
            else:
                self.submit_out.value = """Submitted process {0}. Click
                <a href={1}aiidalab-widgets-base/process.ipynb?id={2} target="_blank">here</a>
                to follow.""".format(self.process, self.path_to_root, self.process.pk)

            for func in self._run_after_submitted:
                func(self.process)
def test_vasp_hybrid_bands(
        configure_with_daemon,  # pylint: disable=unused-argument
        assert_finished,
        wait_for,
        get_insb_input  # pylint: disable=redefined-outer-name
):
    """
    Runs the VASP + hybrids reference bands workflow with InSb, on a very coarse grid.
    """
    from aiida import orm
    from aiida.orm import Bool, load_node
    from aiida.engine import submit
    from aiida_tbextraction.fp_run.reference_bands import VaspReferenceBands

    KpointsData = orm.KpointsData
    kpoints_mesh = KpointsData()
    kpoints_mesh.set_kpoints_mesh([2, 2, 2])

    kpoints = KpointsData()
    kpoints.set_kpoints_path([('G', (0, 0, 0), 'M', (0.5, 0.5, 0.5))])

    pk = submit(VaspReferenceBands,
                merge_kpoints=Bool(True),
                kpoints=kpoints,
                kpoints_mesh=kpoints_mesh,
                **get_insb_input).pk
    wait_for(pk)
    assert_finished(pk)
    result = load_node(pk).get_outputs_dict()
    assert 'bands' in result
    assert (result['bands'].get_bands().shape == (len(kpoints.get_kpoints()),
                                                  36))
Example #12
    def calculate_thermal_conductivity(self):

        data_gp = ArrayData()
        for i in self.ctx.labels:
            calc = self.ctx.get('gp_{}'.format(i))
            print('collect gp_{}'.format(i))
            outputs_dict = calc.get_outputs_dict()

            for key in outputs_dict:
                if key.startswith('kappa') and len(key.split('_')) == 2:
                    num = key.split('_')[1]
                    # print 'num', num
                    for array_name in outputs_dict[key].get_arraynames():
                        array = outputs_dict[key].get_array(array_name)
                        data_gp.set_array(array_name + '_' + num, array)

        if 'nac' in self.inputs:
            nac_data = self.ctx.inputs.nac
        else:
            nac_data = None

        JobCalculation, calculation_input = generate_phono3py_params(
            structure=self.inputs.structure,
            parameters=self.inputs.parameters,
            force_sets=self.inputs.force_sets,
            nac_data=nac_data,
            grid_data=data_gp)

        future = submit(JobCalculation, **calculation_input)
        return ToContext(thermal_conductivity=future)
Example #13
def launch_phono3py(cutoff_energy=350, is_nac=False):
    """Launch calculation."""
    structure, forces_config, nac_config, phonon_settings = get_settings(
        cutoff_energy, is_nac)
    Phono3pyWorkChain = WorkflowFactory("phonopy.phono3py")
    builder = Phono3pyWorkChain.get_builder()
    builder.structure = structure
    builder.calculator_settings = Dict(dict={
        "forces": forces_config,
        "nac": nac_config
    })
    builder.run_phono3py = Bool(False)
    builder.remote_phono3py = Bool(False)
    builder.code_string = Str("phonopy@nancy")
    builder.phonon_settings = Dict(dict=phonon_settings)
    builder.symmetry_tolerance = Float(1e-5)
    builder.options = Dict(dict=forces_config["options"])
    dim = phonon_settings["supercell_matrix"]
    kpoints_mesh = forces_config["kpoints_mesh"]
    label = "ZnTe phono3py %dx%dx%d kpt %dx%dx%d PBEsol %d eV" % (
        tuple(dim) + tuple(kpoints_mesh) + (cutoff_energy, ))
    builder.metadata.label = label
    builder.metadata.description = label

    future = submit(builder)
    print(label)
    print(future)
    print("Running workchain with pk={}".format(future.pk))
Example #14
        def do_submit():
            calc_node = submit(test_processes.DummyProcess)
            yield self.wait_for_calc(calc_node)

            self.assertTrue(calc_node.is_finished_ok)
            self.assertEqual(calc_node.process_state.value,
                             plumpy.ProcessState.FINISHED.value)
Example #15
        def do_exception():
            calc_node = submit(test_processes.ExceptionProcess)
            yield self.wait_for_calc(calc_node)

            self.assertFalse(calc_node.is_finished_ok)
            self.assertEqual(calc_node.process_state.value,
                             plumpy.ProcessState.EXCEPTED.value)
def main(codelabel, run_test):
    try:
        code = Code.get_from_string(codelabel)
    except NotExistent:
        print("The code '{}' does not exist".format(codelabel))
        sys.exit(1)

    allstructures = [
        "/home/kevin/Dropbox (LSMO)/proj61_metal_channels_shared/8_benchmark_daint/structures/dft_opt/NAVJAW.cif"
    ]

    for num_nodes in [1, 2, 4, 8, 12, 16, 32]:
        for s in allstructures:
            cif = CifData(file=s)
            name = Path(s).stem
            structure = cif.get_structure()
            structure.label = name

            structure.store()

            parameters = Dict(dict={})
            options = {
                "resources": {"num_machines": num_nodes, "num_cores_per_mpiproc": 1},
                "max_wallclock_seconds": 1 * 60 * 60,
            }
            inputs = {
                "protocol_tag": Str("sp"),
                "cp2k_base": {
                    "cp2k": {
                        "structure": structure,
                        "parameters": parameters,
                        "code": code,
                        "metadata": {"options": options},
                    }
                },
                "metadata": {"label": "scaling_test_" + str(num_nodes)},
            }

            if run_test:
                submit(Cp2kMultistageWorkChain, **inputs)
            else:
                print("Generating test input ...")
                inputs["base"]["cp2k"]["metadata"]["dry_run"] = True
                inputs["base"]["cp2k"]["metadata"]["store_provenance"] = False
                run(Cp2kMultistageWorkChain, **inputs)
                print("Submission test successful")
                print("In order to actually submit, add '--run'")
Example #17
    def post(self, prop):
        """
        Route to manage the requests from ext
        Access is through a JSON file passed to the serveri
        containing the input required for calculation
        Data is handled and responded accordingly

        :input prop is the quantity we required for calculation
        """
        # workfunction to process the incoming JSON dictionary;
        # the request should be validated here
        cao = 'cao\n'
        print((
            cao,
            cao,
            'data',
            request.data,
            cao,
            'args',
            request.args.getall(),
            cao,
            'form',
            request.form,
            cao,
            'files',
            request.files,
            cao,
            request.values,
            cao,
            request.base_url,
            cao,
            request.date,
            cao,
        ))
        xx = WorkflowFactory('ext_aiida.ProcessInputs')
        wf = submit(
            xx,
            request=Dict(dict=request.get_json()),
            predefined=Dict(dict=CALCULATION_OPTIONS),
            property=Str(prop),
        )
        sleep(2)
        if not wf.is_finished_ok:
            msg = 'Structure retrieval error. See node uuid={} for more specific report'.format(
                wf.uuid)
            return {
                'error': wf.exit_message,
                'message': msg,
                'stored_request': wf.inputs.request.get_dict(),
            }
        else:
            exwf = Distribute(wf, prop)
            msg = 'Successful retrieval of structure {}, workflow at pk {}'.format(
                exwf.inputs.structure.pk, exwf.pk)
            return {
                'error': wf.exit_message,
                'message': msg,
                'stored_request': wf.inputs.request.get_dict(),
            }
Example #18
        def do_launch():
            term_a = Int(5)
            term_b = Int(10)

            calc_node = submit(test_processes.AddProcess, a=term_a, b=term_b)
            yield self.wait_for_process(calc_node)
            self.assertTrue(calc_node.is_finished_ok)
            self.assertEqual(calc_node.process_state.value, plumpy.ProcessState.FINISHED.value)
def main(codelabel, run_test):
    try:
        code = Code.get_from_string(codelabel)
    except NotExistent:
        print("The code '{}' does not exist".format(codelabel))
        sys.exit(1)

    allstructures = [
        "/home/kevin/Dropbox (LSMO)/proj61_metal_channels_shared/8_benchmark_daint/structures/from_curated_mofs/UTEWOG.cif"
    ]

    for s in allstructures:
        print("submitting mulitstage cellopt on {}".format(s))
        cif = CifData(file=s)
        name = Path(s).stem
        structure = cif.get_structure()
        structure.label = name

        structure.store()
        parameters = Dict(dict={})
        options = {
            "resources": {"num_machines": 1},
            "max_wallclock_seconds": 15 * 60 * 60,
        }
        inputs = {
            "protocol_tag": Str("standard"),
            "cp2k_base": {
                "cp2k": {
                    "structure": structure,
                    "parameters": parameters,
                    "code": code,
                    "metadata": {"options": options},
                }
            },
        }

        if run_test:
            submit(Cp2kMultistageWorkChain, **inputs)
        else:
            print("Generating test input ...")
            inputs["base"]["cp2k"]["metadata"]["dry_run"] = True
            inputs["base"]["cp2k"]["metadata"]["store_provenance"] = False
            run(Cp2kMultistageWorkChain, **inputs)
            print("Submission test successful")
            print("In order to actually submit, add '--run'")
Example #20
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # Then, we set the calculation you would like to call
    calculation = CalculationFactory('vasp.vasp2w90')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    # Set inputs
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = dict_data(dict=incar)
    # Set potentials and their mapping
    inputs.potential = DataFactory('vasp.potcar').get_potcars_from_structure(
        structure=inputs.structure,
        family_name=potential_family,
        mapping=potential_mapping)
    # Set options
    inputs.metadata = AttributeDict({'options': options})
    # Set settings
    inputs.settings = dict_data(dict=settings)
    # Set Wannier90 projectors
    inputs.wannier_projections = DataFactory('list')(list=['Si: sp3'])
    # Submit the requested calculation with the supplied inputs
    submit(calculation, **inputs)
Example #21
def launch_calculation(code, counter, inputval):
    """
    Launch calculations to the daemon through the Process layer
    """
    process, inputs, expected_result = create_calculation_process(
        code=code, inputval=inputval)
    calc = submit(process, **inputs)
    print(f'[{counter}] launched calculation {calc.uuid}, pk={calc.pk}')
    return calc, expected_result
def submit_workchain(structure,
                     daemon,
                     protocol,
                     parameters,
                     pseudo_family,
                     num_machines,
                     num_mpiprocs_per_machine=4,
                     set_2d_mesh=False):
    print("running dft band structure calculation for {}".format(
        structure.get_formula()))

    # Set custom pseudo
    modifiers = {'parameters': parameters}
    """ if pseudo_family is not None:
        from aiida_quantumespresso.utils.protocols.pw import _load_pseudo_metadata
        pseudo_data = _load_pseudo_metadata(pseudo_family)
        modifiers.update({'pseudo': 'custom', 'pseudo_data': pseudo_data}) """
    # if pseudo_family is not None:
    #     from aiida_quantumespresso.utils.pseudopotential import get_pseudos_from_structure
    #     pseudo_data = get_pseudos_from_structure(structure, pseudo_family)
    #     modifiers.update({'pseudo': 'custom', 'pseudo_data': pseudo_data})

    # Submit the DFT bands workchain
    pwbands_workchain_parameters = {
        'code': code,
        'structure': structure,
        'protocol': orm.Dict(dict={
            'name': protocol,
            'modifiers': modifiers
        }),
        'options': orm.Dict(dict={
            'resources': {
                'num_machines': num_machines,
                'num_mpiprocs_per_machine': num_mpiprocs_per_machine
            },
            'max_wallclock_seconds': 3600 * 5,
            'withmpi': True,
        }),
        'set_2d_mesh': orm.Bool(set_2d_mesh)
    }
    if pseudo_family is not None:
        pwbands_workchain_parameters['pseudo_family'] = orm.Str(pseudo_family)
    if daemon:
        dft_workchain = submit(PwBandStructureWorkChain,
                               **pwbands_workchain_parameters)
    else:
        from aiida.engine import run_get_pk
        dft_workchain = run_get_pk(PwBandStructureWorkChain,
                                   **pwbands_workchain_parameters)
    return dft_workchain
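
Note that submit_workchain reads `code` from the enclosing scope instead of taking it as a parameter. A hedged driver sketch (not part of the original example): the code label, structure pk, protocol name, modifier value and pseudopotential family below are placeholders.

code = orm.load_code('pw@my-cluster')  # hypothetical label; defines the `code` used inside submit_workchain
structure = orm.load_node(1234)        # placeholder pk of a StructureData node
submit_workchain(
    structure,
    daemon=True,
    protocol='theos-ht-1.0',                  # hypothetical protocol name
    parameters='default',                     # hypothetical 'parameters' modifier
    pseudo_family='SSSP/1.1/PBE/efficiency',  # hypothetical pseudo family label
    num_machines=1,
)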
Example #23
def main(code_string, incar, kmesh, structures, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # Then, we set the workchain you would like to call
    workchain = EosWorkChain

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # organize settings
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    # set inputs for the following WorkChain execution
    # set code
    inputs.code = Code.get_from_string(code_string)
    # set structures
    inputs.structures = structures
    # set k-points grid density
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # set parameters
    inputs.parameters = dict_data(dict=incar)
    # set potentials and their mapping
    inputs.potential_family = Str(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    # set options
    inputs.options = dict_data(dict=options)
    # set settings
    inputs.settings = dict_data(dict=settings)
    # set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)
    # submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
Example #24
def example_dft(gaussian_code):
    """Run simple DFT calculation"""

    print("Testing Gaussian Input Creation")

    # structure
    structure = StructureData(
        pymatgen_molecule=mg.Molecule.from_file('./ch4.xyz'))

    num_cores = 2
    memory_mb = 300

    # parameters
    parameters = Dict(
        dict={
            'link0_parameters': {
                '%chk': 'aiida.chk',
                '%mem': '%dMB' % memory_mb,
                '%nprocshared': num_cores,
            },
            'functional': 'PBE1PBE',
            'basis_set': '6-31g',
            'route_parameters': {
                'nosymm': None,
                'Output': 'WFX'
            },
            'input_parameters': {
                'output.wfx': None
            },
        })

    # Construct process builder

    builder = GaussianCalculation.get_builder()

    builder.structure = structure
    builder.parameters = parameters
    builder.code = gaussian_code

    builder.metadata.options.resources = {
        "num_machines": 1,
        "tot_num_mpiprocs": num_cores,
    }

    # Ask for 25% extra memory
    builder.metadata.options.max_memory_kb = int(1.25 * memory_mb) * 1024
    builder.metadata.options.max_wallclock_seconds = 3 * 60

    builder.metadata.dry_run = True
    builder.metadata.store_provenance = False

    process_node = submit(builder)

    print("Submitted dry_run in" + str(process_node.dry_run_info))
Example #25
        def do_pause():
            calc_node = submit(test_processes.WaitProcess)
            while calc_node.process_state != ProcessState.WAITING:
                yield

            self.assertFalse(calc_node.paused)

            future = yield with_timeout(controller.pause_process(calc_node.pk))
            result = yield self.wait_future(future)
            self.assertTrue(result)
            self.assertTrue(calc_node.paused)
Example #26
def launch_calcfunction(inputval):
    """Launch workfunction to the daemon"""
    inputs = {
        'x': Int(inputval),
        'y': Int(inputval),
    }
    res = inputval + inputval
    expected_result = Int(res)
    process = submit(add, **inputs)
    print(f'launched calcfunction {process.uuid}, pk={process.pk}')
    return process, expected_result
Example #27
    def get_thermal_conductivity(self):

        inputs = {
            'structure': self.ctx.final_structure,
            'parameters': self.inputs.ph_settings,
            'force_sets': self.ctx.anharmonic.out.force_sets
        }

        if bool(self.inputs.use_nac):
            inputs.update({'nac_data': self.ctx.harmonic.out.nac_data})

        if int(self.inputs.gp_chunks) > 1:
            inputs.update({'gp_chunks': self.inputs.gp_chunks})
            future = submit(Phono3pyDist, **inputs)
        else:
            JobCalculation, calculation_input = generate_phono3py_params(
                **inputs)
            future = submit(JobCalculation, **calculation_input)
            print('phono3py (pk = {})'.format(future.pid))

        return ToContext(thermal_conductivity=future)
Example #28
    def submit(self, _=None):
        assert self.input_structure is not None

        builder = WorkflowFactory('quantumespresso.pw.relax').get_builder()
        builder.base.pw.code = self.code_group.selected_code
        builder.base.pw.parameters = load_default_parameters()
        builder.base.pw.metadata.options = self.options
        builder.base.kpoints_distance = Float(0.8)
        builder.base.pseudo_family = Str(self.pseudo_family_selection.value)
        builder.structure = self.input_structure

        self.process = submit(builder)
Example #29
def main(codelabel, run_test):
    try:
        code = Code.get_from_string(codelabel)
    except NotExistent:
        print("The code '{}' does not exist".format(codelabel))
        sys.exit(1)

    atoms = ase.build.molecule("H2O")
    atoms.center(vacuum=2.0)
    structure = StructureData(ase=atoms)

    parameters = Dict(dict={})
    options = {
        "resources": {"num_machines": 1, "num_cores_per_mpiproc": 1},
        "max_wallclock_seconds": int(0.5 * 60 * 60),
    }
    inputs = {
        "protocol_tag": Str("sp"),
        "cp2k_base": {
            "cp2k": {
                "structure": structure,
                "parameters": parameters,
                "code": code,
                "metadata": {"options": options},
            }
        },
        "metadata":{
            "label": "testing_daint_setup"
        }
    }

    if run_test:
        submit(Cp2kMultistageWorkChain, **inputs)
    else:
        print("Generating test input ...")
        inputs["cp2k_base"]["cp2k"]["metadata"]["dry_run"] = True
        inputs["cp2k_base"]["cp2k"]["metadata"]["store_provenance"] = False
        run(Cp2kMultistageWorkChain, **inputs)
        print("Submission test successful")
        print("In order to actually submit, add '--run'")
Example #30
        def do_kill():
            calc_node = submit(test_processes.WaitProcess)
            self.assertFalse(calc_node.is_killed)
            while calc_node.process_state != ProcessState.WAITING:
                yield

            kill_message = 'Sorry, you have to go mate'
            future = yield with_timeout(controller.kill_process(calc_node.pk, msg=kill_message))
            result = yield self.wait_future(future)
            self.assertTrue(result)

            self.wait_for_process(calc_node)
            self.assertTrue(calc_node.is_killed)
            self.assertEqual(calc_node.process_status, kill_message)