Example #1
def launch_calculation(code, calculation, max_num_machines,
                       max_wallclock_seconds, with_mpi, daemon):
    """Run a Q2rCalculation."""
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    # Check that the parent calculation node comes from quantumespresso.ph.
    # I cannot move this check into the option declaration, because CalcJobNode is not subclassed by the specific
    # calculation plugins (only Process is), and there is no feature yet to filter by the associated process_type.
    expected_process_type = 'aiida.calculations:quantumespresso.ph'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    inputs = {
        'code': code,
        'parent_folder': calculation.outputs.remote_folder,
        'metadata': {
            'options':
            get_default_options(max_num_machines, max_wallclock_seconds,
                                with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.q2r'), daemon,
                          **inputs)
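Most examples in this collection pass the result of `get_default_options` into `metadata['options']`. The following is a minimal sketch, under the assumption that the helper simply assembles the standard AiiDA scheduler options; it is not the packaged implementation.

def sketch_default_options(max_num_machines=1, max_wallclock_seconds=1800, with_mpi=False):
    """Sketch (assumption): build the minimal options dictionary consumed as `metadata.options`."""
    return {
        'resources': {'num_machines': int(max_num_machines)},
        'max_wallclock_seconds': int(max_wallclock_seconds),
        'withmpi': with_mpi,
    }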
Example #2
def test_ph_qpoint_list(
    aiida_profile, fixture_localhost, fixture_sandbox, generate_calc_job, fixture_code, generate_structure,
    generate_kpoints_mesh, generate_remote_data, file_regression
):
    """Test a `PhCalculation` with a qpoint list instead of a mesh."""
    entry_point_name = 'quantumespresso.ph'
    parent_entry_point = 'quantumespresso.pw'
    remote_path = fixture_sandbox.abspath

    structure = generate_structure()
    kpoints = generate_kpoints_mesh(2).get_kpoints_mesh(print_list=True)
    qpoints = orm.KpointsData()
    qpoints.set_cell(structure.cell)
    qpoints.set_kpoints(kpoints)

    inputs = {
        'code': fixture_code(entry_point_name),
        'parent_folder': generate_remote_data(fixture_localhost, remote_path, parent_entry_point),
        'qpoints': qpoints,
        'parameters': orm.Dict(dict={'INPUTPH': {}}),
        'metadata': {
            'options': get_default_options()
        }
    }

    generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    with fixture_sandbox.open('aiida.in') as handle:
        input_written = handle.read()

    file_regression.check(input_written, encoding='utf-8', extension='.in')
Example #3
def test_pw_wrong_ibrav(fixture_sandbox, generate_calc_job, fixture_code, generate_kpoints_mesh, generate_upf_data):
    """Test that a `PwCalculation` with an incorrect `ibrav` raises."""
    entry_point_name = 'quantumespresso.pw'

    parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutrho': 240.0, 'ecutwfc': 30.0, 'ibrav': 2}}

    # Here we use the wrong order of unit cell vectors on purpose.
    param = 5.43
    cell = [[0, param / 2., param / 2.], [-param / 2., 0, param / 2.], [-param / 2., param / 2., 0]]
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si')
    structure.append_atom(position=(param / 4., param / 4., param / 4.), symbols='Si', name='Si')

    upf = generate_upf_data('Si')
    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': structure,
        'kpoints': generate_kpoints_mesh(2),
        'parameters': orm.Dict(dict=parameters),
        'pseudos': {
            'Si': upf
        },
        'metadata': {
            'options': get_default_options()
        }
    }

    with pytest.raises(QEInputValidationError):
        generate_calc_job(fixture_sandbox, entry_point_name, inputs)
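For comparison, a sketch of the cell orientation that does pass the ibrav=2 validation, taken from Example #25 further below:

# QE-conventional fcc (ibrav=2) cell for silicon that passes validation (cf. Example #25).
param = 5.43
cell_ok = [[-param / 2., 0, param / 2.], [0, param / 2., param / 2.], [-param / 2., param / 2., 0]]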
Example #4
def launch_calculation(code, parent_folder, single_file, max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run a Pw2wannier90Calculation with some sample parameters and the provided inputs."""
    from aiida.orm import Dict
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    parameters = {
        'inputpp': {
            'write_amn': True,
            'write_mmn': True,
            'write_unk': False,
            'scdm_proj': True,
            'scdm_entanglement': 'isolated',
        }
    }

    settings = {'ADDITIONAL_RETRIEVE_LIST': ['*.amn', '*.mmn', '*.eig']}

    inputs = {
        'code': code,
        'parent_folder': parent_folder,
        'nnkp_file': single_file,
        'parameters': Dict(dict=parameters),
        'settings': Dict(dict=settings),
        'metadata': {
            'options': get_default_options(max_num_machines, max_wallclock_seconds, with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.pw2wannier90'), daemon, **inputs)
Example #5
    def validate_inputs(self):
        """
        Validate inputs that might depend on each other and cannot be validated by the spec. Also define the
        dictionary `inputs` in the context, which will contain the inputs for the calculation that will be launched
        in the `run_calculation` step.
        """
        self.ctx.inputs = AttributeDict({
            'code':
            self.inputs.code,
            'qpoints':
            self.inputs.qpoints,
            'parent_folder':
            self.inputs.parent_folder,
        })

        if 'parameters' in self.inputs:
            self.ctx.inputs.parameters = self.inputs.parameters.get_dict()
        else:
            self.ctx.inputs.parameters = {}

        if 'INPUTPH' not in self.ctx.inputs.parameters:
            self.ctx.inputs.parameters['INPUTPH'] = {}

        if 'settings' in self.inputs:
            self.ctx.inputs.settings = self.inputs.settings.get_dict()
        else:
            self.ctx.inputs.settings = {}

        if 'options' in self.inputs:
            self.ctx.inputs._options = self.inputs.options.get_dict()
        else:
            self.ctx.inputs._options = get_default_options()

        if self.inputs.only_initialization.value:
            self.ctx.inputs.settings['ONLY_INITIALIZATION'] = True
Example #6
def launch_workflow(code, datum, kpoints_mesh, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `MatdynBaseWorkChain` for a previously completed `Q2rCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'matdyn': {
            'code': code,
            'kpoints': kpoints_mesh,
            'force_constants': datum,
            'metadata': {
                'options':
                get_default_options(max_num_machines, max_wallclock_seconds,
                                    with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.matdyn.base'),
                          daemon, **inputs)
Example #7
def launch(code, calculation, kpoints, max_num_machines, max_wallclock_seconds,
           daemon, clean_workdir):
    """
    Run the PhBaseWorkChain for a previously completed PwCalculation
    """
    from aiida.orm.data.base import Bool
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    PhBaseWorkChain = WorkflowFactory('quantumespresso.ph.base')

    parameters = {'INPUTPH': {}}

    options = get_default_options(max_num_machines, max_wallclock_seconds)

    inputs = {
        'code': code,
        'qpoints': kpoints,
        'parent_folder': calculation.out.remote_folder,
        'parameters': ParameterData(dict=parameters),
        'options': ParameterData(dict=options),
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    if daemon:
        workchain = submit(PhBaseWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PhBaseWorkChain.__name__, workchain.pk))
    else:
        run(PhBaseWorkChain, **inputs)
Example #8
def test_pw_ibrav_tol(fixture_sandbox, generate_calc_job, fixture_code, generate_kpoints_mesh, generate_upf_data):
    """Test that `IBRAV_TOLERANCE` controls the tolerance when checking cell consistency."""
    entry_point_name = 'quantumespresso.pw'

    parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutrho': 240.0, 'ecutwfc': 30.0, 'ibrav': 2}}

    # The structure needs to be rotated in the same way QE does it for ibrav=2.
    param = 5.43
    eps = 0.1
    cell = [[-param / 2., eps, param / 2.], [-eps, param / 2. + eps, param / 2.], [-param / 2., param / 2., 0]]
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si')
    structure.append_atom(position=(param / 4., param / 4., param / 4.), symbols='Si', name='Si')

    upf = generate_upf_data('Si')
    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': structure,
        'kpoints': generate_kpoints_mesh(2),
        'parameters': orm.Dict(dict=parameters),
        'pseudos': {
            'Si': upf
        },
        'metadata': {
            'options': get_default_options()
        },
    }
    # Without adjusting the tolerance, the check fails.
    with pytest.raises(QEInputValidationError):
        generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    # After adjusting the tolerance, the input validation no longer fails.
    inputs['settings'] = orm.Dict(dict={'ibrav_cell_tolerance': eps})
    generate_calc_job(fixture_sandbox, entry_point_name, inputs)
Example #9
        def get_common_inputs():
            """Return the dictionary of inputs to be used as the basis for each `PwBaseWorkChain`."""
            protocol, protocol_modifiers = self._get_protocol()
            checked_pseudos = protocol.check_pseudos(
                modifier_name=protocol_modifiers.get('pseudo', None),
                pseudo_data=protocol_modifiers.get('pseudo_data', None))
            known_pseudos = checked_pseudos['found']

            inputs = AttributeDict({
                'pw': {
                    'code': self.inputs.code,
                    'parameters': self.ctx.parameters,
                    'metadata': {},
                }
            })

            if 'pseudo_family' in self.inputs:
                inputs.pw['pseudos'] = get_pseudos_from_structure(
                    self.inputs.structure, self.inputs.pseudo_family.value)
            else:
                inputs.pw['pseudos'] = get_pseudos_from_dict(
                    self.inputs.structure, known_pseudos)

            if 'set_2d_mesh' in self.inputs:
                inputs['set_2d_mesh'] = self.inputs.set_2d_mesh

            if 'options' in self.inputs:
                inputs.pw.metadata.options = self.inputs.options.get_dict()
            else:
                inputs.pw.metadata.options = get_default_options(with_mpi=True)

            return inputs
Example #10
def launch(
    code, parent_calc, kpoints, max_num_machines, max_wallclock_seconds, daemon):
    """
    Run the MatdynBaseWorkChain for a previously completed Q2rCalculation
    """
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import CalculationFactory, WorkflowFactory
    from aiida.work.run import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    MatdynBaseWorkChain = WorkflowFactory('quantumespresso.matdyn.base')

    options = get_default_options(max_num_machines, max_wallclock_seconds)

    inputs = {
        'code': code,
        'kpoints': kpoints,
        'parent_folder': parent_calc.out.force_constants,
        'options': ParameterData(dict=options),
    }

    if daemon:
        workchain = submit(MatdynBaseWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(MatdynBaseWorkChain.__name__, workchain.pid))
    else:
        run(MatdynBaseWorkChain, **inputs)
Example #11
    def _generate_inputs_pw():
        """Generate default inputs for a `PwCalculation`."""
        from aiida.orm import Dict
        from aiida_quantumespresso.utils.resources import get_default_options

        inputs = {
            'code': fixture_code('quantumespresso.pw'),
            'structure': generate_structure(),
            'kpoints': generate_kpoints_mesh(2),
            'parameters': Dict(dict={
                'CONTROL': {
                    'calculation': 'scf'
                },
                'SYSTEM': {
                    'ecutrho': 240.0,
                    'ecutwfc': 30.0
                }
            }),
            'pseudos': {
                'Si': generate_upf_data('Si')
            },
            'metadata': {
                'options': get_default_options()
            }
        }

        return inputs
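A sketch of how such an inputs factory is typically consumed, following the test pattern used elsewhere in this collection; the fixture name `generate_inputs_pw` and the test name are hypothetical.

def test_pw_inputs_sketch(fixture_sandbox, generate_calc_job, generate_inputs_pw):
    """Sketch (assumption): feed the generated default inputs to the calc-job generator."""
    inputs = generate_inputs_pw()
    generate_calc_job(fixture_sandbox, 'quantumespresso.pw', inputs)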
Example #12
def launch_workflow(code, calculation, kpoints_mesh, clean_workdir,
                    max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run the `PhBaseWorkChain` for a previously completed `PwCalculation`."""
    from aiida.orm import Bool, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'ph': {
            'code': code,
            'qpoints': kpoints_mesh,
            'parent_folder': calculation.outputs.remote_folder,
            'parameters': Dict(dict={'INPUTPH': {}}),
            'metadata': {
                'options':
                get_default_options(max_num_machines, max_wallclock_seconds,
                                    with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.ph.base'), daemon,
                          **inputs)
Example #13
def launch_calculation(code, kpoints_mesh, calculation, max_num_machines,
                       max_wallclock_seconds, with_mpi, daemon):
    """Run a PhCalculation."""
    from aiida import orm
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    # Check that the parent calculation node comes from quantumespresso.pw.
    # I cannot move this check into the option declaration, because CalcJobNode is not subclassed by the specific
    # calculation plugins (only Process is), and there is no feature yet to filter by the associated process_type.
    expected_process_type = 'aiida.calculations:quantumespresso.pw'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    parent_folder = calculation.get_outgoing(
        node_class=orm.RemoteData,
        link_label_filter='remote_folder').one().node

    inputs = {
        'code': code,
        'qpoints': kpoints_mesh,
        'parameters': orm.Dict(dict={'INPUTPH': {}}),
        'parent_folder': parent_folder,
        'metadata': {
            'options':
            get_default_options(max_num_machines, max_wallclock_seconds,
                                with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.ph'), daemon,
                          **inputs)
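The explicit `get_outgoing` traversal above retrieves the node that other examples access through the output attribute shortcut; a minimal equivalent, assuming AiiDA 1.x attribute access on outputs:

def get_parent_folder(calculation):
    """Sketch (assumption): return the `RemoteData` attached via the `remote_folder` output link."""
    return calculation.outputs.remote_folder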
Example #14
    def validate_inputs(self):
        """
        Validate inputs that might depend on each other and cannot be validated by the spec. Also define the
        dictionary `inputs` in the context, which will contain the inputs for the calculation that will be launched
        in the `run_calculation` step.
        """
        self.ctx.inputs = AttributeDict({
            'code':
            self.inputs.code,
            'parent_folder':
            self.inputs.parent_folder,
        })

        if 'parameters' in self.inputs:
            self.ctx.inputs.parameters = self.inputs.parameters.get_dict()
        else:
            self.ctx.inputs.parameters = {'INPUT': {}}

        if 'settings' in self.inputs:
            self.ctx.inputs.settings = self.inputs.settings.get_dict()
        else:
            self.ctx.inputs.settings = {}

        if 'options' in self.inputs:
            self.ctx.inputs._options = self.inputs.options.get_dict()
        else:
            self.ctx.inputs._options = get_default_options()
Example #15
def launch_workflow(code, calculation, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `Q2rBaseWorkChain` for a previously completed `PhCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    expected_process_type = 'aiida.calculations:quantumespresso.ph'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    inputs = {
        'q2r': {
            'code': code,
            'parent_folder': calculation.outputs.remote_folder,
            'metadata': {
                'options':
                get_default_options(max_num_machines, max_wallclock_seconds,
                                    with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.q2r.base'), daemon,
                          **inputs)
Example #16
def launch_workflow(code, structure, pseudo_family, kpoints_distance, ecutwfc,
                    ecutrho, hubbard_u, hubbard_v, hubbard_file_pk,
                    starting_magnetization, smearing,
                    automatic_parallelization, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run a `PwBaseWorkChain`."""
    from aiida.orm import Bool, Float, Str, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options, get_automatic_parallelization_options

    builder = WorkflowFactory('quantumespresso.pw.base').get_builder()

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
        },
    }

    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    builder.pw.code = code
    builder.pw.structure = structure
    builder.pw.parameters = Dict(dict=parameters)
    builder.pseudo_family = Str(pseudo_family)
    builder.kpoints_distance = Float(kpoints_distance)

    if hubbard_file:
        builder.hubbard_file = hubbard_file

    if automatic_parallelization:
        automatic_parallelization = get_automatic_parallelization_options(
            max_num_machines, max_wallclock_seconds)
        builder.automatic_parallelization = Dict(
            dict=automatic_parallelization)
    else:
        builder.pw.metadata.options = get_default_options(
            max_num_machines, max_wallclock_seconds, with_mpi)

    if clean_workdir:
        builder.clean_workdir = Bool(True)

    launch.launch_process(builder, daemon)
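The `automatic_parallelization` input built here mirrors the dictionary constructed by hand in Example #17 below; a sketch of the shape `get_automatic_parallelization_options` is assumed to return:

def sketch_automatic_parallelization_options(max_num_machines, max_wallclock_seconds):
    """Sketch (assumption): mirror the hand-built parallelization dictionary of Example #17."""
    return {
        'max_num_machines': max_num_machines,
        'target_time_seconds': 0.5 * max_wallclock_seconds,
        'max_wallclock_seconds': max_wallclock_seconds,
    }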
Example #17
def launch(code, structure, pseudo_family, kpoints, max_num_machines,
           max_wallclock_seconds, daemon, automatic_parallelization,
           clean_workdir, final_scf, group):
    """
    Run the PwRelaxWorkChain for a given input structure
    """
    from aiida.orm.data.base import Bool, Str
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.run import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    PwRelaxWorkChain = WorkflowFactory('quantumespresso.pw.relax')

    parameters = {
        'SYSTEM': {
            'ecutwfc': 30.,
            'ecutrho': 240.,
        },
    }

    inputs = {
        'code': code,
        'structure': structure,
        'pseudo_family': Str(pseudo_family),
        'kpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
    }

    if automatic_parallelization:
        parallelization = {
            'max_num_machines': max_num_machines,
            'target_time_seconds': 0.5 * max_wallclock_seconds,
            'max_wallclock_seconds': max_wallclock_seconds
        }
        inputs['automatic_parallelization'] = ParameterData(
            dict=parallelization)
    else:
        options = get_default_options(max_num_machines, max_wallclock_seconds)
        inputs['options'] = ParameterData(dict=options)

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    if final_scf:
        inputs['final_scf'] = Bool(True)

    if group:
        inputs['group'] = Str(group)

    if daemon:
        workchain = submit(PwRelaxWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PwRelaxWorkChain.__name__, workchain.pid))
    else:
        run(PwRelaxWorkChain, **inputs)
Example #18
def launch(code, structure, pseudo_family, kpoints, max_num_machines,
           max_wallclock_seconds, daemon, mode):
    """
    Run a PwCalculation for a given input structure
    """
    from aiida.orm import load_node
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.data.upf import get_pseudos_from_structure
    from aiida.orm.utils import CalculationFactory
    from aiida.work.run import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    PwCalculation = CalculationFactory('quantumespresso.pw')

    parameters = {
        'CONTROL': {
            'calculation': mode,
        },
        'SYSTEM': {
            'ecutwfc': 30.,
            'ecutrho': 240.,
        },
    }

    inputs = {
        'code': code,
        'structure': structure,
        'pseudo': get_pseudos_from_structure(structure, pseudo_family),
        'kpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
        'settings': ParameterData(dict={}),
        '_options': get_default_options(max_num_machines,
                                        max_wallclock_seconds),
    }

    process = PwCalculation.process()

    if daemon:
        calculation = submit(process, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PwCalculation.__name__, calculation.pid))
    else:
        click.echo('Running a PwCalculation in the {} mode... '.format(mode))
        results, pk = run(process, _return_pid=True, **inputs)
        calculation = load_node(pk)

        click.echo('PwCalculation<{}> terminated with state: {}'.format(
            pk, calculation.get_state()))
        click.echo('\n{link:25s} {node}'.format(link='Output link',
                                                node='Node pk and type'))
        click.echo('{s}'.format(s='-' * 60))
        for link, node in sorted(calculation.get_outputs(also_labels=True)):
            click.echo('{:25s} <{}> {}'.format(link, node.pk,
                                               node.__class__.__name__))
Example #19
def launch(code, calculation, kpoints, max_num_machines, max_wallclock_seconds,
           daemon):
    """
    Run a HpCalculation for a previously completed PwCalculation
    """
    from aiida.orm import load_node
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import CalculationFactory
    from aiida.work.launch import run_get_pid, submit
    from aiida_quantumespresso.utils.resources import get_default_options
    from aiida_quantumespresso_hp.utils.validation import validate_parent_calculation

    HpCalculation = CalculationFactory('quantumespresso.hp')

    try:
        validate_parent_calculation(calculation)
    except ValueError as exception:
        raise click.BadParameter(
            'invalid parent calculation: {}'.format(exception))

    parameters = {'INPUTHP': {}}

    inputs = {
        'code': code,
        'qpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
        'parent_folder': calculation.out.remote_folder,
        'options': get_default_options(max_num_machines,
                                       max_wallclock_seconds),
    }

    click.echo('Running a hp.x calculation ... ')

    process = HpCalculation.process()

    if daemon:
        calculation = submit(process, **inputs)
        pk = calculation.pk
        click.echo('Submitted {}<{}> to the daemon'.format(
            HpCalculation.__name__, calculation.pk))
    else:
        results, pk = run_get_pid(process, **inputs)

    calculation = load_node(pk)

    click.echo('HpCalculation<{}> terminated with state: {}'.format(
        pk, calculation.get_state()))
    click.echo('\n{link:25s} {node}'.format(link='Output link',
                                            node='Node pk and type'))
    click.echo('{s}'.format(s='-' * 60))
    for link, node in sorted(calculation.get_outputs(also_labels=True)):
        click.echo('{:25s} <{}> {}'.format(link, node.pk,
                                           node.__class__.__name__))
Example #20
def inputs(fixture_code):
    """Fixture: inputs for Z2packBaseWorkChain."""
    inputs = {
        'code': fixture_code('z2pack.z2pack'),
        'pw_code': fixture_code('quantumespresso.pw'),
        'overlap_code': fixture_code('quantumespresso.pw2wannier90'),
        'wannier90_code': fixture_code('wannier90.wannier90'),
        'metadata': {
            'options': get_default_options()
        }
    }
    return inputs
Example #21
    def _generate_inputs_q2r():
        """Generate default inputs for a `Q2rCalculation`."""
        from aiida_quantumespresso.utils.resources import get_default_options

        inputs = {
            'code': fixture_code('quantumespresso.q2r'),
            'parent_folder': generate_remote_data(fixture_localhost, fixture_sandbox.abspath, 'quantumespresso.ph'),
            'metadata': {
                'options': get_default_options()
            }
        }

        return inputs
Example #22
def launch_calculation(code, structure, pseudo_family, max_num_machines,
                       max_wallclock_seconds, with_mpi, daemon):
    """Run a CpCalculation."""
    from aiida.orm import Dict
    from aiida.orm.nodes.data.upf import get_pseudos_from_structure
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    parameters = {
        'CONTROL': {
            'calculation': 'cp',
            'restart_mode': 'from_scratch',
            'wf_collect': False,
            'iprint': 1,
            'isave': 100,
            'dt': 3.0,
            'max_seconds': 25 * 60,
            'nstep': 10,
        },
        'SYSTEM': {
            'ecutwfc': 30.0,
            'ecutrho': 240.0,
            'nr1b': 24,
            'nr2b': 24,
            'nr3b': 24,
        },
        'ELECTRONS': {
            'electron_damping': 1.0e-1,
            'electron_dynamics': 'damp',
            'emass': 400.0,
            'emass_cutoff': 3.0,
        },
        'IONS': {
            'ion_dynamics': 'none'
        },
    }

    inputs = {
        'code': code,
        'structure': structure,
        'pseudos': get_pseudos_from_structure(structure, pseudo_family),
        'parameters': Dict(dict=parameters),
        'metadata': {
            'options':
            get_default_options(max_num_machines, max_wallclock_seconds,
                                with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.cp'), daemon,
                          **inputs)
Example #23
    def _generate_inputs_matdyn():
        """Generate default inputs for a `MatdynCalculation`."""
        from aiida_quantumespresso.utils.resources import get_default_options

        inputs = {
            'code': fixture_code('quantumespresso.matdyn'),
            'force_constants': generate_force_constants_data,
            'kpoints': generate_kpoints_mesh(2),
            'metadata': {
                'options': get_default_options()
            }
        }

        return inputs
Example #24
def inputs(fixture_code, remote, parameters, settings):
    """Fixture: inputs for a `Pw2gwCalculation`."""
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'code': fixture_code('quantumespresso.pw2gw'),
        'parent_folder': remote,
        'parameters': orm.Dict(dict=parameters),
        'settings': orm.Dict(dict=settings),
        'metadata': {
            'options': get_default_options()
        }
    }
    return inputs
Example #25
def test_pw_ibrav(
    fixture_sandbox, generate_calc_job, fixture_code, generate_kpoints_mesh, generate_upf_data, file_regression
):
    """Test a `PwCalculation` where `ibrav` is explicitly specified."""
    entry_point_name = 'quantumespresso.pw'

    parameters = {'CONTROL': {'calculation': 'scf'}, 'SYSTEM': {'ecutrho': 240.0, 'ecutwfc': 30.0, 'ibrav': 2}}

    # The structure needs to be rotated in the same way QE does it for ibrav=2.
    param = 5.43
    cell = [[-param / 2., 0, param / 2.], [0, param / 2., param / 2.], [-param / 2., param / 2., 0]]
    structure = orm.StructureData(cell=cell)
    structure.append_atom(position=(0., 0., 0.), symbols='Si', name='Si')
    structure.append_atom(position=(param / 4., param / 4., param / 4.), symbols='Si', name='Si')

    upf = generate_upf_data('Si')
    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': structure,
        'kpoints': generate_kpoints_mesh(2),
        'parameters': orm.Dict(dict=parameters),
        'pseudos': {
            'Si': upf
        },
        'metadata': {
            'options': get_default_options()
        }
    }

    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    cmdline_params = ['-in', 'aiida.in']
    local_copy_list = [(upf.uuid, upf.filename, u'./pseudo/Si.upf')]
    retrieve_list = ['aiida.out', './out/aiida.save/data-file-schema.xml', './out/aiida.save/data-file.xml']
    retrieve_temporary_list = [['./out/aiida.save/K*[0-9]/eigenval*.xml', '.', 2]]

    # Check the attributes of the returned `CalcInfo`
    assert isinstance(calc_info, datastructures.CalcInfo)
    assert sorted(calc_info.cmdline_params) == sorted(cmdline_params)
    assert sorted(calc_info.local_copy_list) == sorted(local_copy_list)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)
    assert sorted(calc_info.retrieve_temporary_list) == sorted(retrieve_temporary_list)
    assert sorted(calc_info.remote_symlink_list) == sorted([])

    with fixture_sandbox.open('aiida.in') as handle:
        input_written = handle.read()

    # Checks on the files written to the sandbox folder as raw input
    assert sorted(fixture_sandbox.get_content_list()) == sorted(['aiida.in', 'pseudo', 'out'])
    file_regression.check(input_written, encoding='utf-8', extension='.in')
Example #26
def test_ph_default(fixture_database, fixture_computer_localhost,
                    fixture_sandbox_folder, generate_calc_job,
                    generate_code_localhost, generate_structure,
                    generate_kpoints_mesh, generate_remote_data,
                    file_regression):
    """Test a default `PhCalculation`."""
    entry_point_name = 'quantumespresso.ph'
    parent_entry_point = 'quantumespresso.pw'
    remote_path = fixture_sandbox_folder.abspath

    inputs = {
        'code':
        generate_code_localhost(entry_point_name, fixture_computer_localhost),
        'parent_folder':
        generate_remote_data(fixture_computer_localhost, remote_path,
                             parent_entry_point),
        'qpoints':
        generate_kpoints_mesh(2),
        'parameters':
        orm.Dict(dict={'INPUTPH': {}}),
        'metadata': {
            'options': get_default_options()
        }
    }

    calc_info = generate_calc_job(fixture_sandbox_folder, entry_point_name,
                                  inputs)

    cmdline_params = ['-in', 'aiida.in']
    retrieve_list = [
        './out/_ph0/aiida.phsave/tensors.xml', 'DYN_MAT', 'aiida.out'
    ]
    local_copy_list = []

    # Check the attributes of the returned `CalcInfo`
    assert isinstance(calc_info, datastructures.CalcInfo)
    assert sorted(
        calc_info.codes_info[0].cmdline_params) == sorted(cmdline_params)
    assert sorted(calc_info.local_copy_list) == sorted(local_copy_list)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)
    assert sorted(calc_info.remote_symlink_list) == sorted([])

    with fixture_sandbox_folder.open('aiida.in') as handle:
        input_written = handle.read()

    # Checks on the files written to the sandbox folder as raw input
    assert sorted(fixture_sandbox_folder.get_content_list()) == sorted(
        ['DYN_MAT', 'aiida.in'])
    file_regression.check(input_written, encoding='utf-8', extension='.in')
Example #27
def launch_calculation(code, datum, kpoints_mesh, max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run a MatdynCalculation."""
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'code': code,
        'kpoints': kpoints_mesh,
        'force_constants': datum,
        'metadata': {
            'options': get_default_options(max_num_machines, max_wallclock_seconds, with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.matdyn'), daemon, **inputs)
Example #28
def submit_workchain(structure):
    print("running dft band structure calculation for {}".format(
        structure.get_formula()))

    from aiida_quantumespresso.utils.resources import get_default_options

    # Submit the DFT bands workchain
    dft_workchain = submit(
        PwBandStructureWorkChain,
        code=code,
        structure=structure,
        #protocol=orm.Dict(dict={'name': 'theos-ht-1.0'}),
        protocol=orm.Dict(dict={'name': 'testing'}),
        options=orm.Dict(dict=get_default_options(
            max_wallclock_seconds=3600 * 5, with_mpi=True)))
    return dft_workchain
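The snippet above relies on module-level names (`submit`, `PwBandStructureWorkChain`, `orm`, `code`) defined elsewhere in the original script; a sketch of that assumed setup follows, with a placeholder code label.

# Sketch (assumption): module-level setup for the `submit_workchain` snippet above.
from aiida import orm
from aiida.engine import submit
from aiida.plugins import WorkflowFactory

PwBandStructureWorkChain = WorkflowFactory('quantumespresso.pw.band_structure')
code = orm.load_code('pw@localhost')  # hypothetical code label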
Example #29
    def _generate_inputs_ph():
        """Generate default inputs for a `PhCalculation`."""
        from aiida.orm import Dict
        from aiida_quantumespresso.utils.resources import get_default_options

        inputs = {
            'code': fixture_code('quantumespresso.ph'),
            'parent_folder': generate_remote_data(fixture_localhost, fixture_sandbox.abspath, 'quantumespresso.pw'),
            'qpoints': generate_kpoints_mesh(2),
            'parameters': Dict(dict={'INPUTPH': {}}),
            'metadata': {
                'options': get_default_options()
            }
        }

        return inputs
Example #30
def test_pw_default(aiida_profile, fixture_localhost, fixture_sandbox,
                    fixture_code, generate_calc_job, generate_remote_data,
                    tmpdir, file_regression):
    """Test a default `Pw2gwCalculation`."""
    entry_point_name = 'quantumespresso.pw2gw'

    parameters = {
        'INPUTPP': {
            'qplda': False,
            'vxcdiag': False,
            'vkb': False,
            'Emin': 0.0,
            'Emax': 15.0,
            'DeltaE': 0.001,
        }
    }

    parent = generate_remote_data(
        fixture_localhost,
        str(tmpdir),
        'quantumespresso.pw',
    )

    inputs = {
        'code': fixture_code(entry_point_name),
        'parameters': orm.Dict(dict=parameters),
        'parent_folder': parent,
        'metadata': {
            'options': get_default_options()
        }
    }

    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    retrieve_list = [
        'aiida.out', 'epsX.dat', 'epsY.dat', 'epsZ.dat', 'epsTOT.dat'
    ]

    assert isinstance(calc_info, datastructures.CalcInfo)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)

    with fixture_sandbox.open('aiida.in') as handle:
        input_written = handle.read()

    # Checks on the files written to the sandbox folder as raw input
    assert sorted(fixture_sandbox.get_content_list()) == sorted(['aiida.in'])
    file_regression.check(input_written, encoding='utf-8', extension='.in')