Example #1
def main(pot_family, import_from, queue, code, computer, no_import):
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    code = Code.get_from_string('{}@{}'.format(code, computer))
    workflow = WorkflowFactory('vasp.base')

    inputs = AttributeDict()
    inputs.structure = create_structure_Si()
    inputs.kpoints = create_kpoints()
    inputs.incar = create_params_simple()
    inputs.code = code
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})
    options = AttributeDict()
    options.queue_name = queue
    options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=options)

    submit(workflow, **inputs)
Example #2
        def generate_sub_input(inputs, namespace, task):
            """
            Generate inputs for the task by merging those given under the
            namespace into those supplied in the inputs.
            """
            if namespace in self.inputs:
                self.report(
                    'Taking input from the {} namespace'.format(namespace))
                bands_inputs = AttributeDict(
                    self.exposed_inputs(base_work, namespace=namespace))
            else:
                bands_inputs = AttributeDict(
                    {'calc': {
                        'parameters': orm.Dict(dict={'task': task})
                    }})

            # Special treatment - combine the parameters
            parameters = inputs.calc.parameters.get_dict()
            bands_parameters = bands_inputs.calc.parameters.get_dict()

            nested_update(parameters, bands_parameters)
            # Make sure the task name is correct
            nested_update(parameters, {'task': self._task_name})

            # Update the SCF name space with those from the bands name space
            nested_update(inputs, bands_inputs)

            # Apply the new parameters
            inputs.calc.parameters = orm.Dict(dict=parameters)

            return inputs
Example #3
def init_smearing_parameters():
    """Fixture for a set of general input parameters regarding Fermi integral smearing."""
    general_parameters = AttributeDict()
    general_parameters.smearing = AttributeDict()
    general_parameters.smearing.gaussian = True

    return general_parameters
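
The fixtures in this listing lean on the attribute-style access that AttributeDict (imported in Example #27 as aiida.common.extendeddicts.AttributeDict) adds on top of a plain dict: keys can be read and written as attributes while remaining ordinary dictionary items. A minimal sketch of that behaviour, assuming only that import:

from aiida.common.extendeddicts import AttributeDict

# Build the same nested structure as the fixture above.
general_parameters = AttributeDict()
general_parameters.smearing = AttributeDict()  # attribute assignment creates the key
general_parameters.smearing.gaussian = True

# Attribute access and item access see the same underlying data.
assert general_parameters['smearing']['gaussian'] is True
assert general_parameters.smearing.gaussian is True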
Example #4
 def inner(inputs=None, settings=None):
     from aiida.plugins import CalculationFactory
     from aiida.engine import run
     calculation = CalculationFactory('vasp.vasp')
     mock_vasp.store()
     create_authinfo(computer=mock_vasp.computer, store=True)
     kpoints, _ = vasp_kpoints
     parameters = AttributeDict(vasp_params.get_dict())
     inpts = AttributeDict()
     inpts.code = Code.get_from_string('mock-vasp@localhost')
     inpts.structure = vasp_structure
     inpts.parameters = get_data_class('dict')(dict=parameters)
     inpts.kpoints = kpoints
     inpts.potential = get_data_class(
         'vasp.potcar').get_potcars_from_structure(
             structure=inpts.structure,
             family_name=POTCAR_FAMILY_NAME,
             mapping=POTCAR_MAP)
     options = {
         'withmpi': False,
         'queue_name': 'None',
         'resources': {
             'num_machines': 1,
             'num_mpiprocs_per_machine': 1
         },
         'max_wallclock_seconds': 3600
     }
     inpts.metadata = {}
     inpts.metadata['options'] = options
     if inputs is not None:
         inpts.update(inputs)
     results_and_node = run.get_node(calculation, **inpts)
     return results_and_node
Example #5
def init_charge_parameters():
    """Fixture for a set of general input parameters regarding how to construct initial charge density."""
    general_parameters = AttributeDict()
    general_parameters.charge = AttributeDict()
    general_parameters.charge.constant_charge = True

    return general_parameters
Example #6
def test_charge_parameters():
    """Test charge parameters."""
    parameters = AttributeDict()
    parameters.charge = AttributeDict()
    parameters.charge.from_wave = True
    massager = ParametersMassage(parameters)
    parameters.charge.from_wave = False
    parameters.charge.from_charge = True
    massager = ParametersMassage(parameters)
    assert massager.parameters[_DEFAULT_OVERRIDE_NAMESPACE].icharg == 1
    parameters.charge.from_charge = False
    parameters.charge.from_atomic = True
    massager = ParametersMassage(parameters)
    assert massager.parameters[_DEFAULT_OVERRIDE_NAMESPACE].icharg == 2
    parameters.charge.from_atomic = False
    parameters.charge.from_potential = True
    massager = ParametersMassage(parameters)
    assert massager.parameters[_DEFAULT_OVERRIDE_NAMESPACE].icharg == 4
    parameters.charge.from_potential = False
    parameters.charge.constant_charge = True
    massager = ParametersMassage(parameters)
    assert massager.parameters[_DEFAULT_OVERRIDE_NAMESPACE].icharg == 11
    parameters.charge.constant_charge = False
    parameters.charge.constant_atomic = True
    massager = ParametersMassage(parameters)
    assert massager.parameters[_DEFAULT_OVERRIDE_NAMESPACE].icharg == 12
Example #7
def test_charge_parameters():
    """Test charge parameters."""
    parameters = AttributeDict()
    parameters.charge = AttributeDict()
    parameters.charge.from_wave = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    parameters.charge.from_wave = False
    parameters.charge.from_charge = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.icharg == 1
    parameters.charge.from_charge = False
    parameters.charge.from_atomic = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.icharg == 2
    parameters.charge.from_atomic = False
    parameters.charge.from_potential = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.icharg == 4
    parameters.charge.from_potential = False
    parameters.charge.constant_charge = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.icharg == 11
    parameters.charge.constant_charge = False
    parameters.charge.constant_atomic = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.icharg == 12
Example #8
    def inner(settings=None, parameters=None):

        inputs = AttributeDict()

        metadata = AttributeDict({
            'options': {
                'resources': {
                    'num_machines': 1,
                    'num_mpiprocs_per_machine': 1
                }
            }
        })

        if settings is not None:
            inputs.settings = Dict(dict=settings)

        if isinstance(parameters, dict):
            parameters = get_data_class('dict')(dict=parameters)

        if parameters is None:
            parameters = vasp_params

        inputs.code = vasp_code
        inputs.metadata = metadata
        inputs.parameters = parameters
        inputs.kpoints, _ = vasp_kpoints
        inputs.structure = vasp_structure
        inputs.potential = potentials

        inputs.wannier_parameters = wannier_params
        inputs.wannier_projections = wannier_projections

        return inputs
Example #9
def test_skip_parameters_validate():  # pylint: disable=invalid-name
    """Test that it is possibly to completely by-pass parameters checking."""
    parameters = AttributeDict()
    parameters[_DEFAULT_OVERRIDE_NAMESPACE] = AttributeDict()
    parameters[_DEFAULT_OVERRIDE_NAMESPACE].not_valid = 200
    massager = ParametersMassage(parameters, skip_parameters_validation=True)
    assert massager.parameters[_DEFAULT_OVERRIDE_NAMESPACE].not_valid == 200
Example #10
 def setup(self):
     super().setup()
     self.ctx.inputs = AttributeDict(self.exposed_inputs(BigDFTCalculation))
     if self.inputs.get('run_opts') is not None:
         self.ctx.inputs.metadata = AttributeDict(
             self.inputs.run_opts.get_dict())
     else:
         self.ctx.inputs.metadata = {}
Example #11
def init_bands_parameters():
    """Fixture for a set of general input parameters for band structure calculations."""
    general_parameters = AttributeDict()
    general_parameters.bands = AttributeDict()
    general_parameters.bands.decompose_bands = False
    general_parameters.bands.decompose_wave = False

    return general_parameters
Example #12
def test_pwcutoff_to_encut():
    """Test that the pwcutoff is converted to encut."""
    parameters = AttributeDict()
    parameters.electronic = AttributeDict()
    parameters.electronic.pwcutoff = 200
    massager = ParametersMassage(parameters)
    assert massager.parameters[
        _DEFAULT_OVERRIDE_NAMESPACE].encut == parameters.electronic.pwcutoff
Example #13
def init_simple_workchain():
    """Fixture to simulate a fake workchain to store the exit codes and a dummy report function."""
    workchain = AttributeDict()
    workchain.exit_codes = AttributeDict()
    workchain.exit_codes.ERROR_INVALID_PARAMETER_DETECTED = 1
    workchain.exit_codes.ERROR_MISSING_PARAMETER_DETECTED = 1
    workchain.report = print

    return workchain
Example #14
def test_orbital_projections():  # pylint: disable=too-many-statements
    """Test the parameters associated with orbital projections."""
    parameters = AttributeDict()
    parameters.bands = AttributeDict()
    parameters.bands.decompose_wave = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 5
    parameters.bands.decompose_wave = False
    parameters.bands.decompose_bands = True
    parameters.bands.decompose_auto = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 14
    parameters.bands.decompose_auto = False
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 10
    parameters.bands.lm = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 11
    parameters.bands.phase = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 12
    parameters.bands.lm = False
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 12

    # Now do the same with a Wigner-Seitz radius supplied
    parameters.bands.wigner_seitz_radius = [2.0]
    parameters.bands.lm = False
    parameters.bands.phase = False
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 0
    print(massager.parameters.rwigs)
    assert int(massager.parameters.rwigs[0]) == 2
    parameters.bands.lm = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 1
    parameters.bands.phase = True
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 2
    parameters.bands.lm = False
    massager = ParametersMassage(None, parameters)
    assert massager.exit_code is None
    assert massager.parameters.lorbit == 2

    # Should raise ValueError if Wigner-Seitz radius is not defined as a list.
    parameters.bands.wigner_seitz_radius = 2.0
    with pytest.raises(ValueError):
        massager = ParametersMassage(None, parameters)
Example #15
def test_unsupported_fail_override():
    """Test that any supplied unsupported parameters in the regular parameters dictionary yield error."""
    parameters = AttributeDict()
    parameters[_DEFAULT_OVERRIDE_NAMESPACE] = AttributeDict()
    parameters[_DEFAULT_OVERRIDE_NAMESPACE].not_valid = 200
    matching_string = re.compile(
        r'^The supplied key: not_valid is not a support VASP parameter.$')
    with pytest.raises(ValueError, match=matching_string):
        _ = ParametersMassage(parameters)
Example #16
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # We set the workchain you would like to call
    workchain = WorkflowFactory('vasp.relax')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {}

    # Set inputs for the following WorkChain execution
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = DataFactory('array.kpoints')()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = DataFactory('dict')(dict=incar)
    # Set potentials and their mapping
    inputs.potential_family = DataFactory('str')(potential_family)
    inputs.potential_mapping = DataFactory('dict')(dict=potential_mapping)
    # Set options
    inputs.options = DataFactory('dict')(dict=options)
    # Set settings
    inputs.settings = DataFactory('dict')(dict=settings)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = DataFactory('bool')(True)

    # Relaxation related parameters that are passed to the relax workchain
    relax = AttributeDict()
    # Turn on relaxation
    relax.perform = DataFactory('bool')(True)
    # Select relaxation algorithm
    relax.algo = DataFactory('str')('cg')
    # Set force cutoff limit (EDIFFG, but no sign needed)
    relax.force_cutoff = DataFactory('float')(0.01)
    # Turn on relaxation of positions (strictly not needed as the default is on)
    # The next three parameters correspond to the well-known ISIF=3 setting
    relax.positions = DataFactory('bool')(True)
    # Turn on relaxation of the cell shape (defaults to False)
    relax.shape = DataFactory('bool')(True)
    # Turn on relaxation of the volume (defaults to False)
    relax.volume = DataFactory('bool')(True)
    # Set maximum number of ionic steps
    relax.steps = DataFactory('int')(100)
    # Set the relaxation parameters on the inputs
    inputs.relax = relax
    # Submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
Example #17
def init_dict2():
    """Fixture for dictionary two."""
    dct = AttributeDict()
    dct.dct1 = AttributeDict()
    dct.dct1.test = 2.0
    dct.dct1.test2 = 'string2'
    dct.dct1.test3 = [2.0]
    dct.dct1.test4 = {'key2': 'value2'}

    return dct
Example #18
def init_dict1():
    """Fixture for dictionary one."""
    dct = AttributeDict()
    dct.dct1 = AttributeDict()
    dct.dct1.test = 1.0
    dct.dct1.test2 = 'string1'
    dct.dct1.test3 = [1.0]
    dct.dct1.test4 = {'key1': 'value1'}

    return dct
Example #19
def test_additional_override_namespaces(init_relax_parameters):  # pylint: disable=invalid-name
    """Test that we can supply additional override namespaces and that they are unmodified in the massager."""
    init_relax_parameters.myspace = AttributeDict({'myspaceparameter': 1})
    init_relax_parameters.yourspace = AttributeDict({'yourspaceparameter': 1})
    massager = ParametersMassage(init_relax_parameters,
                                 settings=AttributeDict({
                                     'additional_override_namespaces':
                                     ['myspace', 'yourspace']
                                 }))
    assert massager.parameters.myspace.myspaceparameter == 1
    assert massager.parameters.yourspace.yourspaceparameter == 1
Example #20
def init_relax_parameters():
    """Fixture for a set of general input parameters for relaxation."""
    general_parameters = AttributeDict()
    general_parameters.relax = AttributeDict()
    general_parameters.relax.algo = 'cg'
    general_parameters.relax.force_cutoff = 0.01
    general_parameters.relax.steps = 60
    general_parameters.relax.positions = True
    general_parameters.relax.shape = True
    general_parameters.relax.volume = True

    return general_parameters
Example #21
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""

    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # Then, we set the workchain you would like to call
    workchain = WorkflowFactory('vasp.master')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    # Set inputs for the following WorkChain execution
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = dict_data(dict=incar)
    # Set potentials and their mapping
    inputs.potential_family = Str(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    # Set options
    inputs.options = dict_data(dict=options)
    # Set settings
    inputs.settings = dict_data(dict=settings)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)

    # Master, convergence and relaxation related parameters that are passed to the master,
    # convergence and relaxation workchains, respectively
    # Turn off relaxation
    relax = AttributeDict()
    relax.perform = Bool(False)
    inputs.relax = relax
    # Extract electronic band structure
    inputs.extract_bands = Bool(True)
    # Submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
Example #22
 def _init_settings(self):
     """Initialize the settings."""
     # Make sure we parse the bands
     if 'settings' in self.inputs:
         settings = AttributeDict(self.inputs.settings.get_dict())
     else:
         settings = AttributeDict({'parser_settings': {}})
     dict_entry = {'add_bands': True}
     try:
         settings.parser_settings.update(dict_entry)
     except AttributeError:
         settings.parser_settings = dict_entry
     self.ctx.inputs.settings = settings
Example #23
def inherit_and_merge_parameters(inputs):
    """
    Go through the input namespaces and the namespaces in inputs.parameters and merge them.

    Note that parameters specified in the inputs.parameters will override what is supplied as workchain input,
    in case there is overlap.
    """
    parameters = AttributeDict()
    namespaces = ['electronic', 'bands', 'smearing', 'charge', 'relax', 'converge']
    for namespace in namespaces:  # pylint: disable=too-many-nested-blocks
        parameters[namespace] = AttributeDict()
        try:
            for key, item in inputs[namespace].items():
                if isinstance(item, DataFactory('array')):
                    # Only allow one array per input
                    if len(item.get_arraynames()) > 1:
                        raise IndexError(
                            'The input array with a key {} contains more than one array. Please make sure an input only contains one array.'
                            .format(key))
                    for array in item.get_arraynames():
                        parameters[namespace][key] = item.get_array(array)
                elif isinstance(item, DataFactory('dict')):
                    parameters[namespace][key] = item.get_dict()
                elif isinstance(item, DataFactory('list')):
                    parameters[namespace][key] = item.get_list()
                else:
                    parameters[namespace][key] = item.value
        except KeyError:
            pass

    # Now get the input parameters and update the dictionary. This means,
    # any supplied namespace in the parameters (i.e. inputs.parameters.somekey) will override what is supplied to the workchain
    # input namespace (i.e. inputs.somekey).
    try:
        # inputs might not have parameters, or parameters might be empty
        input_parameters = AttributeDict(inputs.parameters.get_dict())
    except AttributeError:
        input_parameters = {}

    # Now check that no loose keys are residing on the root of input_parameters, everything should be in
    # the vasp or aiida namespace
    #valid_keys = ['vasp', 'aiida']
    #if not list(input_parameters.keys()).sort() == valid_keys.sort():
    #    raise ValueError('Unsupported keys detected on parameter root. '
    #                     'Please make sure all keys reside inside the vasp or aiida namespace.')

    # We cannot use regular update here, as we only want to replace each key if it exists, if a key
    # contains a new dict we need to traverse that, hence we have a function to perform this update
    update_nested_dict(parameters, input_parameters)

    return parameters
Example #24
 def _init_settings(self):
     """Initialize the settings."""
     # Make sure we parse the output structure when we want to perform
     # relaxations (override if contrary entry exists).
     if 'settings' in self.inputs:
         settings = AttributeDict(self.inputs.settings.get_dict())
     else:
         settings = AttributeDict({'parser_settings': {}})
     if self.perform_relaxation():
         dict_entry = {'add_structure': True}
         try:
             settings.parser_settings.update(dict_entry)
         except AttributeError:
             settings.parser_settings = dict_entry
     self.ctx.inputs.settings = settings
Example #25
def inherit_and_merge_parameters(inputs):
    """
    Go through the input namespaces and the namespaces in inputs.parameters and merge them.

    Note that parameters specified in the inputs.parameters will override what is supplied as workchain input,
    in case there is overlap.
    """
    parameters = AttributeDict()
    namespaces = _BASE_NAMESPACES

    # We start with a clean parameters and first set the allowed namespaces and its content from the inputs of the workchain
    for namespace in namespaces:  # pylint: disable=too-many-nested-blocks
        parameters[namespace] = AttributeDict()
        try:
            for key, item in inputs[namespace].items():
                if isinstance(item, DataFactory('array')):
                    # Only allow one array per input
                    if len(item.get_arraynames()) > 1:
                        raise IndexError(
                            'The input array with a key {} contains more than one array. Please make sure an input only contains one array.'
                            .format(key))
                    for array in item.get_arraynames():
                        parameters[namespace][key] = item.get_array(array)
                elif isinstance(item, DataFactory('dict')):
                    parameters[namespace][key] = item.get_dict()
                elif isinstance(item, DataFactory('list')):
                    parameters[namespace][key] = item.get_list()
                else:
                    parameters[namespace][key] = item.value
        except KeyError:
            pass

    # Then obtain the inputs.parameters.
    # Here we do not do any checks for valid parameters, that is done later when reaching the ParameterMassager.
    try:
        input_parameters = AttributeDict(inputs.parameters.get_dict())
    except AttributeError:
        # Inputs might not have parameters
        input_parameters = AttributeDict()

    # Now the namespace and content of the workchain inputs and the inputs.parameters are merged.
    # Any supplied namespace in the parameters (i.e. inputs.parameters.somekey) will override what
    # is supplied to the workchain input namespace (i.e. inputs.somekey).
    # We cannot use regular update here, as we only want to replace each key if it exists, if a key
    # contains a new dict we need to traverse that, hence we have a function to perform this update.
    update_nested_dict(parameters, input_parameters)

    return parameters
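
Both versions of inherit_and_merge_parameters hand the actual merging to update_nested_dict, which, as the closing comment explains, cannot be a plain dict.update: a value that is itself a dictionary has to be traversed and merged key by key instead of being replaced wholesale. A minimal sketch of such a recursive merge (the real update_nested_dict used by the workchains may differ in details):

def update_nested_dict(dictionary, updates):
    """Recursively merge updates into dictionary, descending into nested dicts (illustrative sketch)."""
    for key, value in updates.items():
        if isinstance(value, dict) and isinstance(dictionary.get(key), dict):
            # Both sides hold a dict for this key: merge the contents instead of replacing.
            update_nested_dict(dictionary[key], value)
        else:
            # Plain value (or new key): the update wins, so inputs.parameters overrides the workchain input.
            dictionary[key] = value
    return dictionary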
Example #26
    def run_scf(self):
        """
        Run the SCF calculation
        """

        base_work = WorkflowFactory(self._base_wk_string)
        inputs = AttributeDict(self.exposed_inputs(base_work, namespace='scf'))
        inputs.metadata.call_link_label = 'scf'
        inputs.calc.structure = self.ctx.current_structure

        # Ensure that the check/castep_bin file will be written
        param_dict = inputs.calc.parameters.get_dict()
        if 'PARAM' in param_dict:
            ensure_checkpoint(param_dict['PARAM'])
        else:
            ensure_checkpoint(param_dict)

        # Update if changes are made
        if param_dict != inputs.calc.parameters.get_dict():
            self.report(
                "Updated the PARAM to make sure castep_bin file will be written"
            )
            inputs.calc.parameters = orm.Dict(dict=param_dict)

        running = self.submit(base_work, **inputs)
        self.report('Running SCF calculation {}'.format(running))
        self.to_context(workchain_scf=running)
Example #27
def test_validate_input(test_crystal_code, test_structure_data,
                        crystal_calc_parameters, test_basis_family_predefined):
    from aiida.common.extendeddicts import AttributeDict
    from aiida_crystal_dft.calculations.serial import CrystalSerialCalculation
    inputs = AttributeDict()
    with pytest.raises(ValueError):
        CrystalSerialCalculation(inputs)
    inputs.metadata = {
        'options': {
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }
    }
    inputs.code = test_crystal_code
    with pytest.raises(ValueError):
        CrystalSerialCalculation(inputs)
    inputs.structure = test_structure_data
    with pytest.raises(ValueError):
        CrystalSerialCalculation(inputs)
    inputs.parameters = crystal_calc_parameters
    # TODO: write schemas code checking that either basis or basis_family is present!
    # with pytest.raises(ValueError):
    #     CrystalSerialCalculation(inputs)
    # inputs.basis_family = test_basis_family_predefined
    assert CrystalSerialCalculation(inputs)
Example #28
    def validate_resources(cls, **kwargs):
        """Validate the resources against the job resource class of this scheduler.

        :param kwargs: dictionary of values to define the job resources
        :return: attribute dictionary with the parsed parameters populated
        :raises ValueError: if the resources are invalid or incomplete
        """
        resources = AttributeDict()

        try:
            resources.parallel_env = kwargs.pop('parallel_env')
        except KeyError as exception:
            raise ValueError('`parallel_env` must be specified and must be a string') from exception
        else:
            if not isinstance(resources.parallel_env, str):
                raise ValueError('`parallel_env` must be specified and must be a string')

        try:
            resources.tot_num_mpiprocs = int(kwargs.pop('tot_num_mpiprocs'))
        except (KeyError, TypeError, ValueError) as exception:
            raise ValueError('`tot_num_mpiprocs` must be specified and must be an integer') from exception

        if resources.tot_num_mpiprocs < 1:
            raise ValueError('`tot_num_mpiprocs` must be greater than or equal to one.')

        if kwargs:
            raise ValueError(f"these parameters were not recognized: {', '.join(list(kwargs.keys()))}")

        return resources
Example #29
    def __init__(
        self,
        inputs: Optional[dict] = None,
        logger: Optional[logging.Logger] = None,
        runner: Optional['Runner'] = None,
        enable_persistence: bool = True
    ) -> None:
        """Construct a WorkChain instance.

        Construct the instance only if it is a sub class of `WorkChain`, otherwise raise `InvalidOperation`.

        :param inputs: work chain inputs
        :param logger: aiida logger
        :param runner: work chain runner
        :param enable_persistence: whether to persist this work chain

        """
        if self.__class__ == WorkChain:
            raise exceptions.InvalidOperation('cannot construct or launch a base `WorkChain` class.')

        super().__init__(inputs, logger, runner, enable_persistence=enable_persistence)

        self._stepper: Optional[Stepper] = None
        self._awaitables: List[Awaitable] = []
        self._context = AttributeDict()
Example #30
    def __init__(self,
                 inputs=None,
                 logger=None,
                 runner=None,
                 enable_persistence=True):
        """Construct a WorkChain instance.

        Construct the instance only if it is a sub class of `WorkChain`, otherwise raise `InvalidOperation`.

        :param inputs: work chain inputs
        :type inputs: dict

        :param logger: aiida logger
        :type logger: :class:`logging.Logger`

        :param runner: work chain runner
        :type runner: :class:`aiida.engine.runners.Runner`

        :param enable_persistence: whether to persist this work chain
        :type enable_persistence: bool

        """
        if self.__class__ == WorkChain:
            raise exceptions.InvalidOperation(
                'cannot construct or launch a base `WorkChain` class.')

        super().__init__(inputs,
                         logger,
                         runner,
                         enable_persistence=enable_persistence)

        self._stepper = None
        self._awaitables = []
        self._context = AttributeDict()