Example #1
def launch_inpgen(structure, inpgen, calc_parameters, settings, daemon):
    """
    Launch an inpgen calcjob on the given inputs

    If no code is given it queries the DB for inpgen codes and uses the one with
    the newest creation time.

    Either structure or anysource_structure can be specified.
    Default structure is Si bulk.
    """

    process_class = CalculationFactory('fleur.inpgen')
    inputs = {
        'code': inpgen,
        'structure': structure,
        'parameters': calc_parameters,
        'settings': settings,
        'metadata': {
            'options': {
                'withmpi': False,
                'max_wallclock_seconds': 6000,
                'resources': {
                    'num_machines': 1,
                    'num_mpiprocs_per_machine': 1,
                }
            }
        }
    }
    inputs = clean_nones(inputs)
    builder = process_class.get_builder()
    builder.update(inputs)
    launch_process(builder, daemon)
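
The launcher above (and the fleur launcher in Example #5) relies on a clean_nones helper that is not shown on this page. A minimal sketch of what such a helper presumably does, namely dropping optional inputs that were never set so they do not reach the builder (an assumption, not the plugin's actual implementation):

def clean_nones(mapping):
    """Recursively drop keys whose value is None, so optional inputs that
    were not provided are not passed on to the process builder."""
    cleaned = {}
    for key, value in mapping.items():
        if isinstance(value, dict):
            value = clean_nones(value)
        if value is not None:
            cleaned[key] = value
    return cleaned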
Example #2
def test_input_creation(
    db_test_app,  # pylint: disable=unused-argument
    get_potential_data,
    calc_type,
    potential_type,
    file_regression,
):
    """
    Test the generation of the input file for lammps
    """
    pot_data = get_potential_data(potential_type)
    potential_data = DataFactory('lammps.potential')(
        potential_type=pot_data.type,
        data=pot_data.data,
    )
    parameter_data = get_calc_parameters(
        '17 Aug 2017',
        calc_type,
        potential_data.default_units,
        potential_type,
    )

    calc = CalculationFactory(calc_type)
    content = calc.create_main_input_content(
        parameter_data,
        potential_data,
        kind_symbols=['A', 'B'],
        structure_filename='input.data',
        trajectory_filename='output.traj',
        system_filename='sys_info.txt',
        restart_filename='calc.restart',
    )
    file_regression.check(content)
Example #3
    def get_builder(self):
        """Create and return a new `ProcessBuilder` for the `CalcJob` class of the plugin configured for this code.

        The configured calculation plugin class is defined by the `get_input_plugin_name` method.

        .. note:: it also sets the ``builder.code`` value.

        :return: a `ProcessBuilder` instance with the `code` input already populated with ourselves
        :raise aiida.common.EntryPointError: if the specified plugin does not exist.
        :raise ValueError: if no default plugin was specified.
        """
        from aiida.plugins import CalculationFactory

        plugin_name = self.get_input_plugin_name()

        if plugin_name is None:
            raise ValueError(
                'no default calculation input plugin specified for this code')

        try:
            process_class = CalculationFactory(plugin_name)
        except EntryPointError:
            raise EntryPointError(
                'the calculation entry point `{}` could not be loaded'.format(
                    plugin_name))

        builder = process_class.get_builder()
        builder.code = self

        return builder
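
A typical way such a code-provided builder is consumed looks roughly like the following; the code label and option values are purely illustrative:

from aiida.orm import load_code
from aiida.engine import submit

code = load_code('pw@localhost')  # hypothetical code label
builder = code.get_builder()      # builder.code is already set to this code
builder.metadata.options.resources = {'num_machines': 1}
builder.metadata.options.max_wallclock_seconds = 1800
# ... set the remaining inputs required by the calculation plugin ...
node = submit(builder)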
Example #4
def get_immigrant_builder(calculation_folder,
                          calculator_settings,
                          calc_type=None):
    if calc_type:
        code = Code.get_from_string(
            calculator_settings[calc_type]['code_string'])
    else:
        code = Code.get_from_string(calculator_settings['code_string'])

    if code.attributes['input_plugin'] in ['vasp.vasp']:
        if calc_type is None:
            settings_dict = calculator_settings.get_dict()
        else:
            settings_dict = calculator_settings[calc_type]

        calc_cls = CalculationFactory('vasp.vasp')
        params = {
            'metadata': {
                'options': settings_dict['options']
            },
            'settings': settings_dict
        }
        if 'potential_family' in settings_dict:
            params['potential_family'] = settings_dict['potential_family']
        if 'potential_mapping' in settings_dict:
            params['potential_mapping'] = settings_dict['potential_mapping']

        _, builder = calc_cls.immigrant(code, calculation_folder, **params)
        builder.metadata['options']['parser_name'] = 'vasp.vasp'
    else:
        raise RuntimeError("Only codes using the 'vasp.vasp' input plugin are supported.")

    return builder
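
A hedged usage sketch of get_immigrant_builder; the folder path and the settings content are purely illustrative:

from aiida.orm import Dict
from aiida.engine import submit

calculator_settings = Dict(dict={
    'code_string': 'vasp@cluster',                  # hypothetical code label
    'options': {'resources': {'num_machines': 1}},
})
builder = get_immigrant_builder('/path/to/finished/vasp/run', calculator_settings)
node = submit(builder)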
Example #5
def launch_fleur(fleurinp, fleur, parent_folder, settings, daemon,
                 max_num_machines, max_wallclock_seconds,
                 num_mpiprocs_per_machine, option_node, with_mpi, launch_base):
    """
    Launch a base_fleur workchain.
    If launch_base is False, launch a single fleur calcjob instead.

    """

    process_class = CalculationFactory('fleur.fleur')
    workchain_class = WorkflowFactory('fleur.base')

    inputs = {
        'code': fleur,
        'fleurinpdata': fleurinp,
        'parent_folder': parent_folder,
        'settings': settings,
        'metadata': {
            'options': {
                'withmpi': with_mpi,
                'max_wallclock_seconds': max_wallclock_seconds,
                'resources': {
                    'num_machines': max_num_machines,
                    'num_mpiprocs_per_machine': num_mpiprocs_per_machine,
                }
            }
        }
    }

    if not launch_base:
        inputs = clean_nones(inputs)
        builder = process_class.get_builder()
        builder.update(inputs)
    else:
        if option_node is None:
            option_node = Dict(
                dict={
                    'withmpi': with_mpi,
                    'max_wallclock_seconds': max_wallclock_seconds,
                    'resources': {
                        'num_machines': max_num_machines,
                        'num_mpiprocs_per_machine': num_mpiprocs_per_machine
                    }
                })

        inputs_base = {
            'code': fleur,
            'fleurinpdata': fleurinp,
            'parent_folder': parent_folder,
            'settings': settings,
            'options': option_node
        }
        inputs_base = clean_nones(inputs_base)
        builder = workchain_class.get_builder()
        builder.update(**inputs_base)

    launch_process(builder, daemon)
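
Both fleur launchers hand the populated builder to a launch_process helper that is not reproduced here. A plausible minimal sketch of its behaviour, submitting to the daemon or running blocking in the current interpreter (an assumption about the helper, not the plugin's actual code):

from aiida.engine import launch

def launch_process(builder, daemon):
    """Submit the builder to the daemon, or run it blocking."""
    if daemon:
        node = launch.submit(builder)
        print('Submitted {}<{}> to the daemon'.format(node.process_label, node.pk))
    else:
        _, node = launch.run_get_node(builder)
        print('Run finished, exit status: {}'.format(node.exit_status))
    return node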
Example #6
def silicon_builder(db_test_app):
    """Prepare a mock - ready calculation for silicon"""
    silicon = orm.StructureData()
    r_unit = 2.6954645
    silicon.set_cell(np.array([[1, 1, 0], [1, 0, 1], [0, 1, 1]]) * r_unit)
    silicon.append_atom(symbols=["Si"], position=[0, 0, 0])
    silicon.append_atom(symbols=["Si"], position=[r_unit * 0.5] * 3)
    silicon.label = "Si"
    silicon.description = "A silicon structure"
    param_dict = {
        # Notice that the keywords are grouped into two sub-dictionaries
        # just like you would do when preparing the inputs by hand
        "CELL": {
            "symmetry_generate": True,
            "snap_to_symmetry": True,
            # Pass a list of strings to set BLOCK inputs
            #"cell_constraints":
            #["0 0 0", "0 0 0"]
        },
        "PARAM": {
            "task": "singlepoint",
            "basis_precision": "medium",
            "fix_occupancy":
            True,  # Use bool type to make it easy for querying
            "opt_strategy": "memory",
            "num_dump_cycles": 0,
            "write_formatted_density": True
        }
    }
    # We need to create a Dict node that holds the dictionary
    param = orm.Dict(dict=param_dict)
    kpoints = orm.KpointsData()
    # Use a 4x4x4 gamma-centred mesh (no offset)
    kpoints.set_kpoints_mesh((4, 4, 4), offset=(0, 0, 0))
    c9 = OTFGData(otfg_entry="C9")
    CastepCalculation = CalculationFactory('castep.castep')
    code_path = check_output(['which', 'castep.mock'],
                             universal_newlines=True).strip()
    castep_mock = orm.Code((db_test_app.localhost, code_path),
                           input_plugin_name='castep.castep')

    builder = CastepCalculation.get_builder()
    builder.structure = silicon
    builder.parameters = param
    builder.kpoints = kpoints
    builder.code = castep_mock
    builder.pseudos = {'Si': c9}
    builder.metadata.options.withmpi = False
    builder.metadata.options.resources = {
        'num_machines': 1,
        'tot_num_mpiprocs': 2
    }
    builder.metadata.options.max_wallclock_seconds = 600
    builder.metadata.label = "Si SINGLEPOINT"
    builder.metadata.description = 'An example CASTEP calculation for silicon'
    return builder
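
With the mock CASTEP code in place, the returned builder can be run directly. A short illustrative usage (the test name is hypothetical):

from aiida.engine import run_get_node

def test_silicon_singlepoint(db_test_app):
    builder = silicon_builder(db_test_app)
    results, node = run_get_node(builder)
    assert node.is_finished_ok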
Example #7
    def setup_tbmodels(self, calc_string):
        """
        Helper function to create the builder for TBmodels calculations.
        """
        builder = CalculationFactory(calc_string).get_builder()
        builder.code = self.inputs.code_tbmodels
        builder.metadata.options = dict(
            resources={'num_machines': 1}, withmpi=False
        )
        return builder
Example #8
def get_inputs_inpgen(structure, inpgencode, options, label='', description='', settings=None, params=None, **kwargs):
    '''
    Assembles the input dictionary for an inpgen calculation.

    :param structure: input structure of StructureData type
    :param inpgencode: inpgen code of Code type
    :param options: calculation options that will be stored in metadata
    :param label: a string setting a label of the CalcJob in the DB
    :param description: a string setting a description of the CalcJob in the DB
    :param params: input parameters for inpgen code of Dict type

    Example of use::

        inputs_build = get_inputs_inpgen(structure, inpgencode, options, label,
                                         description, params=params)
        future = self.submit(inputs_build)

    '''

    FleurinpProcess = CalculationFactory('fleur.inpgen')
    inputs = FleurinpProcess.get_builder()

    if structure:
        inputs.structure = structure
    if inpgencode:
        inputs.code = inpgencode
    if params:
        inputs.parameters = params
    if settings:
        inputs.settings = settings
    if description:
        inputs.metadata.description = description
    else:
        inputs.metadata.description = ''

    if label:
        inputs.metadata.label = label
    else:
        inputs.metadata.label = ''

    if not options:
        options = {}
    # inpgen always runs in serial
    options['withmpi'] = False
    options['resources'] = {'num_machines': 1, 'num_mpiprocs_per_machine': 1}

    if options:
        inputs.metadata.options = options

    # Currently this does not work, find out how to...
    # for key, val in kwargs.items():
    #    inputs[key] = val

    return inputs
Example #9
def test_missing_input_raises(incar, kpoints, poscar, with_pbe_potcars,
                              vasp_code, aiida_sandbox, use_incar, use_potcar,
                              use_kpoints):
    from aiida.plugins import CalculationFactory
    from aiida_cusp.data import VaspPotcarData
    # set the input plugin for code
    vasp_code.set_attribute('input_plugin', 'cusp.vasp')
    # setup calculation inputs
    inputs = {
        'code': vasp_code,
        'poscar': poscar,
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                }
            }
        },
    }
    if use_incar:
        inputs.update({'incar': incar})
    if use_kpoints:
        inputs.update({'kpoints': kpoints})
    if use_potcar:
        inputs.update({'potcar': VaspPotcarData.from_structure(poscar, 'pbe')})
    VaspCalculation = CalculationFactory('cusp.vasp')
    vasp_calc = VaspCalculation(inputs=inputs)
    if all([use_incar, use_kpoints, use_potcar]):
        vasp_calc.prepare_for_submission(aiida_sandbox)
    else:
        with pytest.raises(Exception) as exception:
            vasp_calc.prepare_for_submission(aiida_sandbox)
        assert "non-optional inputs are missing" in str(exception.value)
Example #10
def test_invalid_restart_inputs_raise(vasp_code, poscar, with_pbe_potcars,
                                      invalid_input):
    from aiida.plugins import CalculationFactory
    from aiida_cusp.data import VaspPotcarData
    VaspCalculation = CalculationFactory('cusp.vasp')
    inputs = {
        'code': vasp_code,
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                },
            },
        },
    }
    if invalid_input == 'poscar':
        inputs.update({'poscar': poscar})
    if invalid_input == 'potcar':
        potcar = VaspPotcarData.from_structure(poscar, 'pbe')
        inputs.update({'potcar': potcar})
    vasp_basic_calculation = VaspCalculation(inputs=inputs)
    with pytest.raises(Exception) as exception:
        # need to call the name-mangled protected method explicitly
        vasp_basic_calculation._VaspBasicCalculation__verify_restart_inputs()
    err_msg = "the following defined inputs are not allowed in a restarted"
    assert err_msg in str(exception.value)
Example #11
    def run_pw2wan(self):
        """Run pw2wannier90.x."""
        self.out('nscf_output', self.ctx.pw_nscf.outputs.output_parameters)
        self.out('nnkp_file', self.ctx.w90_pp.outputs.nnkp_file)

        self.ctx.pw2wannier_parameters = {
            'inputpp': {
                'write_amn': True,
                'write_unk': True,
                'write_mmn': True,
            }
        }
        settings = {'ADDITIONAL_RETRIEVE_LIST': ['*.amn', '*.mmn', '*.eig']}
        inputs = {
            'code': self.inputs.pw2wannier90_code,
            'parameters': orm.Dict(dict=self.ctx.pw2wannier_parameters),
            'parent_folder': self.ctx.pw_nscf.outputs.remote_folder,
            'nnkp_file': self.ctx.w90_pp.outputs.nnkp_file,
            'settings': Dict(dict=settings),
            'metadata': {
                'options': {
                    'resources': {
                        'num_machines': int(self.inputs.num_machines)
                    },
                    'max_wallclock_seconds':
                    int(self.inputs.max_wallclock_seconds),
                    'withmpi': True,
                }
            }
        }
        running = self.submit(
            CalculationFactory('quantumespresso.pw2wannier90'), **inputs)
        self.report('launching pw2wannier90<{}>(pw2wannier90 step)'.format(
            running.pk))
        return ToContext(pw2wannier=running)
Example #12
    def run_w90(self):
        """Run the Wannier90 main run with wannier90.x."""
        self.out('matrices_folder', self.ctx.pw2wannier.outputs.retrieved)
        self.out('pw2wan_remote_folder',
                 self.ctx.pw2wannier.outputs.remote_folder)

        inputs = {
            'code': self.inputs.wannier_code,
            'structure': self.inputs.structure,
            'parameters': orm.Dict(dict=self.ctx.w90_pp_parameters),
            'kpoints': self.ctx.kpoints_nscf_explicit,
            'kpoint_path': self.inputs.kpoint_path,
            'remote_input_folder': self.ctx.pw2wannier.outputs.remote_folder,
            'projections': self.inputs.projections,
            'metadata': {
                'options': {
                    'resources': {
                        'num_machines': int(self.inputs.num_machines)
                    },
                    'max_wallclock_seconds':
                    int(self.inputs.max_wallclock_seconds),
                    'withmpi': False,
                }
            }
        }

        running = self.submit(CalculationFactory('wannier90.wannier90'),
                              **inputs)
        self.report('launching Wannier90<{}> (main run)'.format(running.pk))

        return ToContext(w90=running)
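
The node returned through ToContext becomes available to the next workchain step as self.ctx.w90. A hedged sketch of such a follow-up step; the exit-code label and output link names are assumptions:

    def inspect_w90(self):
        """Hypothetical next step: verify the Wannier90 run and expose its outputs."""
        w90_calc = self.ctx.w90
        if not w90_calc.is_finished_ok:
            self.report('Wannier90 failed with exit status {}'.format(w90_calc.exit_status))
            return self.exit_codes.ERROR_SUB_PROCESS_FAILED  # assumed exit-code label
        self.out('wannier90_output_parameters', w90_calc.outputs.output_parameters)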
Example #13
    def test_exit_codes(self):
        """Test the properties to return various (sub) sets of existing exit codes."""
        ArithmeticAddCalculation = CalculationFactory('arithmetic.add')  # pylint: disable=invalid-name

        exit_codes = ArithmeticAddCalculation.exit_codes
        self.assertIsInstance(exit_codes, ExitCodesNamespace)
        for _, value in exit_codes.items():
            self.assertIsInstance(value, ExitCode)

        exit_statuses = ArithmeticAddCalculation.get_exit_statuses(['ERROR_NO_RETRIEVED_FOLDER'])
        self.assertIsInstance(exit_statuses, list)
        for entry in exit_statuses:
            self.assertIsInstance(entry, int)

        with self.assertRaises(AttributeError):
            ArithmeticAddCalculation.get_exit_statuses(['NON_EXISTING_EXIT_CODE_LABEL'])
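
The same exit-code namespace can also be inspected directly; a small illustrative snippet:

from aiida.plugins import CalculationFactory

add_calculation = CalculationFactory('arithmetic.add')
exit_code = add_calculation.exit_codes.ERROR_NO_RETRIEVED_FOLDER
print(exit_code.status, exit_code.message)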
Example #14
def launch_calculation(code, parent_folder, single_file, max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run a Pw2wannier90Calculation with some sample parameters and the provided inputs."""
    from aiida.orm import Dict
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    parameters = {
        'inputpp': {
            'write_amn': True,
            'write_mmn': True,
            'write_unk': False,
            'scdm_proj': True,
            'scdm_entanglement': 'isolated',
        }
    }

    settings = {'ADDITIONAL_RETRIEVE_LIST': ['*.amn', '*.mmn', '*.eig']}

    inputs = {
        'code': code,
        'parent_folder': parent_folder,
        'nnkp_file': single_file,
        'parameters': Dict(dict=parameters),
        'settings': Dict(dict=settings),
        'metadata': {
            'options': get_default_options(max_num_machines, max_wallclock_seconds, with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.pw2wannier90'), daemon, **inputs)
Example #15
def launch_calculation(code, kpoints_mesh, calculation, max_num_machines,
                       max_wallclock_seconds, with_mpi, daemon):
    """Run a PhCalculation."""
    from aiida import orm
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    # Check that the parent calculation node comes from quantumespresso.pw.
    # I cannot move this check into the option declaration, because CalcJobNode is not subclassed by the specific
    # calculation plugins (only Process is), and there is no feature yet to filter by the associated process_type.
    expected_process_type = 'aiida.calculations:quantumespresso.pw'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    parent_folder = calculation.get_outgoing(
        node_class=orm.RemoteData,
        link_label_filter='remote_folder').one().node

    inputs = {
        'code': code,
        'qpoints': kpoints_mesh,
        'parameters': orm.Dict(dict={'INPUTPH': {}}),
        'parent_folder': parent_folder,
        'metadata': {
            'options':
            get_default_options(max_num_machines, max_wallclock_seconds,
                                with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.ph'), daemon,
                          **inputs)
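
Several of the launchers on this page use get_default_options from aiida_quantumespresso.utils.resources to build the metadata options. A rough sketch of the dictionary it returns (an approximation of the helper, not the library's exact code):

def get_default_options(max_num_machines=1, max_wallclock_seconds=1800, with_mpi=False):
    """Return a metadata.options dictionary with the minimally required fields."""
    return {
        'resources': {'num_machines': int(max_num_machines)},
        'max_wallclock_seconds': int(max_wallclock_seconds),
        'withmpi': with_mpi,
    }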
Example #16
def test_process(logger_code):
    """
    Test running a calculation.

    Also checks its outputs.
    """
    from aiida.plugins import DataFactory, CalculationFactory
    from aiida.engine import run
    from aiida.common.extendeddicts import AttributeDict

    from aiida_logger.tests import TEST_DIR  # pylint: disable=wrong-import-position

    # Prepare input parameters
    parameters = AttributeDict()
    parameters.comment_string = '#'
    parameters.labels = True

    # Define input files to use
    SinglefileData = DataFactory('singlefile')
    datafile = SinglefileData(
        file=os.path.join(TEST_DIR, 'input_files', 'datafile'))

    # Set up calculation
    inputs = {
        'code': logger_code,
        'parameters': DataFactory('dict')(dict=parameters),
        'datafiles': {
            'datafile': datafile
        },
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1,
                    'num_mpiprocs_per_machine': 1
                },
                'parser_name': 'logger',
                'withmpi': False,
                'output_filename': 'logger.out'
            },
            'description': 'Test job submission with the aiida_logger plugin'
        },
    }

    result = run(CalculationFactory('logger'), **inputs)

    assert 'data' in result
    assert 'metadata' in result

    data = result['data']
    metadata = result['metadata']
    metadata = metadata.get_dict()

    assert 'labels' in metadata
    assert 'comments' in metadata
    assert metadata['labels'] == ['time', 'param1', 'param2', 'param3']
    assert metadata['comments'][0] == '# This is an example file'
    test_array = np.array([[1.0e+00, 3.0e+00, 4.0e+00, 5.0e+00],
                           [2.0e+00, 4.0e+00, 5.7e+00, -1.0e-01],
                           [3.0e+00, 1.0e-03, 1.0e+03, 8.0e-01]])
    np.testing.assert_allclose(data.get_array('content'), test_array)
Example #17
def test_is_neb(vasp_code, poscar, is_restart, is_neb):
    from aiida.orm import RemoteData
    from aiida.common.links import LinkType
    from aiida.plugins import CalculationFactory
    from aiida.engine import run_get_node
    from aiida_cusp.data import VaspPotcarData
    # define code
    vasp_code.set_attribute('input_plugin', 'cusp.vasp')
    # setup calculator
    inputs = {
        'code': vasp_code,
        'metadata': {'options': {'resources': {'num_machines': 1}}},
    }
    if is_neb:
        neb_path = {'node_00': poscar, 'node_01': poscar, 'node_02': poscar}
        inputs.update({'neb_path': neb_path})
    else:
        inputs.update({'poscar': poscar})
    VaspCalculation = CalculationFactory('cusp.vasp')
    vasp_calc_base = VaspCalculation(inputs=inputs)
    # if restart create a second calculator using a remote_folder connected
    # to the first calculation as input
    if is_restart:
        inputs.pop('poscar', None)
        inputs.pop('neb_path', None)
        remote_data = RemoteData(computer=vasp_code.computer, remote_path='')
        remote_data.add_incoming(vasp_calc_base.node.store(),
                                 link_type=LinkType.CREATE,
                                 link_label='remote_folder')
        inputs.update({'restart': {'folder': remote_data}})
        vasp_calc_base = VaspCalculation(inputs=inputs)
    # assert is_neb() returns the desired result
    result = vasp_calc_base.is_neb()
    assert result is is_neb
Example #18
def test_siestacalc_inpgen(aiida_profile, fixture_code, generate_structure,
                           generate_psml_fam):
    """Test the validation of subclasses of `InputsGenerator`."""

    generate_psml_fam("PseudoDojo/0.4/PBE/SR/standard/psml",
                      "Si")  #This will stay for the entire session!

    from aiida_siesta.utils.protocols_system.input_generators import SiestaCalculationInputGenerator

    inp_gen = SiestaCalculationInputGenerator(
        CalculationFactory("siesta.siesta"))
    structure = generate_structure()
    protocol = inp_gen.get_default_protocol_name()
    code = fixture_code("siesta.siesta")
    code.store()
    calc_engines = {
        "siesta": {
            'code': code.uuid,
            'options': {
                "resources": {
                    "num_mpiprocs_per_machine": 1
                },
                "max_wallclock_seconds": 360
            }
        }
    }

    build = inp_gen.get_filled_builder(structure,
                                       calc_engines,
                                       protocol,
                                       spin="polarized")

    assert "parameters" in build
Example #19
def test_neb_poscar_overwrite_switch(switch, tmpdir, vasp_code, aiida_sandbox,
                                     monkeypatch):
    import pathlib
    from aiida.orm import RemoteData
    from aiida.plugins import CalculationFactory
    from aiida_cusp.data import VaspPotcarData
    # set the input plugin for code
    vasp_code.set_attribute('input_plugin', 'cusp.vasp')
    # setup a remote restart directory with POSCAR and CONTCAR
    computer = vasp_code.computer
    subfolders = ['00', '01', '02']
    for subfolder in subfolders:
        pathlib.Path(tmpdir / subfolder).mkdir()
        pathlib.Path(tmpdir / subfolder / 'POSCAR').touch()
        pathlib.Path(tmpdir / subfolder / 'CONTCAR').touch()
    remote_path = str(tmpdir)
    remote_data = RemoteData(computer=computer, remote_path=remote_path)
    inputs = {
        'code': vasp_code,
        'restart': {
            'folder': remote_data,
            'contcar_to_poscar': switch
        },
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                }
            }
        },
    }
    VaspCalculation = CalculationFactory('cusp.vasp')
    # mock the is_neb() method to avoid searching for the remote folder's
    # parent CalcJobNode (we know it **is** a NEB calculation!)
    monkeypatch.setattr(VaspCalculation, 'is_neb', lambda self: True)
    vasp_neb_calculation = VaspCalculation(inputs=inputs)
    calcinfo = vasp_neb_calculation.prepare_for_submission(aiida_sandbox)
    remote_copy_list = calcinfo.remote_copy_list
    for subfolder in subfolders:
        # find the remote_copy_list for a specific NEB subfolder
        reduced_remote_list = []
        for (uuid, abspath_remote, relpath_input) in remote_copy_list:
            if pathlib.Path(abspath_remote).parent.name == subfolder:
                reduced_remote_list.append((abspath_remote, relpath_input))
        copied_files = [pathlib.Path(f).name for (f, _) in reduced_remote_list]
        # the contcar file will always be copied no matter if the switch is
        # set or not
        assert 'CONTCAR' in copied_files
        # now check for a single NEB subfolder if CONTCAR is copied on itself
        # or on the new POSCAR
        for (abspath_remote, relpath_input) in reduced_remote_list:
            filename_remote = pathlib.Path(abspath_remote).name
            filename_input = pathlib.Path(relpath_input).name
            if filename_remote == 'CONTCAR':
                if switch:  # True: CONTCAR --> POSCAR
                    assert filename_input == 'POSCAR'
                    assert 'POSCAR' not in copied_files
                else:  # False: CONTCAR --> CONTCAR
                    assert filename_input == 'CONTCAR'
                    assert 'POSCAR' in copied_files
Example #20
    def generate_calcinfo(entry_point_name, folder, inputs=None):
        """Generate a `CalcInfo` instance for testing calculation jobs.

        A new `CalcJob` process instance is instantiated,
        and `prepare_for_submission` is called to populate the supplied folder
        with the raw input files.

        Parameters
        ----------
        entry_point_name: str
        folder: aiida.common.folders.Folder
        inputs: dict or None

        """
        from aiida.engine.utils import instantiate_process
        from aiida.manage.manager import get_manager
        from aiida.plugins import CalculationFactory

        manager = get_manager()
        runner = manager.get_runner()

        process_class = CalculationFactory(entry_point_name)
        process = instantiate_process(runner, process_class, **inputs)

        calc_info = process.prepare_for_submission(folder)

        return calc_info
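
A hedged usage sketch of such a fixture inside a test; the fixture names and inputs are illustrative:

from aiida.common.folders import SandboxFolder

def test_calcinfo_generation(generate_calcinfo, some_valid_inputs):  # hypothetical fixtures
    with SandboxFolder() as folder:
        calc_info = generate_calcinfo('arithmetic.add', folder, some_valid_inputs)
        assert calc_info.codes_info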
Example #21
def test_accepted_parser_settings(vasp_code, setting, expected_exit_code):
    from aiida.plugins import CalculationFactory
    from aiida_cusp.parsers.vasp_file_parser import VaspFileParser
    # define code
    vasp_code.set_attribute('input_plugin', 'cusp.vasp')
    # setup calculator
    inputs = {
        'code': vasp_code,
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                },
                'parser_settings': setting,
            }
        },
    }
    VaspCalculation = CalculationFactory('cusp.vasp')
    vasp_calc_node = VaspCalculation(inputs=inputs).node
    # init custom options and instantiate the parser class
    parser = VaspFileParser(vasp_calc_node)
    exit_code = parser.verify_and_set_parser_settings()
    if not exit_code:
        assert exit_code is None
        assert parser.settings == setting
    else:
        assert exit_code.status == expected_exit_code
Example #22
def test_submit(aiida_profile, clear_database, ionization_file, charge_file,
                hkust1_cif, basic_options):
    """Test submitting a calculation"""
    from aiida.plugins import CalculationFactory, DataFactory

    EqeqCalculation = CalculationFactory('qeq.eqeq')

    # Prepare input parameters
    EQeqParameters = DataFactory('qeq.eqeq')
    parameters = EQeqParameters({'method': 'ewald'})

    inputs = {
        'code': tests.get_code(entry_point='qeq.eqeq'),
        'structure': hkust1_cif,
        'parameters': parameters,
        'charge_data': charge_file,
        'ionization_data': ionization_file,
        'metadata': {
            'options': basic_options,
            'label': "aiida_qeq EQEQ test",
            'description':
            "Test EQEQ job submission with the aiida_qeq plugin",
        },
    }
    result = run(EqeqCalculation, **inputs)

    charges_list = result['json_with_charges'].get_content()
    assert charges_list.startswith('[0.878')
Example #23
    def setUpClass(cls, *args, **kwargs):
        """Define a useful CalcJobNode to test the CalcJobResultManager.

        We emulate a node for the `TemplateReplacer` calculation job class. To do this we have to make sure the
        process type is set correctly and an output parameter node is created.
        """
        super(TestCalcJobResultManager, cls).setUpClass(*args, **kwargs)
        cls.process_class = CalculationFactory('templatereplacer')
        cls.process_type = get_entry_point_string_from_class(cls.process_class.__module__, cls.process_class.__name__)
        cls.node = CalcJobNode(computer=cls.computer, process_type=cls.process_type)
        cls.node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        cls.node.store()

        cls.key_one = 'key_one'
        cls.key_two = 'key_two'
        cls.val_one = 'val_one'
        cls.val_two = 'val_two'
        cls.keys = [cls.key_one, cls.key_two]

        cls.result_node = Dict(dict={
            cls.key_one: cls.val_one,
            cls.key_two: cls.val_two,
        }).store()

        cls.result_node.add_incoming(cls.node, LinkType.CREATE, cls.process_class.spec().default_output_node)
Example #24
def test_generic_datafile_parsing(fixture_retrieved):  # noqa: F811
    """Test a datafile with a comment section, labels and integer and floats."""
    from aiida_logger.parsers.file_parsers.datafile import DatafileParser

    dummy_calculation = CalculationFactory('arithmetic.add')
    exit_codes = dummy_calculation.exit_codes

    parameters = DataFactory('dict')(dict={
        'comment_string': '#',
        'labels': True
    })

    datafile_parser = DatafileParser(fixture_retrieved, 'datafile', exit_codes,
                                     parameters)
    result = datafile_parser.parse()
    data = result['data']
    metadata = result['metadata'].get_dict()

    assert 'labels' in metadata
    assert 'comments' in metadata
    assert metadata['labels'] == ['time', 'param1', 'param2', 'param3']
    assert metadata['comments'][0] == '# This is an example file'
    test_array = np.array([[1.0e+00, 3.0e+00, 4.0e+00, 5.0e+00],
                           [2.0e+00, 4.0e+00, 5.7e+00, -1.0e-01],
                           [3.0e+00, 1.0e-03, 1.0e+03, 8.0e-01]])
    np.testing.assert_allclose(data.get_array('content'), test_array)
Example #25
def test_output_node_namespaces(vasp_code, filepath, tmpdir):
    import pathlib
    from aiida.plugins import CalculationFactory
    from aiida_cusp.parsers.vasp_file_parser import VaspFileParser
    from aiida_cusp.utils.defaults import PluginDefaults
    # setup files in the temporary directory
    fpath = pathlib.Path(tmpdir) / filepath
    if not fpath.parent.exists():
        fpath.parent.mkdir(parents=True)
    fpath.touch()
    # define code
    vasp_code.set_attribute('input_plugin', 'cusp.vasp')
    # setup calculator and instantiate parser class
    inputs = {
        'code': vasp_code,
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1
                },
                'parser_settings': {
                    'parse_files': ['somefile']
                },
            },
        },
    }
    VaspCalculation = CalculationFactory('cusp.vasp')
    vasp_calc = VaspCalculation(inputs=inputs)
    parser = VaspFileParser(vasp_calc.node)
    exit_code = parser.parse(retrieved_temporary_folder=tmpdir)
    assert exit_code is None
    # test outputs can actually be linked (this would fail if the namespace
    # is not available)
    for linkname, node in parser.outputs.items():
        vasp_calc.out(linkname, node)
Example #26
 def inner(inputs=None, settings=None):
     from aiida.plugins import CalculationFactory
     from aiida.engine import run
     calculation = CalculationFactory('vasp.vasp')
     mock_vasp.store()
     create_authinfo(computer=mock_vasp.computer, store=True)
     kpoints, _ = vasp_kpoints
     inpts = AttributeDict()
     inpts.code = Code.get_from_string('mock-vasp@localhost')
     inpts.structure = vasp_structure
     inpts.parameters = vasp_params
     inpts.kpoints = kpoints
     inpts.potential = get_data_class(
         'vasp.potcar').get_potcars_from_structure(
             structure=inpts.structure,
             family_name=POTCAR_FAMILY_NAME,
             mapping=POTCAR_MAP)
     options = {
         'withmpi': False,
         'queue_name': 'None',
         'resources': {
             'num_machines': 1,
             'num_mpiprocs_per_machine': 1
         },
         'max_wallclock_seconds': 3600
     }
     inpts.metadata = {}
     inpts.metadata['options'] = options
     if inputs is not None:
         inpts.update(inputs)
     results_and_node = run.get_node(calculation, **inpts)
     return results_and_node
Example #27
def test_process(rhino_zfs_code):
    """Test running a calculation
    note this does not test that the expected outputs are created of output parsing"""
    from aiida.plugins import DataFactory, CalculationFactory
    from aiida.engine import run

    # Prepare input parameters
    DiffParameters = DataFactory('rhino_zfs')
    parameters = DiffParameters({'ignore-case': True})

    from aiida.orm import SinglefileData
    file1 = SinglefileData(
        file=os.path.join(TEST_DIR, "input_files", 'file1.txt'))
    file2 = SinglefileData(
        file=os.path.join(TEST_DIR, "input_files", 'file2.txt'))

    # set up calculation
    inputs = {
        'code': rhino_zfs_code,
        'parameters': parameters,
        'file1': file1,
        'file2': file2,
        'metadata': {
            'options': {
                'max_wallclock_seconds': 30
            },
        },
    }

    result = run(CalculationFactory('rhino_zfs'), **inputs)
    computed_diff = result['rhino_zfs'].get_content()

    assert 'content1' in computed_diff
    assert 'content2' in computed_diff
Example #28
def test_run_failure(new_workdir):
    """Testing CP2K failure"""

    from aiida.engine import run
    from aiida.plugins import CalculationFactory
    from aiida.orm import Dict
    from aiida.common.exceptions import OutputParsingError

    computer = get_computer(workdir=new_workdir)
    code = get_code(entry_point="cp2k", computer=computer)

    # a broken CP2K input
    parameters = Dict(dict={"GLOBAL": {"FOO_BAR_QUUX": 42}})
    options = {
        "resources": {
            "num_machines": 1,
            "num_mpiprocs_per_machine": 1
        },
        "max_wallclock_seconds": 1 * 2 * 60,
    }

    print("Submitted calculation...")
    inputs = {
        "parameters": parameters,
        "code": code,
        "metadata": {
            "options": options
        }
    }

    with pytest.raises(OutputParsingError):
        run(CalculationFactory("cp2k"), **inputs)
Example #29
def launch_calculation(code, calculation, max_num_machines,
                       max_wallclock_seconds, with_mpi, daemon):
    """Run a Q2rCalculation."""
    from aiida.plugins import CalculationFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    # Check that the parent calculation node comes from quantumespresso.ph.
    # I cannot move this check into the option declaration, because CalcJobNode is not subclassed by the specific
    # calculation plugins (only Process is), and there is no feature yet to filter by the associated process_type.
    expected_process_type = 'aiida.calculations:quantumespresso.ph'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    inputs = {
        'code': code,
        'parent_folder': calculation.outputs.remote_folder,
        'metadata': {
            'options':
            get_default_options(max_num_machines, max_wallclock_seconds,
                                with_mpi),
        }
    }

    launch.launch_process(CalculationFactory('quantumespresso.q2r'), daemon,
                          **inputs)
Example #30
def test_process(diff_code):
    """Test running a calculation
    note this does not test that the expected outputs are created of output parsing"""

    # Prepare input parameters
    DiffParameters = DataFactory("diff")
    parameters = DiffParameters({"ignore-case": True})

    file1 = SinglefileData(
        file=os.path.join(TEST_DIR, "input_files", "file1.txt"))
    file2 = SinglefileData(
        file=os.path.join(TEST_DIR, "input_files", "file2.txt"))

    # set up calculation
    inputs = {
        "code": diff_code,
        "parameters": parameters,
        "file1": file1,
        "file2": file2,
        "metadata": {
            "options": {
                "max_wallclock_seconds": 30
            },
        },
    }

    result = run(CalculationFactory("diff"), **inputs)
    computed_diff = result["diff"].get_content()

    assert "content1" in computed_diff
    assert "content2" in computed_diff