Example #1
def main(pot_family, import_from, queue, code, computer, no_import):
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    code = Code.get_from_string('{}@{}'.format(code, computer))
    workflow = WorkflowFactory('vasp.base')

    inputs = AttributeDict()
    inputs.structure = create_structure_Si()
    inputs.kpoints = create_kpoints()
    inputs.incar = create_params_simple()
    inputs.code = code
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})
    options = AttributeDict()
    options.queue_name = queue
    options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=options)

    submit(workflow, **inputs)
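
For context, the get_data_node and get_data_class helpers used throughout these examples are thin convenience wrappers around AiiDA's data factory. A minimal sketch of what they presumably look like (the real helpers live in aiida_vasp.utils.aiida_utils and may differ in detail; on older AiiDA versions DataFactory is imported from aiida.orm instead of aiida.plugins):

from aiida.plugins import DataFactory


def get_data_class(data_type):
    # Resolve the data class registered under the given entry point,
    # e.g. 'dict', 'structure' or 'vasp.potcar'.
    return DataFactory(data_type)


def get_data_node(data_type, *args, **kwargs):
    # Instantiate an unstored node of that class; storing is left to the caller.
    return get_data_class(data_type)(*args, **kwargs)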
Example #2
def test_store_duplicate(fresh_aiida_env, potcar_node_pair):
    """
    Storing a duplicate POTCAR node must fail.

    Uniqueness constraints to test for:

        * ``sha512`` attribute must be unique
        * the combination of all other attributes must be unique
    """
    potcar_path = data_path('potcar', 'As', 'POTCAR')

    file_node = get_data_node('vasp.potcar_file', file=potcar_path)
    file_node.set_attribute('sha512', 'foo')
    with pytest.raises(UniquenessError):
        file_node.store()

    file_node = get_data_node('vasp.potcar_file', file=potcar_path)
    file_node.set_attribute('symbol', 'Ta')
    with pytest.raises(UniquenessError):
        file_node.store()

    data_node = get_data_node('vasp.potcar', potcar_file_node=potcar_node_pair['file'])
    data_node.set_attribute('sha512', 'foo')
    with pytest.raises(UniquenessError):
        data_node.store()

    data_node = get_data_node('vasp.potcar', potcar_file_node=potcar_node_pair['file'])
    data_node.set_attribute('symbol', 'Ta')
    with pytest.raises(UniquenessError):
        data_node.store()

    assert get_data_class('vasp.potcar').find_one(symbol='As')
    assert get_data_class('vasp.potcar_file').find_one(symbol='As')
Example #3
    def ionic_trajectories(self):
        """Get trajectory properties as TrajectoryData and ArrayData nodes."""
        array_node = get_data_node('array')
        trajectory_node = get_data_node('array.trajectory')

        pmg_trajectory = self.vasprun_obj.ionic_steps
        trajectories = {}
        for key in pmg_trajectory[0].keys():
            trajectories[key] = [
                step.get(key, None) for step in pmg_trajectory
            ]

        structures = trajectories.pop('structure')
        trajectory_node.set_trajectory(
            stepids=arange(len(pmg_trajectory)),
            cells=array([
                get_data_node('structure', pymatgen=s).cell for s in structures
            ]),
            symbols=array([spec.symbol for spec in structures[0].species]),
            positions=array([[site.coords for site in struc.sites]
                             for struc in structures]))

        for key, data in trajectories.items():
            data = array(data)
            array_node.set_array(key, data)
            trajectory_node.set_array(key, data)

        return trajectory_node, array_node
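
A hypothetical way to consume the two returned nodes (parser stands for an instance of the surrounding parser class, and 'forces' is only an assumption about which keys pymatgen's ionic_steps provides):

trajectory_node, array_node = parser.ionic_trajectories()
positions = trajectory_node.get_positions()   # (nsteps, natoms, 3) array set via set_trajectory above
forces = array_node.get_array('forces')       # works only if 'forces' was among the parsed keys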
Example #4
def test_base(fresh_aiida_env, vasp_params, potentials, vasp_kpoints, vasp_structure, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import WorkflowFactory, Code
    from aiida import work

    rmq_config = None
    runner = work.Runner(poll_interval=0., rmq_config=rmq_config, enable_persistence=True)
    work.set_runner(runner)

    base_wf_proc = WorkflowFactory('vasp.base')

    mock_vasp.store()
    print(mock_vasp.get_remote_exec_path())
    comp = mock_vasp.get_computer()
    create_authinfo(computer=comp).store()

    # ~ os_env = os.environ.copy()
    # ~ sp.call(['verdi', 'daemon', 'start'], env=os_env)
    # ~ print sp.check_output(['verdi', 'daemon', 'status'], env=os_env)
    # ~ print sp.check_output(['which', 'verdi'], env=os_env)

    kpoints, _ = vasp_kpoints
    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = vasp_structure
    inputs.incar = vasp_params
    inputs.kpoints = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node(
        'parameter', dict={
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.settings = get_data_node('parameter', dict={'parser_settings': {'add_structure': False, 'should_parse_CONTCAR': False}})

    # ~ workchain = run(base_wf_proc, **inputs)
    results = work.run(base_wf_proc, **inputs)
    # ~ workchain = load_node(running.pk)
    # ~ timeout = 5
    # ~ waiting_for = 0
    # ~ while not workchain.is_terminated and waiting_for < timeout:
    # ~ time.sleep(1)
    # ~ waiting_for += 1
    assert 'retrieved' in results
    assert 'output_parameters' in results
    assert 'remote_folder' in results
Example #5
def test_get_data_node(fresh_aiida_env):
    """Make sure the get_data_node returns objects for the basic data types."""
    for data_type in BASIC_DATA_TYPES:
        the_module_ref = __import__('aiida.orm',
                                    fromlist=[data_type.capitalize()])
        aiida_data_type_class = getattr(the_module_ref, data_type.capitalize())
        if data_type == 'bool':
            data_node = get_data_node(data_type, True)
            aiida_data_node = aiida_data_type_class(True)
            assert data_node.value == aiida_data_node.value
        if data_type == 'int':
            data_node = get_data_node(data_type, 1)
            aiida_data_node = aiida_data_type_class(1)
            assert data_node.value == aiida_data_node.value
        if data_type == 'float':
            data_node = get_data_node(data_type, 1.0)
            aiida_data_node = aiida_data_type_class(1.0)
            assert data_node.value == aiida_data_node.value
        if data_type == 'str':
            data_node = get_data_node(data_type, '')
            aiida_data_node = aiida_data_type_class('')
            assert data_node.value == aiida_data_node.value
        if data_type == 'list':
            data_node = get_data_node(data_type, list=[])
            aiida_data_node = aiida_data_type_class(list=[])
            assert data_node.get_list() == aiida_data_node.get_list()
        if data_type == 'dict':
            data_node = get_data_node(data_type, dict={})
            aiida_data_node = aiida_data_type_class(dict={})
            assert set(data_node.get_dict()) == set(aiida_data_node.get_dict())

    with pytest.raises(MissingEntryPointError):
        get_data_node('garbage', True)
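
The loop above iterates over BASIC_DATA_TYPES, which is defined elsewhere in the test utilities; a plausible definition consistent with the branches exercised here would be:

# Assumed definition; the actual constant may differ.
BASIC_DATA_TYPES = {'bool', 'float', 'int', 'list', 'str', 'dict'}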
Example #6
def test_potcar_from_structure(fresh_aiida_env, potcar_family):
    """Test getting POTCARS from a family for a structure."""
    indium_2 = get_data_node('structure')
    indium_2.append_atom(position=[0, 0, 0], symbols='In')
    indium_2.append_atom(position=[1, 0, 0], symbols='In')
    in2_potcars = get_data_class('vasp.potcar').get_potcars_from_structure(indium_2, potcar_family, mapping={'In': 'In_d'})
    assert [kind[0] for kind in in2_potcars.keys()] == ['In']
Example #7
def test_vasp_calc_del_str_ext(run_vasp_calc):
    """Test a run of a basic VASP calculation where one wants to retrieve additional files and store only those."""
    # Add an additional file to the retrieve_list (which does not delete the file after parsing)
    # and check that it is actually retained
    from aiida_vasp.calcs.vasp import VaspCalculation
    inputs = {}
    extra_file_to_keep = 'POSCAR'
    inputs['settings'] = get_data_node('dict',
                                       dict={
                                           'ALWAYS_STORE': False,
                                           'ADDITIONAL_RETRIEVE_LIST': [extra_file_to_keep]
                                       })
    _, node = run_vasp_calc(inputs)
    retrieve_list_ref = [
        '_scheduler-stdout.txt', '_scheduler-stderr.txt', 'POSCAR'
    ]
    retrieve_temporary_list_ref = VaspCalculation._ALWAYS_RETRIEVE_LIST
    retrieve_list = node.get_retrieve_list()
    retrieve_temporary_list = node.get_retrieve_temporary_list()
    assert set(retrieve_temporary_list) == set(retrieve_temporary_list_ref)
    assert set(retrieve_list_ref) == set(retrieve_list)
    files = node.outputs.retrieved.list_objects()
    file_names = [single_file.name for single_file in files]
    assert set(file_names) == set(retrieve_list_ref)
Example #8
    def run(self):
        import plumpy
        from aiida.engine.processes.calcjobs.tasks import RETRIEVE_COMMAND
        from aiida.common.folders import SandboxFolder

        _ = super(VaspImmigrant, self).run()

        # Make sure the retrieve list is set (done in presubmit so we need to call that also)
        with SandboxFolder() as folder:
            self.presubmit(folder)

        settings = self.inputs.get('settings', None)
        settings = settings.get_dict() if settings else {}
        remote_path = settings.get('import_from_path', None)
        if not remote_path:
            raise InputValidationError(
                'immigrant calculations need an input "settings" containing a key "import_from_path"!'
            )
        self.node.set_remote_workdir(remote_path)  # pylint: disable=protected-access
        remotedata = get_data_node('remote',
                                   computer=self.node.computer,
                                   remote_path=remote_path)
        remotedata.add_incoming(self.node,
                                link_type=LinkType.CREATE,
                                link_label='remote_folder')
        remotedata.store()

        return plumpy.Wait(msg='Waiting to retrieve', data=RETRIEVE_COMMAND)
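
As the check above implies, an immigrant run needs an import_from_path entry in its settings input. A hedged caller-side sketch (the remote path is illustrative and the remaining inputs follow the usual VaspCalculation spec):

inputs = AttributeDict()
inputs.settings = get_data_node('dict', dict={'import_from_path': '/scratch/user/finished_vasp_run'})
# plus code, structure, kpoints, potentials and options as for a regular VASP calculation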
Example #9
    def define(cls, spec):
        super(VerifyWorkChain, cls).define(spec)
        spec.expose_inputs(cls._next_workchain)
        spec.input('verify.max_iterations',
                   valid_type=get_data_class('int'),
                   required=False,
                   default=lambda: get_data_node('int', 1),
                   help="""
                   The maximum number of iterations to perform.
                   """)
        spec.exit_code(0, 'NO_ERROR', message='the sun is shining')
        spec.exit_code(420,
                       'ERROR_NO_CALLED_WORKCHAIN',
                       message='no called workchain detected')
        spec.exit_code(
            500,
            'ERROR_UNKNOWN',
            message='unknown error detected in the verify workchain')
        spec.outline(
            cls.initialize,
            while_(cls.run_next_workchains)(
                cls.init_next_workchain,
                cls.run_next_workchain,
                cls.verify_next_workchain
            ),
            cls.finalize
        )  # yapf: disable

        spec.expose_outputs(cls._next_workchain)
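
For reference, the exit codes declared above would typically be returned from one of the outline steps. A minimal sketch, assuming verify_next_workchain inspects a hypothetical self.ctx.workchains list populated by the earlier steps:

    def verify_next_workchain(self):
        # Hypothetical body: abort with the declared exit code if no child workchain was run.
        if not self.ctx.workchains:
            return self.exit_codes.ERROR_NO_CALLED_WORKCHAIN
        return self.exit_codes.NO_ERROR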
Example #10
def test_get_base_data(aiida_env):
    """Make sure we can retrieve the Bool data type through get_data_class."""
    bool_cls = get_data_class('bool')
    bool_obj = get_data_node('bool', True)
    from aiida.orm.data.base import Bool
    assert bool_cls == Bool
    assert isinstance(bool_obj, Bool)
Example #11
def test_potcar_from_file(fresh_aiida_env):
    """Test creating a node pair from a file, creating the data node first."""
    potcar_cls = get_data_class('vasp.potcar')
    _, created = potcar_cls.get_or_create_from_file(data_path('potcar', 'As', 'POTCAR'))
    assert created
    _, created = potcar_cls.get_or_create_from_file(data_path('potcar', 'As', 'POTCAR'))
    assert not created
Example #12
def test_vasp_calc_extra(run_vasp_calc):
    """Test a run of a basic VASP calculation where one wants to keep additional files after parsing is completed."""
    # Add an additional file to the retrieve_list (which does not delete the file after parsing)
    # and check that it is actually there
    from aiida_vasp.calcs.vasp import VaspCalculation
    inputs = {}
    extra_file_to_keep = 'POSCAR'
    inputs['settings'] = get_data_node(
        'dict', dict={'ADDITIONAL_RETRIEVE_LIST': [extra_file_to_keep]})
    _, node = run_vasp_calc(inputs)
    retrieve_temporary_list_ref = []
    retrieve_list_ref = VaspCalculation._ALWAYS_RETRIEVE_LIST + [
        '_scheduler-stdout.txt', '_scheduler-stderr.txt', 'POSCAR'
    ]
    retrieve_temporary_list = node.get_retrieve_temporary_list()
    retrieve_list = node.get_retrieve_list()
    assert retrieve_temporary_list == retrieve_temporary_list_ref
    assert set(retrieve_list_ref) == set(retrieve_list)
    files = node.outputs.retrieved.list_objects()
    file_names = [single_file.name for single_file in files]
    # Exclude Wannier files as they are not in the test set
    retrieve_list_ref_no_wannier = [
        item for item in retrieve_list_ref if 'wannier' not in item
    ]
    assert set(file_names) == set(retrieve_list_ref_no_wannier)
Example #13
    def _enable_charge_density_restart(self):
        """Enable a restart from a previous charge density file."""
        # Make sure we set the restart folder (the charge density file is not copied
        # locally, but is present in the remote folder of the previous calculation)
        self.ctx.inputs.restart_folder = self.ctx.workchains[-1].outputs.remote_folder
        # Also enable the clean_workdir again
        self.ctx.inputs.clean_workdir = get_data_node('bool', True)
Example #14
def vasp_structure_poscar(vasp_structure):
    """Fixture: Well formed POSCAR contents."""
    aiida_structure = vasp_structure
    if isinstance(vasp_structure, get_data_class('cif')):
        ase_structure = vasp_structure.get_ase()
        aiida_structure = get_data_node('structure', ase=ase_structure)
    writer = PoscarParser(data=aiida_structure)
    return writer
Example #15
    def get_output_parameters(self):
        """Get the output parameter node."""
        output_parameter_dict = self.vasprun_adapter.output_dict
        if self.outcar_adapter:
            output_parameter_dict.update(self.outcar_adapter.output_dict)
        output_parameters = get_data_node('parameter', dict=output_parameter_dict)
        return output_parameters
Example #16
def main(pot_family, import_from, queue, code, computer, no_import):
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    code = Code.get_from_string('{}@{}'.format(code, computer))
    workflow = WorkflowFactory('vasp.relax')

    inputs = AttributeDict()
    inputs.structure = create_structure_perturbed()
    inputs.kpoints = AttributeDict()
    inputs.kpoints.distance = get_data_node('float', 0.2)
    inputs.relax = AttributeDict()
    inputs.convergence = AttributeDict()
    inputs.convergence.shape = AttributeDict()
    inputs.convergence.on = get_data_node('bool', True)
    inputs.convergence.positions = get_data_node('float', 0.1)
    inputs.incar_add = get_data_node('parameter', dict={
        'nsw': 1, 'ediffg': -0.0001, 'encut': 240, 'ismear': 0, 'sigma': 0.1, 'system': 'test-case:test_relax_wf',
    })  # yapf: disable
    inputs.restart = AttributeDict()
    inputs.code = code
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})
    options = AttributeDict()
    options.queue_name = queue
    options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=options)

    submit(workflow, **inputs)
Example #17
    def get_param_node(self):
        """
        Create a ParameterData node containing the incar key/value pairs.

        :kwarg annotate: [True] store the node and add extras to preserve
            order and comments of the INCAR. Implies that the node gets stored in the process!
        """
        node = get_data_node('parameter', dict=self.get_dict())
        return node
Example #18
def potcar_node_pair(fresh_aiida_env):
    """Create a POTCAR node pair."""
    potcar_path = data_path('potcar', 'As', 'POTCAR')
    potcar_file_node = get_data_node('vasp.potcar_file', file=potcar_path)
    potcar_file_node.store()
    return {
        'file': potcar_file_node,
        'potcar': get_data_class('vasp.potcar').find_one(symbol='As')
    }
Example #19
def test_vasp_calc_delete(run_vasp_calc):
    """Test a run of a basic VASP calculation where one does not want to store the always retrieved files after parsing."""
    retrieve_list_ref = ['_scheduler-stdout.txt', '_scheduler-stderr.txt']
    inputs = {}
    inputs['settings'] = get_data_node('dict', dict={'ALWAYS_STORE': False})
    _, node = run_vasp_calc(inputs)
    files = node.outputs.retrieved.list_objects()
    file_names = [single_file.name for single_file in files]
    assert set(file_names) == set(retrieve_list_ref)
Example #20
def test_potcar_from_structure(fresh_aiida_env, potcar_family):
    """Test getting POTCARS from a family for a structure."""
    indium_2 = get_data_node('structure')
    indium_2.append_atom(position=[0, 0, 0], symbols='In')
    indium_2.append_atom(position=[1, 0, 0], symbols='In', name='In1')
    in2_potcars = get_data_class('vasp.potcar').get_potcars_from_structure(indium_2, potcar_family, mapping={'In': 'In_d', 'In1': 'In_d'})
    assert set(in2_potcars.keys()) == {'In', 'In1'}
    in_d_potcar = get_data_class('vasp.potcar').find(family_name=potcar_family, full_name='In_d')[0]
    assert in2_potcars['In'].uuid == in_d_potcar.uuid == in2_potcars['In1'].uuid
Example #21
def test_parser_dict(fresh_aiida_env, incar_dict_example):
    """
    Pass a dict to the INCAR parser.

    Should return an AiiDA Dict data structure.

    """

    parser = IncarParser(data=get_data_node('dict', dict=incar_dict_example))
    assert isinstance(parser.incar, get_data_class('dict'))
Example #22
    def _structure(self):
        """
        Get the input structure as AiiDA StructureData.

        This is required in order to also support CifData as input.
        """
        structure = self.inputs.structure
        if not hasattr(structure, 'get_pymatgen'):
            structure = get_data_node('structure', ase=structure.get_ase())
        return structure
Example #23
def duplicate_potcar_data(potcar_node):
    """Create and store (and return) a duplicate of a given PotcarData node."""
    from aiida_vasp.data.potcar import temp_potcar
    file_node = get_data_node('vasp.potcar_file')
    with temp_potcar(potcar_node.get_content()) as potcar_file:
        file_node.add_file(potcar_file.strpath)
        file_node.set_attribute('sha512', 'abcd')
        file_node.set_attribute('full_name', potcar_node.full_name)
        file_node.store()
    data_node, _ = get_data_class('vasp.potcar').get_or_create(file_node)
    return data_node
Example #24
    def _init_inputs(self):
        """Initialize inputs."""
        try:
            self._verbose = self.inputs.verbose.value
            self.ctx.inputs.verbose = self.inputs.verbose
        except AttributeError:
            pass
        # If we want to keep previous files for relaunch, do not clean remote folders
        if self.extract_bands() or self.extract_dos():
            self.ctx.inputs.clean_workdir = get_data_node('bool', False)
        self._init_structure()
        self._init_kpoints()
Example #25
def vasp_calc_and_ref(create_calc_and_ref, ref_incar):
    """Fixture for non varying setup of a vasp calculation"""
    from aiida_vasp.calcs.vasp import VaspCalculation
    calc, ref = create_calc_and_ref(VaspCalculation, ref_incar=ref_incar)
    calc.use_settings(
        get_data_node(
            'parameter',
            dict={'parser_settings': {
                'add_bands': True,
                'add_dos': True
            }}))
    return calc, ref
Example #26
    def prepare_calculation(self):
        """Set the restart folder and set INCAR tags for a restart."""
        if isinstance(self.ctx.restart_calc, self._calculation_class):
            self.ctx.inputs.restart_folder = self.ctx.restart_calc.out.remote_folder
            old_incar = AttributeDict(self.ctx.inputs.incar.get_dict())
            incar = old_incar.copy()
            if 'istart' in incar:
                incar.istart = 1
            if 'icharg' in incar:
                incar.icharg = 1
            if incar != old_incar:
                self.ctx.inputs.incar = get_data_node('parameter', dict=incar)
Example #27
def vasp2w90_calc_and_ref(create_calc_and_ref, ref_incar_vasp2w90,
                          wannier_params, wannier_projections, ref_win):
    """Fixture for non varying setup of a vasp2w90 calculation"""
    from aiida_vasp.calcs.vasp2w90 import Vasp2w90Calculation
    calc, ref = create_calc_and_ref(Vasp2w90Calculation,
                                    ref_incar=ref_incar_vasp2w90)
    calc.use_wannier_parameters(wannier_params)
    calc.use_wannier_projections(wannier_projections)
    calc.use_settings(get_data_node('parameter', dict={'poscar_precision': 12}))
    ref['win'] = ref_win
    return calc, ref
Example #28
    def band_structure(self):
        """Return the band structure as a BandsData node."""
        bands_node = get_data_node('array.bands')
        bands_node.set_kpointsdata(self.actual_kpoints)  # has to be set first
        try:
            bands_object = self.vasprun_obj.get_band_structure()
            structure = get_data_node('structure', pymatgen=bands_object.structure)
            bands_node.set_cell_from_structure(structure)
            bands_data = bands_object.bands
            bands_node_data = []
            for spin in [Spin.up, Spin.down]:
                if spin in bands_data:
                    bands_node_data.append(bands_data[spin].transpose())
            bands_node_data = array(bands_node_data)
            bands_node.set_bands(bands=bands_node_data)
        except AttributeError:
            if self.logger:
                self.logger.warning(
                    'Band structure could not be parsed, possibly because the final structure was missing from the XML')
        return bands_node
Example #29
def test_relax_wc(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    # def test_relax_wc(fresh_aiida_env, vasp_params, potentials, mock_vasp, mock_relax_wc):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run

    workchain = WorkflowFactory('vasp.relax')

    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)

    structure = PoscarParser(file_path=data_path('test_relax_wc', 'inp', 'POSCAR')).structure
    kpoints = KpointsParser(file_path=data_path('test_relax_wc', 'inp', 'KPOINTS')).kpoints
    parameters = IncarParser(file_path=data_path('test_relax_wc', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_relax_wc'
    parameters = {'incar': {k: v for k, v in parameters.items() if k not in ['isif', 'ibrion', 'nsw', 'ediffg']}}
    parameters['relax'] = {}
    parameters['relax']['perform'] = True
    parameters['relax']['algo'] = 'cg'
    parameters['relax']['force_cutoff'] = 0.01

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.kpoints = kpoints
    inputs.parameters = get_data_node('dict', dict=parameters)
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'max_wallclock_seconds': 1,
                                       'import_sys_environment': True,
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    inputs.verbose = get_data_node('bool', True)
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    assert 'relax' in results
    relax = results['relax']
    assert 'structure' in relax
    sites = relax['structure'].sites
    assert sites[0].kind_name == 'Si'
    assert sites[1].kind_name == 'Si'
    np.testing.assert_allclose(sites[0].position, [4.8125, 4.8125, 4.8125])
    np.testing.assert_allclose(sites[1].position, [0.6875, 0.6875, 0.715])
Example #30
    def parameters(self):
        """Assemble the 'output_params' node."""

        parameters = {}
        outcar_parameters = self._parsed_data.get('ocp_parameters')
        if outcar_parameters is not None:
            parameters.update(outcar_parameters)
        for quantity in self.settings.get('output_params',
                                          DEFAULT_OPTIONS['output_params']):
            parameters[quantity] = getattr(self, quantity)

        output_parameters = get_data_node('parameter', dict=parameters)

        return output_parameters