Example #1
    def inner(settings=None, parameters=None):

        inputs = AttributeDict()

        metadata = AttributeDict({
            'options': {
                'resources': {
                    'num_machines': 1,
                    'num_mpiprocs_per_machine': 1
                }
            }
        })

        if settings is not None:
            inputs.settings = Dict(dict=settings)

        if isinstance(parameters, dict):
            parameters = get_data_class('dict')(dict=parameters)

        if parameters is None:
            parameters = AttributeDict(vasp_params.get_dict())
            parameters = get_data_class('dict')(dict=parameters)

        inputs.code = vasp_code
        inputs.metadata = metadata
        inputs.parameters = parameters
        inputs.kpoints, _ = vasp_kpoints
        inputs.structure = vasp_structure
        inputs.potential = potentials

        inputs.wannier_parameters = wannier_params
        inputs.wannier_projections = wannier_projections

        return inputs
Example #2
def test_parser_nodes(request, calc_with_retrieved):
    """Test a few basic node items of the parser."""
    from aiida.plugins import ParserFactory

    settings_dict = {
        'parser_settings': {
            'add_bands': True,
            'add_kpoints': True,
            'add_misc': ['fermi_level']
        }
    }

    file_path = str(request.fspath.join('..') + '../../../test_data/basic')

    node = calc_with_retrieved(file_path, settings_dict)

    parser_cls = ParserFactory('vasp.vasp')
    result, _ = parser_cls.parse_from_node(
        node, store_provenance=False, retrieved_temporary_folder=file_path)

    misc = result['misc']
    bands = result['bands']
    kpoints = result['kpoints']

    assert isinstance(misc, get_data_class('dict'))
    assert isinstance(bands, get_data_class('array.bands'))
    assert isinstance(kpoints, get_data_class('array.kpoints'))
    assert misc.get_dict()['fermi_level'] == 5.96764939
Example #3
def test_traj_forces(fresh_aiida_env, vasprun_parser):
    """
    Check that the parsed forces in TrajectoryData are of type ArrayData.

    Also check that the entries are as expected, e.g. correct values and
    that the first and last entries are the same (static run).

    """

    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser, quantity_keys=['trajectory'])
    data_obj = NodeComposer.compose('array.trajectory', inputs)

    # test object
    ref_obj = get_data_class('array.trajectory')
    assert isinstance(data_obj, ref_obj)
    forces = data_obj.get_array('forces')
    # test a few entries
    assert np.all(forces[0][0] == np.array([-0.24286901, 0.0, 0.0]))
    assert np.all(forces[0][-1] == np.array(
        [-0.73887169, -0.43727184, -0.43727184]))
    # first and last ionic step should give identical forces (static run)
    assert np.all(forces[0][-1] == forces[1][-1])
    assert np.all(forces[0][0] == forces[1][0])
Example #4
def test_write_parser(fresh_aiida_env, tmpdir, incar_dict_example):
    """Test writing an INCAR from a dict, read and compare."""

    # create AiiDA dictionary instance
    incar_params = get_data_class('dict')(dict=incar_dict_example)
    assert isinstance(incar_params, get_data_class('dict'))
    parser = IncarParser(data=incar_params)

    # now write
    temp_file = str(tmpdir.join('INCAR'))
    parser.write(temp_file)
    # read again
    parser_reparse = IncarParser(file_path=temp_file)
    result = parser_reparse.incar
    # compare
    comp_dict = {'encut': 350, 'sigma': 0.05, 'lreal': False, 'prec': 'Accurate'}
    assert str(sorted(result)) == str(sorted(comp_dict))

    # Test validation
    with_invalid = dict(incar_dict_example)
    with_invalid.update(foo='bar')
    incar_params = get_data_class('dict')(dict=with_invalid)
    parser = IncarParser(data=incar_params)
    with pytest.raises(InputValidationError):
        parser.write(temp_file)
Example #5
def test_store_duplicate(fresh_aiida_env, potcar_node_pair):
    """
    Storing a duplicate POTCAR node must fail.

    Uniqueness constraints to test for:

        * ``sha512`` attribute must be unique
        * the combination of all other attributes must be unique
    """
    potcar_path = data_path('potcar', 'As', 'POTCAR')

    file_node = get_data_node('vasp.potcar_file', file=potcar_path)
    file_node.set_attribute('sha512', 'foo')
    with pytest.raises(UniquenessError):
        file_node.store()

    file_node = get_data_node('vasp.potcar_file', file=potcar_path)
    file_node.set_attribute('symbol', 'Ta')
    with pytest.raises(UniquenessError):
        file_node.store()

    data_node = get_data_node('vasp.potcar', potcar_file_node=potcar_node_pair['file'])
    data_node.set_attribute('sha512', 'foo')
    with pytest.raises(UniquenessError):
        data_node.store()

    data_node = get_data_node('vasp.potcar', potcar_file_node=potcar_node_pair['file'])
    data_node.set_attribute('symbol', 'Ta')
    with pytest.raises(UniquenessError):
        data_node.store()

    assert get_data_class('vasp.potcar').find_one(symbol='As')
    assert get_data_class('vasp.potcar_file').find_one(symbol='As')
Example #6
 def inner(inputs=None, settings=None):
     from aiida.plugins import CalculationFactory
     from aiida.engine import run
     calculation = CalculationFactory('vasp.vasp')
     mock_vasp.store()
     create_authinfo(computer=mock_vasp.computer, store=True)
     kpoints, _ = vasp_kpoints
     parameters = vasp_params.get_dict()
     inpts = AttributeDict()
     inpts.code = Code.get_from_string('mock-vasp@localhost')
     inpts.structure = vasp_structure
     inpts.parameters = get_data_class('dict')(dict=parameters)
     inpts.kpoints = kpoints
     inpts.potential = get_data_class(
         'vasp.potcar').get_potcars_from_structure(
             structure=inpts.structure,
             family_name=POTCAR_FAMILY_NAME,
             mapping=POTCAR_MAP)
     options = {
         'withmpi': False,
         'queue_name': 'None',
         'resources': {
             'num_machines': 1,
             'num_mpiprocs_per_machine': 1
         },
         'max_wallclock_seconds': 3600
     }
     inpts.metadata = {}
     inpts.metadata['options'] = options
     if inputs is not None:
         inpts.update(inputs)
     results_and_node = run.get_node(calculation, **inpts)
     return results_and_node
Example #7
def test_potcar_from_structure(fresh_aiida_env, potcar_family):
    """Test getting POTCARS from a family for a structure."""
    indium_2 = get_data_node('structure')
    indium_2.append_atom(position=[0, 0, 0], symbols='In')
    indium_2.append_atom(position=[1, 0, 0], symbols='In', name='In1')
    in2_potcars = get_data_class('vasp.potcar').get_potcars_from_structure(indium_2, potcar_family, mapping={'In': 'In_d', 'In1': 'In_d'})
    assert set(in2_potcars.keys()) == {'In', 'In1'}
    in_d_potcar = get_data_class('vasp.potcar').find(family_name=potcar_family, full_name='In_d')[0]
    assert in2_potcars['In'].uuid == in_d_potcar.uuid == in2_potcars['In1'].uuid
Example #8
def test_get_data_class(fresh_aiida_env):
    """Make sure the get_data_class accept valid types."""
    for data_type in BASIC_DATA_TYPES:
        data_type_class = get_data_class(data_type)
        the_module_ref = __import__('aiida.orm',
                                    fromlist=[data_type.capitalize()])
        aiida_data_type_class = getattr(the_module_ref, data_type.capitalize())
        assert data_type_class == aiida_data_type_class

    with pytest.raises(MissingEntryPointError):
        get_data_class('garbage')
Example #9
 def define(cls, spec):
     super(Vasp2w90Calculation, cls).define(spec)
     spec.input('wannier_parameters',
                valid_type=get_data_class('dict'),
                required=False,
                help='Input parameters for the Wannier90 interface.')
     spec.input(
         'wannier_projections',
         valid_type=(get_data_class('orbital'), List),
         required=False,
         help='Projections to be defined in the Wannier90 input file.')
Example #10
def test_parser_nodes(_parse_me):
    """Test a few basic node items of the parser."""

    _, nodes = _parse_me(folder='basic')
    parameters = nodes['output_parameters']
    bands = nodes['output_bands']
    kpoints = nodes['output_kpoints']
    assert isinstance(parameters, get_data_class('parameter'))
    assert isinstance(bands, get_data_class('array.bands'))
    assert isinstance(kpoints, get_data_class('array.kpoints'))
    assert parameters.get_dict()['fermi_level'] == 5.96764939
Example #11
def test_create_equivalence(potcar_family):
    """Create from file (during upload) and from contents and ensure equivalence."""
    potcar_file_cls = get_data_class('vasp.potcar_file')
    potcar_path = ['potcar', 'As', 'POTCAR']
    potcar_file, created = potcar_file_cls.get_or_create_from_contents(read_file(*potcar_path, mode='rb'))
    assert not created
    assert potcar_file.sha512 == potcar_file_cls.find_one(element='As').sha512
    assert potcar_file.uuid == potcar_file_cls.find_one(element='As').uuid

    potcar_cls = get_data_class('vasp.potcar')
    potcar, created = potcar_cls.get_or_create_from_contents(read_file(*potcar_path, mode='rb'))
    assert not created
    assert potcar.sha512 == potcar_cls.find_one(element='As').sha512
    assert potcar.uuid == potcar_cls.find_one(element='As').uuid
Example #12
def test_potcar_get_or_create(fresh_aiida_env, potcar_node_pair):
    """Test get_or_create method of PotcarData."""
    potcar_cls = get_data_class('vasp.potcar')
    file_cls = get_data_class('vasp.potcar_file')
    file_as = potcar_node_pair['file']
    potcar_as = potcar_node_pair['potcar']
    node_potcar_as, created_potcar_as = potcar_cls.get_or_create(file_as)
    assert not created_potcar_as
    assert potcar_as.pk == node_potcar_as.pk

    potcar_in_path = data_path('potcar', 'In_d', 'POTCAR')
    node_potcar_in, created_potcar_in = potcar_cls.get_or_create(file_cls(file=potcar_in_path))
    assert created_potcar_in
    assert potcar_cls.exists(sha512=node_potcar_in.sha512)
Example #13
    def trajectory(self):
        """
        Fetch unitcells, positions, species, forces and stress.

        Collect them for all calculation steps from parsevasp and store as TrajectoryData.

        """

        unitcell = self._xml.get_unitcell("all")
        positions = self._xml.get_positions("all")
        species = self._xml.get_species()
        forces = self._xml.get_forces("all")
        stress = self._xml.get_stress("all")
        # make sure all are sorted, first to last calculation
        # (species is constant)
        unitcell = sorted(unitcell.items())
        positions = sorted(positions.items())
        forces = sorted(forces.items())
        stress = sorted(stress.items())
        # convert to numpy (wrap map in list, since map returns an iterator in Python 3)
        unitcell = np.asarray(list(map(operator.itemgetter(1), unitcell)))
        positions = np.asarray(list(map(operator.itemgetter(1), positions)))
        forces = np.asarray(list(map(operator.itemgetter(1), forces)))
        stress = np.asarray(list(map(operator.itemgetter(1), stress)))
        # Aiida wants the species as symbols, so invert
        elements = _invert_dict(parsevaspct.elements)
        symbols = np.asarray([elements[item].title() for item in species])

        if (unitcell is not None) and (positions is not None) and \
           (species is not None) and (forces is not None) and \
           (stress is not None):
            array_node = get_data_class('array')()
            trajectory_node = get_data_class('array.trajectory')()

            keys = ('cells', 'positions', 'symbols', 'forces', 'stress')

            trajectory_node.set_trajectory(stepids=np.arange(
                unitcell.shape[0]),
                                           cells=unitcell,
                                           symbols=symbols,
                                           positions=positions)

            for key, data in zip(
                    keys, (unitcell, positions, symbols, forces, stress)):
                array_node.set_array(key, data)
                trajectory_node.set_array(key, data)
            return trajectory_node, array_node

        return None
Example #14
    def _compose_array_trajectory(node_type, inputs):
        """
        Compose a trajectory node.

        Parameters
        ----------
        node_type : str
            'array.trajectory'
        inputs : dict
            Trajectory data as parsed by VasprunParser. The keys are
            'cells', 'positions', 'symbols', 'forces', 'stress', 'steps'.

        Returns
        -------
        node : TrajectoryData
            Node storing the data. Due to the definition of TrajectoryData in
            aiida-core v1.0.0, the same keys as those in inputs are used;
            the value for 'symbols' is stored with set_attribute and
            the other values are stored with set_array.

        """
        node = get_data_class(node_type)()
        for item in inputs:
            for key, value in inputs[item].items():
                if key == 'symbols':
                    node.set_attribute(key, value)
                else:
                    node.set_array(key, value)
        return node
Example #15
    def _compose_array_kpoints(node_type, inputs):
        """Compose an array.kpoints node based on inputs."""
        node = get_data_class(node_type)()
        for key in inputs:
            mode = inputs[key]['mode']
            if mode == 'explicit':
                kpoints = inputs[key].get('points')
                cartesian = not kpoints[0].get_direct()
                kpoint_list = []
                weights = []
                for kpoint in kpoints:
                    kpoint_list.append(kpoint.get_point().tolist())
                    weights.append(kpoint.get_weight())

                if weights[0] is None:
                    weights = None

                node.set_kpoints(kpoint_list,
                                 weights=weights,
                                 cartesian=cartesian)

            if mode == 'automatic':
                mesh = inputs[key].get('divisions')
                shifts = inputs[key].get('shifts')
                node.set_kpoints_mesh(mesh, offset=shifts)
        return node
Example #16
 def _compose_array_bands(self, node_type, inputs):
     """Compose a bands node."""
     node = get_data_class(node_type)()
     kpoints = self._compose_array_kpoints('array.kpoints', {'kpoints': inputs['kpoints']})
     node.set_kpointsdata(kpoints)
     node.set_bands(inputs['eigenvalues'], occupations=inputs['occupancies'])
     return node
Example #17
    def define(cls, spec):
        super(VerifyWorkChain, cls).define(spec)
        spec.expose_inputs(cls._next_workchain)
        spec.input('verify.max_iterations',
                   valid_type=get_data_class('int'),
                   required=False,
                   default=lambda: get_data_node('int', 1),
                   help="""
                   The maximum number of iterations to perform.
                   """)
        spec.exit_code(0, 'NO_ERROR', message='the sun is shining')
        spec.exit_code(420,
                       'ERROR_NO_CALLED_WORKCHAIN',
                       message='no called workchain detected')
        spec.exit_code(
            500,
            'ERROR_UNKNOWN',
            message='unknown error detected in the verify workchain')
        spec.outline(
            cls.initialize,
            while_(cls.run_next_workchains)(
                cls.init_next_workchain,
                cls.run_next_workchain,
                cls.verify_next_workchain
            ),
            cls.finalize
        )  # yapf: disable

        spec.expose_outputs(cls._next_workchain)
Example #18
def test_toten_electronic(fresh_aiida_env, vasprun_parser):
    """
    Check that the total energies are of type ArrayData and that there are entries per electronic step.

    Also check that the entries are as expected.

    """

    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser, quantity_keys=['energies'])
    data_obj = NodeComposer.compose('array', inputs)
    # Test that the object is of the right type
    ref_obj = get_data_class('array')
    assert isinstance(data_obj, ref_obj)
    # Test that the default arrays are present
    assert set(data_obj.get_arraynames()) == set([
        'energy_extrapolated_final', 'energy_extrapolated', 'electronic_steps'
    ])
    energies = data_obj.get_array('energy_extrapolated')
    test_array = np.array([-42.91113666, -42.91113621])
    assert np.allclose(test_array, energies)
    # Test number of entries
    assert energies.shape == (2, )
    # Electronic steps should be two
    test_array = np.array([2])
    assert np.allclose(test_array, data_obj.get_array('electronic_steps'))
    # Testing on VASP 5 so final total energy should not be the same as the last electronic step total energy.
    test_array = np.array([-0.00236711])
    assert np.allclose(test_array,
                       data_obj.get_array('energy_extrapolated_final'))
Example #19
def test_eigenocc_spin_result(fresh_aiida_env, vasprun_parser):
    """
    Check that the eigenvalues are of type BandsData.

    Also check that the entries are as expected, including the
    occupancies. This test is for spin separated systems.

    """

    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser,
        quantity_keys=['eigenvalues', 'kpoints', 'occupancies'])
    data_obj = NodeComposer.compose('array.bands', inputs)
    # test object
    ref_obj = get_data_class('array.bands')
    assert isinstance(data_obj, ref_obj)
    eigenocc = data_obj.get_bands(also_occupations=True)
    eigen = eigenocc[0]
    occ = eigenocc[1]
    # test shape of array
    assert eigen.shape == (2, 64, 25)
    assert occ.shape == (2, 64, 25)
    # test a few entries
    assert eigen[0, 0, 0] == -6.2363
    assert eigen[0, 0, 15] == 5.8939
    assert eigen[0, 6, 4] == -1.7438
    assert eigen[1, 0, 0] == -6.2357
    assert eigen[1, 0, 15] == 5.8946
    assert eigen[1, 6, 4] == -1.7432
    assert occ[0, 0, 0] == 1.0
    assert occ[0, 0, 15] == 0.6955
    assert occ[0, 6, 4] == 1.0
    assert occ[1, 0, 0] == 1.0
    assert occ[1, 0, 15] == 0.6938
    assert occ[1, 6, 4] == 1.0
Example #20
def test_potcar_from_node(potcar_family):
    """Create a PotcarIo instance from a PotcarData node."""
    potcar_ga = get_data_class('vasp.potcar').find_one(element='Ga')
    from_ctor = PotcarIo(potcar_node=potcar_ga)
    verify_potcario(from_ctor)
    from_from = PotcarIo.from_(potcar_ga)
    assert from_ctor == from_from
Example #21
def test_potcar_from_file_node(potcar_family):
    """Create a PotcarIo instance from a PotcarFileData node."""
    potcar_file_in = get_data_class('vasp.potcar_file').find_one(element='In')
    from_ctor = PotcarIo(potcar_file_node=potcar_file_in)
    verify_potcario(from_ctor)
    from_from = PotcarIo.from_(potcar_file_in)
    assert from_ctor == from_from
Example #22
def test_structure(fresh_aiida_env, vasprun_parser):
    """
    Test that the structure result node is a StructureData instance.

    Also check various other important properties.

    """

    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser, quantity_keys=['structure'])
    data_obj = NodeComposer.compose('structure', inputs)
    # check object
    ref_obj = get_data_class('structure')
    assert isinstance(data_obj, ref_obj)
    # check cell
    unitcell = data_obj.cell
    assert np.all(unitcell[0] == np.array([5.46503124, 0.0, 0.0]))
    assert np.all(unitcell[1] == np.array([0.0, 5.46503124, 0.0]))
    assert np.all(unitcell[2] == np.array([0.0, 0.0, 5.46503124]))
    # check first and last position
    assert np.all(data_obj.sites[0].position == np.array([0.0, 0.0, 0.0]))
    assert np.all(data_obj.sites[7].position == np.array(
        [4.09877343, 4.09877343, 1.36625781]))
    # check volume
    assert data_obj.get_cell_volume() == float(163.22171870360754)
Example #23
def test_hessian(fresh_aiida_env, vasprun_parser):
    """
    Check that the Hessian matrix is of type ArrayData.

    Also check that the entries are as expected.

    """

    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser, quantity_keys=['hessian'])
    data_obj = NodeComposer.compose('array', inputs)
    # test object
    ref_obj = get_data_class('array')
    assert isinstance(data_obj, ref_obj)
    hessian = data_obj.get_array('hessian')
    # test shape
    assert hessian.shape == (24, 24)
    # test a few entries
    assert np.all(hessian[0] == np.array([
        -4.63550410e-01, 0.00000000e+00, 0.00000000e+00, -5.91774100e-02,
        0.00000000e+00, 0.00000000e+00, 3.09711000e-02, 0.00000000e+00,
        0.00000000e+00, 3.20435400e-02, 0.00000000e+00, 0.00000000e+00,
        1.15129840e-01, -8.16138200e-02, 8.17234700e-02, 1.14879520e-01,
        8.11324800e-02, 8.27409500e-02, 1.14879520e-01, -8.11324800e-02,
        -8.27409500e-02, 1.15129840e-01, 8.16138200e-02, -8.17234700e-02
    ]))
    assert np.all(hessian[-2] == np.array([
        8.16138200e-02, 1.15195590e-01, -8.38411100e-02, -8.17234700e-02,
        1.14875090e-01, -8.53388100e-02, 3.46686900e-02, 7.00672700e-02,
        2.54288300e-02, -8.26222700e-02, 1.16185510e-01, 7.95575600e-02,
        -3.05970000e-04, 3.16827300e-02, 2.86379000e-03, 5.42080000e-04,
        3.27613500e-02, 1.12576000e-03, -1.34305000e-03, -5.86811100e-02,
        2.83374000e-03, 4.91688400e-02, -4.22101090e-01, 5.73736900e-02
    ]))
Example #24
 def _compose_array(node_type, inputs):
     """Compose an array node."""
     node = get_data_class(node_type)()
     for item in inputs:
         for key, value in inputs[item].items():
             node.set_array(key, value)
     return node
Example #25
 def from_(cls, potcar):
     """Determine the best guess at how the input represents a POTCAR file and construct a PotcarIo instance based on that."""
     if isinstance(potcar, str):
         if py_path.local(potcar).exists():
             potcar = cls(path=potcar)
         else:
             potcar = cls(contents=potcar)
     elif isinstance(potcar, get_data_class('vasp.potcar')):
         potcar = cls(potcar_node=potcar)
     elif isinstance(potcar, get_data_class('vasp.potcar_file')):
         potcar = cls(potcar_file_node=potcar)
     elif isinstance(potcar, PotcarIo):
         pass
     else:
         # fall back to string conversion to avoid recursing forever on the same object
         potcar = cls.from_(str(potcar))
     return potcar
Example #26
def test_bands(fresh_aiida_env, vasprun_parser):
    """
    Check that the eigenvalues are of type BandsData.

    Also check that the entries are as expected including the
    occupancies.

    """

    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser,
        quantity_keys=['eigenvalues', 'kpoints', 'occupancies'])
    data_obj = NodeComposer.compose('array.bands', inputs)
    # test object
    ref_obj = get_data_class('array.bands')
    assert isinstance(data_obj, ref_obj)
    eigenocc = data_obj.get_bands(also_occupations=True)
    eigen = eigenocc[0]
    occ = eigenocc[1]
    # test shape of array
    assert eigen.shape == (1, 64, 21)
    assert occ.shape == (1, 64, 21)
    # test a few entries
    assert eigen[0, 0, 0] == -6.2348
    assert eigen[0, 0, 15] == 5.8956
    assert eigen[0, 6, 4] == -1.7424
    assert occ[0, 0, 0] == 1.0
    assert occ[0, 0, 15] == 0.6949
    assert occ[0, 6, 4] == 1.0
Example #27
def test_upload(fresh_aiida_env, temp_pot_folder):
    """Test uploading a family of POTCAR files."""
    family_name = 'test_family'
    family_desc = 'Test Family'
    potcar_cls = get_data_class('vasp.potcar')
    pot_dir = temp_pot_folder.strpath
    potcar_ga = py_path.local(data_path('potcar')).join('Ga')
    assert not potcar_ga.exists()

    potcar_cls.upload_potcar_family(pot_dir, family_name, family_desc)

    assert potcar_cls.exists(element='In')
    assert potcar_cls.exists(element='As')
    assert potcar_cls.exists(element='Ga')
    assert not potcar_ga.exists()

    # this is supposed to return only one group, however it returns 8 (= number of uploaded files)
    assert [g.label for g in potcar_cls.get_potcar_groups()] == [family_name]

    assert len(potcar_cls.get_potcar_group(family_name).nodes) >= 3

    with pytest.raises(ValueError):
        potcar_cls.upload_potcar_family(pot_dir, family_name, stop_if_existing=True)
    assert not potcar_ga.exists()

    num_files, num_added, num_uploaded = potcar_cls.upload_potcar_family(pot_dir, family_name + '_new', family_desc, stop_if_existing=False)
    assert num_files >= 3
    assert num_added >= 3
    assert num_uploaded == 0
    assert not potcar_ga.exists()
Example #28
    def _parse_file(self, inputs):
        """Create a DB Node from a KPOINTS file."""

        result = {}

        if isinstance(self._data_obj, get_data_class('array.kpoints')):
            return {'kpoints-kpoints': self._data_obj}

        try:
            parsed_kpoints = Kpoints(file_path=self._data_obj.path,
                                     logger=self._logger)
        except SystemExit:
            self._logger.warning(
                'Parsevasp exited abnormally. Returning None.')
            return {'kpoints-kpoints': None}

        if parsed_kpoints.entries.get('mode') == 'line':
            self._logger.warning(
                'The read KPOINTS contained line mode which is '
                'not supported. Returning None.')
            return {'kpoints-kpoints': None}
        result['kpoints-kpoints'] = parsed_kpoints.entries

        return result
Example #29
def test_pdos(fresh_aiida_env, vasprun_parser):
    """
    Check that the density of states is of type ArrayData.

    Also check that the entries are as expected.

    """

    inputs = get_node_composer_inputs_from_file_parser(vasprun_parser,
                                                       quantity_keys=['dos'])
    data_obj = NodeComposer.compose('array', inputs)
    # test object
    ref_obj = get_data_class('array')
    assert isinstance(data_obj, ref_obj)
    dos = data_obj.get_array('pdos')
    energy = data_obj.get_array('energy')
    # test shape of array
    assert dos.shape == (8, 1000, 9)
    assert energy.shape == (1000, )
    # test a few entries
    assert np.all(dos[3, 500] == np.array(
        [0.0770, 0.0146, 0.0109, 0.0155, 0.0, 0.0, 0.0, 0.0, 0.0]))
    assert np.all(dos[7, 500] == np.array(
        [0.0747, 0.0121, 0.0092, 0.0116, 0.0, 0.0, 0.0, 0.0, 0.0]))
    assert energy[500] == 0.01
Example #30
def test_toten_multiple(fresh_aiida_env, vasprun_parser):
    """
    Check that the total energies are of type ArrayData and that we can extract multiple total energies.

    Also check that the entries are as expected.

    """
    inputs = get_node_composer_inputs_from_file_parser(
        vasprun_parser, quantity_keys=['energies'])
    data_obj = NodeComposer.compose('array', inputs)
    # Test that the object is of the right type
    ref_obj = get_data_class('array')
    assert isinstance(data_obj, ref_obj)
    assert set(data_obj.get_arraynames()) == set([
        'electronic_steps', 'energy_free', 'energy_free_final',
        'energy_no_entropy', 'energy_no_entropy_final'
    ])
    test_array = np.array([-42.91231976])
    assert np.allclose(test_array, data_obj.get_array('energy_free'))
    assert np.allclose(test_array, data_obj.get_array('energy_free_final'))
    test_array = np.array([-42.90995265])
    assert np.allclose(test_array, data_obj.get_array('energy_no_entropy'))
    test_array = np.array([-42.91113621])
    assert np.allclose(test_array,
                       data_obj.get_array('energy_no_entropy_final'))